-rw-r--r--   ishtar_common/models_imports.py   59
-rw-r--r--   ishtar_common/views_api.py          4
-rw-r--r--   ishtar_common/views_item.py        26
3 files changed, 78 insertions, 11 deletions
diff --git a/ishtar_common/models_imports.py b/ishtar_common/models_imports.py
index 2553cde3f..80b13f312 100644
--- a/ishtar_common/models_imports.py
+++ b/ishtar_common/models_imports.py
@@ -520,10 +520,59 @@ class ImporterType(models.Model):
types.append(ctype)
return types
+ def _create_default_geo_type(self, geo_target):
+ geo_prefix = geo_target[:geo_target.index("geodata")]
+ default = ImporterDefault.objects.create(
+ importer_type=self,
+ target=geo_prefix + "geodata"
+ )
+ ImporterDefaultValues.objects.create(
+ default_target=default,
+ target="data_type",
+ value=self.gis_type.txt_idx,
+ )
+
+ def set_default_geo_data(self):
+ """
+ Set default geo data for QGIS Importer
+ """
+ if self.type != "qgs" or not self.gis_type:
+ return
+
+ # check key is not already created and with the good value
+ q = self.defaults.filter(target__contains="geodata")
+ if q.exists():
+ default = q.all()[0]
+ q = default.default_values.filter(target="data_type")
+ if not q.exists():
+ ImporterDefaultValues.objects.create(
+ default_target=default,
+ target="data_type",
+ value=self.gis_type.txt_idx,
+ )
+ return
+ default_value = q.all()[0]
+ if default_value.value != self.gis_type.txt_idx:
+ default_value.value = self.gis_type.txt_idx
+ default_value.save()
+ return
+
+ for column in self.columns.all():
+ q1 = column.targets.filter(target__contains='geodata')
+ if q1.exists():
+ target = q1.all()[0]
+ self._create_default_geo_type(target.target)
+ return
+ q2 = column.duplicate_fields.filter(field_name__contains='geodata')
+ if q2.exists():
+ duplicate = q2.all()[0]
+ self._create_default_geo_type(duplicate.field_name)
+
def save(self, *args, **kwargs):
if not self.slug:
self.slug = create_slug(ImporterType, self.name)
- return super().save(*args, **kwargs)
+ super().save(*args, **kwargs)
+ self.set_default_geo_data()
class ImporterGroup(models.Model):
@@ -890,6 +939,10 @@ class ImporterDuplicateField(models.Model):
def natural_key(self):
return self.column.importer_type.slug, self.column.col_number, self.field_name
+ def save(self, *args, **kwargs):
+ super().save(*args, **kwargs)
+ self.column.importer_type.set_default_geo_data()
+
class NamedManager(models.Manager):
def get_by_natural_key(self, name):
@@ -1032,6 +1085,10 @@ class ImportTarget(models.Model):
return []
return self.associated_model.get_types()
+ def save(self, *args, **kwargs):
+ super().save(*args, **kwargs)
+ self.column.importer_type.set_default_geo_data()
+
class TargetKeyGroup(models.Model):
"""
diff --git a/ishtar_common/views_api.py b/ishtar_common/views_api.py
index fed1dc14e..2b9ee5713 100644
--- a/ishtar_common/views_api.py
+++ b/ishtar_common/views_api.py
@@ -114,10 +114,11 @@ class GISExportAPI(GISBaseImportView, GISAPIView):
query = unquote_plus(request.GET["query"])
dct = {"query": query, "length": self.PAGE_LEN}
if page > 1:
- dct["start"] = (page - 1) * self.PAGE_LEN + 1
+ dct["start"] = (page - 1) * self.PAGE_LEN
try:
importer_class = importer.get_importer_class()
import_key = importer.get_gis_import_key()
+ geo_type = importer.gis_type.txt_idx
cols, col_names = importer.get_columns(importer_class=importer_class)
obj_name = importer_class.OBJECT_CLS.__name__.lower()
return get_item(importer_class.OBJECT_CLS, "get_" + obj_name, obj_name,
@@ -126,6 +127,7 @@ class GISExportAPI(GISBaseImportView, GISAPIView):
no_link=True, col_names=col_names,
col_types=importer.get_columns_types(),
geo_import_key=import_key,
+ geo_type=geo_type,
**dct
)
except ImporterError as e:
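The "start" offset fix above, illustrated in isolation (assuming, as the change suggests, that "start" is a 0-based offset into the query results and each page holds PAGE_LEN rows):

    PAGE_LEN = 100

    def page_slice(page):
        # new behaviour: page 2 starts at row 100
        start = (page - 1) * PAGE_LEN if page > 1 else 0
        return start, start + PAGE_LEN

    assert page_slice(1) == (0, 100)
    assert page_slice(2) == (100, 200)
    # with the old "+ 1" offset, page 2 started at 101 and row 100 was skipped
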
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index 83c84ae78..b96ab62e9 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -1875,9 +1875,8 @@ def _format_geojson(rows, link_template, display_polygon):
def _get_data_from_query(items, query_table_cols, extra_request_keys,
- geo_fields=None, geo_import_key=None, q_slice=None):
+ geo_fields=None, geo_import_key=None, geo_type=None, q_slice=None):
# TODO: manage data json field
-
for query_keys in query_table_cols:
if not isinstance(query_keys, (tuple, list)):
query_keys = [query_keys]
@@ -1920,7 +1919,7 @@ def _get_data_from_query(items, query_table_cols, extra_request_keys,
result = items[q_slice[0]:q_slice[1]].values_list(*values)
else:
result = items.values_list(*values)
- if not geo_import_key:
+ if not geo_import_key and not geo_type:
return result
# get correct data with geo_import_key match
@@ -1941,11 +1940,16 @@ def _get_data_from_query(items, query_table_cols, extra_request_keys,
column_index[k] = idx + 1
new_geo_values = {}
- q2 = items.filter(**{f"{geo_prefix}geodata__import_key": geo_import_key})
- if q_slice:
- q2 = q2[q_slice[0]:q_slice[1]]
- for geo_result in q2.values_list(*geo_values):
- new_geo_values[geo_result[0]] = geo_result
+ q = items.model.objects.filter(id__in=[v[0] for v in result])
+ if geo_type:
+ q2 = q.filter(**{f"{geo_prefix}geodata__data_type__txt_idx": geo_type})
+ for geo_result in q2.values_list(*geo_values):
+ new_geo_values[geo_result[0]] = geo_result
+ if geo_import_key:
+ q2 = q.filter(**{f"{geo_prefix}geodata__import_key": geo_import_key})
+ for geo_result in q2.values_list(*geo_values):
+ if geo_result[0] not in new_geo_values:
+ new_geo_values[geo_result[0]] = geo_result
new_values = []
for v in result:
v = list(v)
@@ -2522,6 +2526,7 @@ def get_item(
)
geo_import_key = dct.get("geo_import_key", None)
+ geo_type = dct.get("geo_type", None)
fields = [model._meta.get_field(k) for k in get_all_field_names(model)]
@@ -3063,7 +3068,10 @@ def get_item(
geo_fields=geo_fields, q_slice=q_slice
)
elif data_type != "csv" and getattr(model, "NEW_QUERY_ENGINE", False):
- datas = _get_data_from_query(items, query_table_cols, my_extra_request_keys, geo_import_key=geo_import_key, q_slice=q_slice)
+ datas = _get_data_from_query(
+ items, query_table_cols, my_extra_request_keys,
+ geo_import_key=geo_import_key, geo_type=geo_type, q_slice=q_slice,
+ )
else:
if q_slice:
items = items[q_slice[0]:q_slice[1]]
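A simplified sketch (not project code) of the new geodata selection order in _get_data_from_query(): rows matched by the importer's geo data_type take precedence, and import_key matches only fill the gaps. Plain dicts stand in for the values_list() querysets:

    by_data_type = {1: (1, "POINT(0 0)"), 2: (2, "POINT(1 1)")}
    by_import_key = {2: (2, "POINT(9 9)"), 3: (3, "POINT(2 2)")}

    new_geo_values = dict(by_data_type)          # geo_type matches first
    for pk, row in by_import_key.items():
        if pk not in new_geo_values:             # import_key acts as a fallback only
            new_geo_values[pk] = row

    assert new_geo_values[2] == (2, "POINT(1 1)")   # data_type match kept
    assert new_geo_values[3] == (3, "POINT(2 2)")   # filled from import_key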