diff options
Diffstat (limited to 'ishtar_common')
-rw-r--r--  ishtar_common/models_imports.py |  6 ++++++
-rw-r--r--  ishtar_common/views_api.py      |  2 ++
-rw-r--r--  ishtar_common/views_item.py     | 62 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++------
3 files changed, 64 insertions, 6 deletions
diff --git a/ishtar_common/models_imports.py b/ishtar_common/models_imports.py index dd675e840..2b568df21 100644 --- a/ishtar_common/models_imports.py +++ b/ishtar_common/models_imports.py @@ -291,6 +291,12 @@ class ImporterType(models.Model): def importer_groups_label(self) -> str: return " ; ".join([imp.group.name for imp in self.groups.all()]) + def get_gis_import_key(self): + for default in self.defaults.filter(target__endswith="geodata").all(): + q = default.default_values.filter(target="import_key") + if q.count(): + return q.all()[0].value + def get_libreoffice_template(self): if not UnoCalc: return diff --git a/ishtar_common/views_api.py b/ishtar_common/views_api.py index fe301b04d..6c0f8107b 100644 --- a/ishtar_common/views_api.py +++ b/ishtar_common/views_api.py @@ -107,6 +107,7 @@ class GISExportAPI(GISBaseImportView, GISAPIView): dct["start"] = (page - 1) * self.PAGE_LEN + 1 try: importer_class = importer.get_importer_class() + import_key = importer.get_gis_import_key() cols, col_names = importer.get_columns(importer_class=importer_class) obj_name = importer_class.OBJECT_CLS.__name__.lower() return get_item(importer_class.OBJECT_CLS, "get_" + obj_name, obj_name, @@ -114,6 +115,7 @@ class GISExportAPI(GISBaseImportView, GISAPIView): request, data_type="json", full=False, force_own=False, no_link=True, col_names=col_names, col_types=importer.get_columns_types(), + geo_import_key=import_key, **dct ) except ImporterError as e: diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py index 5e9135c7a..3ca98979f 100644 --- a/ishtar_common/views_item.py +++ b/ishtar_common/views_item.py @@ -1874,8 +1874,9 @@ def _format_geojson(rows, link_template, display_polygon): def _get_data_from_query(items, query_table_cols, extra_request_keys, - geo_fields=None): + geo_fields=None, geo_import_key=None, q_slice=None): # TODO: manage data json field + for query_keys in query_table_cols: if not isinstance(query_keys, (tuple, list)): query_keys = [query_keys] 
@@ -1914,7 +1915,50 @@ def _get_data_from_query(items, query_table_cols, extra_request_keys, values.append("locked") values.append("lock_user_id") values = [v for v in values if v] # filter empty values - return items.values_list(*values) + if q_slice: + result = items[q_slice[0]:q_slice[1]].values_list(*values) + else: + result = items.values_list(*values) + if not geo_import_key: + return result + + # get correct data with geo_import_key match + geo_prefix = None + for k in query_table_cols: + # get geodata prefix + if "geodata" in k: + geo_prefix = k[:k.index("geodata")] + break + if geo_prefix is None: + # no real geodata + return result + geo_values = ["id"] + column_index = {} + for idx, k in enumerate(query_table_cols): + if k.startswith(f"{geo_prefix}geodata"): + geo_values.append(k) + column_index[k] = idx + 1 + + new_geo_values = {} + q2 = items.filter(**{f"{geo_prefix}geodata__import_key": geo_import_key}) + if q_slice: + q2 = q2[q_slice[0]:q_slice[1]] + for geo_result in q2.values_list(*geo_values): + new_geo_values[geo_result[0]] = geo_result + new_values = [] + for v in result: + v = list(v) + if v[0] not in new_geo_values: # 0 is id + for geo_value in geo_values[1:]: + idx = column_index[geo_value] + v[idx] = "" + else: + geo_res = new_geo_values[v[0]] + for geo_value in geo_values[1:]: + idx = column_index[geo_value] + v[idx] = geo_res[idx] + new_values.append(v) + return new_values def _get_data_from_query_old( @@ -2465,6 +2509,8 @@ def get_item( copy(relation_types_prefix) if relation_types_prefix else {} ) + geo_import_key = dct.get("geo_import_key", None) + fields = [model._meta.get_field(k) for k in get_all_field_names(model)] request_keys = dict( @@ -2973,12 +3019,15 @@ def get_item( slice_query = (start, end) callback("get_item", request, data_type, items, slice_query) + q_slice = [] if manual_sort_key: items = items.all() else: - items = items[start:end] + q_slice = [start, end] if old: + if q_slice: + items = items[q_slice[0]:q_slice[1]] 
items = [item.get_previous(old) for item in items] if data_type == "json-map": @@ -2988,11 +3037,13 @@ def get_item( geo_fields = query_table_cols[-2:] datas = _get_data_from_query( items, query_table_cols, my_extra_request_keys, - geo_fields=geo_fields + geo_fields=geo_fields, q_slice=q_slice ) elif data_type != "csv" and getattr(model, "NEW_QUERY_ENGINE", False): - datas = _get_data_from_query(items, query_table_cols, my_extra_request_keys) + datas = _get_data_from_query(items, query_table_cols, my_extra_request_keys, geo_import_key=geo_import_key, q_slice=q_slice) else: + if q_slice: + items = items[q_slice[0]:q_slice[1]] datas = _get_data_from_query_old( items, query_table_cols, @@ -3000,7 +3051,6 @@ def get_item( my_extra_request_keys, do_not_deduplicate, ) - if manual_sort_key: # +1 because the id is added as a first col idx_col = None |