| field | value | date |
|---|---|---|
| author | Étienne Loks <etienne.loks@iggdrasil.net> | 2022-02-24 10:21:39 +0100 |
| committer | Étienne Loks <etienne.loks@iggdrasil.net> | 2022-12-12 12:21:00 +0100 |
| commit | a14d0067a933ff3898773119fb85146545530dc3 (patch) | |
| tree | 191aa0fefa6fe216edc331c6421c5e342716f5d7 | |
| parent | 55f311f240402612feb72766925131e585040833 (diff) | |
| download | Ishtar-a14d0067a933ff3898773119fb85146545530dc3.tar.bz2, Ishtar-a14d0067a933ff3898773119fb85146545530dc3.zip | |
Geodata redesign: site migration
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | archaeological_operations/models.py | 187 |
| -rw-r--r-- | ishtar_common/management/commands/migrate_to_geo_v4.py | 166 |
| -rw-r--r-- | ishtar_common/models.py | 1 |

3 files changed, 196 insertions(+), 158 deletions(-)
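In short, the patch moves the town/area geodata bookkeeping that previously lived in `Operation.post_save_geo()` into a new abstract `GeographicTownItem` mixin so that `Operation` and `ArchaeologicalSite` share it, makes the `migrate_to_geo_v4` command iterate over both models, and sets the `name` attribute when `Area` geodata is created. The inheritance pattern, reduced to a minimal hedged sketch (class names and the app label are taken from the patch; the empty bodies are illustrative placeholders, not the real models):

```python
# Minimal sketch of the inheritance pattern introduced by the patch, assuming a
# standard Django project. GeoItem, GeographicTownItem, Operation,
# ArchaeologicalSite and the "archaeological_operations" app label come from
# the patch; everything else is illustrative.
from django.db import models


class GeoItem(models.Model):
    # stand-in for ishtar_common's GeoItem mixin
    class Meta:
        abstract = True


class GeographicTownItem(GeoItem):
    # abstract mixin: shared behaviour, no database table of its own
    class Meta:
        abstract = True

    def post_save_geo(self, save=True):
        # shared town/area geodata bookkeeping (full version in the diff below)
        pass


class Operation(GeographicTownItem):
    class Meta:
        app_label = "archaeological_operations"


class ArchaeologicalSite(GeographicTownItem):
    class Meta:
        app_label = "archaeological_operations"
```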
diff --git a/archaeological_operations/models.py b/archaeological_operations/models.py
index 40ee27fba..263aa5f55 100644
--- a/archaeological_operations/models.py
+++ b/archaeological_operations/models.py
@@ -159,11 +159,97 @@ post_save.connect(post_save_cache, sender=RecordQualityType)
 post_delete.connect(post_save_cache, sender=RecordQualityType)
 
 
+class GeographicTownItem(GeoItem):
+    class Meta:
+        abstract = True
+
+    def post_save_geo(self, save=True):
+        # manage geodata towns
+        if getattr(self, "_post_save_geo_ok", False):
+            # prevent infinite loop - should not happen, but...
+            return
+        self._post_save_geo_ok = True
+        q_towns = self.towns.filter(main_geodata__multi_polygon__isnull=False)
+        q_towns_nb = q_towns.count()
+        q_geodata_town = self.geodata.filter(
+            source_content_type__model="town",
+            source_content_type__app_label="ishtar_common",
+        )
+        q_geodata_area = self.geodata.filter(
+            source_content_type__model="area",
+            source_content_type__app_label="ishtar_common",
+        )
+        changed = False
+        if q_towns_nb != 1:
+            # no simple town - clean
+            for geo in q_geodata_town.all():
+                self.geodata.remove(geo)
+                if self.main_geodata == geo:
+                    self.main_geodata = None
+                changed = True
+        if q_towns_nb < 2:
+            # no area - clean
+            for geo in q_geodata_area.all():
+                self.geodata.remove(geo)
+                if self.main_geodata == geo:
+                    self.main_geodata = None
+                changed = True
+
+        current_town_geo = None
+        if q_towns_nb == 1:
+            current_town_geo = q_towns.all()[0]
+            if not q_geodata_town.filter(pk=current_town_geo.pk).count():
+                for geo in q_geodata_town.exclude(source_id=current_town_geo.pk).all():
+                    self.geodata.remove(geo)
+                    if self.main_geodata == geo:
+                        self.main_geodata = None
+                self.geodata.add(current_town_geo.main_geodata)
+                changed = True
+
+        current_geo_area = None
+        if q_towns_nb > 1:
+            current_geo_area = Area.get_or_create_by_towns(q_towns, get_geo=True)
+            if (
+                current_geo_area
+                and not q_geodata_area.filter(pk=current_geo_area.pk).count()
+            ):
+                for geo in q_geodata_area.all():
+                    self.geodata.remove(geo)
+                    if self.main_geodata == geo:
+                        self.main_geodata = None
+                self.geodata.add(current_geo_area)
+                changed = True
+
+        if current_town_geo:
+            q_extra_geo_town = q_geodata_town.exclude(source_id=current_town_geo.pk)
+            if q_extra_geo_town.count():
+                # should not occur but bad migrations, bad imports...
+                for geo in q_extra_geo_town.all():
+                    self.geodata.remove(geo)
+                    if self.main_geodata == geo:
+                        self.main_geodata = None
+                    changed = True
+        if current_geo_area:
+            q_extra_geo_area = q_geodata_area.exclude(pk=current_geo_area.pk)
+            if q_extra_geo_area.count():
+                # should not occur but bad migrations, bad imports...
+                for geo in q_extra_geo_area.all():
+                    self.geodata.remove(geo)
+                    if self.main_geodata == geo:
+                        self.main_geodata = None
+                    changed = True
+
+        if changed and save:
+            self.skip_history_when_saving = True
+            self._no_move = True
+            self.save()
+
+
 class ArchaeologicalSite(
     DocumentItem,
     BaseHistorizedItem,
     CompleteIdentifierItem,
-    GeoItem,
+    GeographicTownItem,
     OwnPerms,
     ValueGetter,
     MainItem,
@@ -786,7 +872,7 @@ class Operation(
     DocumentItem,
     BaseHistorizedItem,
     CompleteIdentifierItem,
-    GeoItem,
+    GeographicTownItem,
     OwnPerms,
     ValueGetter,
     MainItem,
@@ -1132,11 +1218,17 @@ class Operation(
     ]
     SERIALIZE_PROPERTIES = MainItem.SERIALIZE_PROPERTIES + ["short_label"]
     SERIALIZE_DATES = ["start_date", "excavation_end_date"]
-    SERIALIZE_CALL = {"closing": "serialize_closing",
-                      "archaeological_sites_list": "archaeological_sites_list",
-                      "documents_list": "documents_list"}
-    SERIALIZE_EXCLUDE = ["search_vector", "documents", "operations",
-                         "archaeological_sites"]
+    SERIALIZE_CALL = {
+        "closing": "serialize_closing",
+        "archaeological_sites_list": "archaeological_sites_list",
+        "documents_list": "documents_list",
+    }
+    SERIALIZE_EXCLUDE = [
+        "search_vector",
+        "documents",
+        "operations",
+        "archaeological_sites",
+    ]
     SERIALIZE_STRING = ["scientist", "in_charge", "cira_rapporteur"]
 
     # fields definition
@@ -1479,8 +1571,9 @@ class Operation(
         return dct
 
     def archaeological_sites_list(self) -> list:
-        return self.get_associated_main_item_list("archaeological_sites",
-                                                   ArchaeologicalSite)
+        return self.get_associated_main_item_list(
+            "archaeological_sites", ArchaeologicalSite
+        )
 
     @classmethod
     def _get_department_code(cls, value):
@@ -2067,82 +2160,6 @@ class Operation(
             res["mode"] = " ; ".join([str(m) for m in mode(finds)])
         return res
 
-    def post_save_geo(self, save=True):
-        # manage geodata towns
-        if getattr(self, "_post_save_geo_ok", False):
-            # prevent infinite loop - should not happen, but...
-            return
-        self._post_save_geo_ok = True
-        q_towns = self.towns.filter(main_geodata__multi_polygon__isnull=False)
-        q_towns_nb = q_towns.count()
-        q_geodata_town = self.geodata.filter(
-            source_content_type__model="town",
-            source_content_type__app_label="ishtar_common")
-        q_geodata_area = self.geodata.filter(
-            source_content_type__model="area",
-            source_content_type__app_label="ishtar_common")
-        changed = False
-        if q_towns_nb != 1:
-            # no simple town - clean
-            for geo in q_geodata_town.all():
-                self.geodata.remove(geo)
-                if self.main_geodata == geo:
-                    self.main_geodata = None
-                changed = True
-        if q_towns_nb < 2:
-            # no area - clean
-            for geo in q_geodata_area.all():
-                self.geodata.remove(geo)
-                if self.main_geodata == geo:
-                    self.main_geodata = None
-                changed = True
-
-        current_town_geo = None
-        if q_towns_nb == 1:
-            current_town_geo = q_towns.all()[0]
-            if not q_geodata_town.filter(pk=current_town_geo.pk).count():
-                for geo in q_geodata_town.exclude(source_id=current_town_geo.pk).all():
-                    self.geodata.remove(geo)
-                    if self.main_geodata == geo:
-                        self.main_geodata = None
-                self.geodata.add(current_town_geo.main_geodata)
-                changed = True
-
-        current_geo_area = None
-        if q_towns_nb > 1:
-            current_geo_area = Area.get_or_create_by_towns(q_towns, get_geo=True)
-            if current_geo_area and not q_geodata_area.filter(source_id=current_geo_area.pk).count():
-                for geo in q_geodata_area.all():
-                    self.geodata.remove(geo)
-                    if self.main_geodata == geo:
-                        self.main_geodata = None
-                self.geodata.add(current_geo_area)
-                changed = True
-
-        if current_town_geo:
-            q_extra_geo_town = q_geodata_town.exclude(source_id=current_town_geo.pk)
-            if q_extra_geo_town.count():
-                # should not occur but bad migrations, bad imports...
-                for geo in q_extra_geo_town.all():
-                    self.geodata.remove(geo)
-                    if self.main_geodata == geo:
-                        self.main_geodata = None
-                    changed = True
-        if current_geo_area:
-            q_extra_geo_area = q_geodata_area.exclude(source_id=current_geo_area.pk)
-            if q_extra_geo_area.count():
-                # should not occur but bad migrations, bad imports...
-                for geo in q_extra_geo_area.all():
-                    self.geodata.remove(geo)
-                    if self.main_geodata == geo:
-                        self.main_geodata = None
-                    changed = True
-
-        if changed and save:
-            self.skip_history_when_saving = True
-            self._no_move = True
-            self.save()
-
     def save(self, *args, **kwargs):
         # put a default year if start_date is defined
         if self.start_date and not self.year:
diff --git a/ishtar_common/management/commands/migrate_to_geo_v4.py b/ishtar_common/management/commands/migrate_to_geo_v4.py
index 8daf921d9..ed1a877f3 100644
--- a/ishtar_common/management/commands/migrate_to_geo_v4.py
+++ b/ishtar_common/management/commands/migrate_to_geo_v4.py
@@ -59,84 +59,104 @@ def migrate(quiet=False, log=True):
         sys.stdout.write(f"\r[{get_time()}] Towns migrated\n")
         sys.stdout.flush()
 
-    # manage operation vector sources
-    operation_content_type = ContentType.objects.get(
-        app_label="archaeological_operations", model="operation"
-    )
-    q = Operation.objects.exclude(main_geodata__isnull=False)
-    nb = q.count()
-    data_type_area, __ = models_common.GeoDataType.objects.get_or_create(
-        txt_idx="operation-area", defaults={"label": "Emprise de l'opération"}
-    )
-    data_type_center, __ = models_common.GeoDataType.objects.get_or_create(
-        txt_idx="operation-center", defaults={"label": "Centre de l'opération"}
-    )
-    for idx, operation in enumerate(q.all()):
-        if not quiet:
-            sys.stdout.write(f"\r[{percent(idx, nb)}] Migrate operations {idx + 1}/{nb}")
-            sys.stdout.flush()
-
-        operation._no_move = True
-        operation.skip_history_when_saving = True
-        operation.save()  # auto manage geo town association
-        q_towns = operation.towns.filter(main_geodata__multi_polygon__isnull=False)
-        if q_towns.count() > 1:
-            changed.append(
-                ["operation", str(operation), operation.pk,
-                 "Association géo de zone communale"])
-        elif q_towns.count() == 1:
-            changed.append(
-                ["operation", str(operation), operation.pk,
-                 "Association géo de commune"])
-        if operation.multi_polygon_source == "P" and operation.multi_polygon:
-            attrs = {
-                "name": f"{_('Operation')}{_(':')} {str(operation)}",
-                "source_content_type": operation_content_type,
-                "source_id": operation.pk,
-                "multi_polygon": operation.multi_polygon,
-                "data_type": data_type_area,
-            }
-            data = models_common.GeoVectorData.objects.create(**attrs)
-            operation.main_geodata = data
-            operation.save()
-            changed.append(
-                ["geovectordata", data.name, data.pk, "Multi-polygone opération"])
-        if operation.point_source == "P" and operation.point_2d:
-            if operation.x and operation.y:
-                attrs = {
-                    "name": f"{_('Operation')}{_(':')} {str(operation)}",
-                    "source_content_type": operation_content_type,
-                    "source_id": operation.pk,
-                    "data_type": data_type_center,
-                    "x": operation.x,
-                    "y": operation.y,
-                    "z": operation.z,
-                }
-                data = models_common.GeoVectorData.objects.create(**attrs)
-                operation.main_geodata = data
-                operation.save()
+    model_list = [
+        ("operation", "opération", "de l'opération", Operation),
+        ("archaeologicalsite", "site", "du site", ArchaeologicalSite),
+    ]
+    for model_slug, model_name, model_full_name, model in model_list:
+        # manage operation vector sources
+        model_content_type = ContentType.objects.get(
+            app_label="archaeological_operations", model=model_slug
+        )
+        q = model.objects.exclude(main_geodata__isnull=False)
+        nb = q.count()
+        data_type_area, __ = models_common.GeoDataType.objects.get_or_create(
+            txt_idx=f"{model_slug}-area",
+            defaults={"label": f"Emprise {model_full_name}"},
+        )
+        data_type_center, __ = models_common.GeoDataType.objects.get_or_create(
+            txt_idx="operation-center", defaults={"label": f"Centre {model_full_name}"}
+        )
+        for idx, obj in enumerate(q.all()):
+            if not quiet:
+                sys.stdout.write(
+                    f"\r[{percent(idx, nb)}] Migrate {model_name}s {idx + 1}/{nb}"
+                )
+                sys.stdout.flush()
+
+            obj._no_move = True
+            obj.skip_history_when_saving = True
+            obj.save()  # auto manage geo town association
+            q_towns = obj.towns.filter(main_geodata__multi_polygon__isnull=False)
+            if q_towns.count() > 1:
+                changed.append(
+                    [model_slug, str(obj), obj.pk, "Association géo de zone communale"]
+                )
+            elif q_towns.count() == 1:
                 changed.append(
-                    ["geovectordata", data.name, data.pk, "Coordonnées opération"])
-            elif operation.point_2d:
+                    [model_slug, str(obj), obj.pk, "Association géo de commune"]
+                )
+            if obj.multi_polygon_source == "P" and obj.multi_polygon:
                 attrs = {
-                    "name": f"{_('Operation')}{_(':')} {str(operation)}",
-                    "source_content_type": operation_content_type,
-                    "source_id": operation.pk,
-                    "data_type": data_type_center,
+                    "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
+                    "source_content_type": model_content_type,
+                    "source_id": obj.pk,
+                    "multi_polygon": obj.multi_polygon,
+                    "data_type": data_type_area,
                 }
-                if operation.point:
-                    attrs["point_3d"] = operation.point
-                else:
-                    attrs["point_2d"] = operation.point_2d
                 data = models_common.GeoVectorData.objects.create(**attrs)
-                operation.main_geodata = data
-                operation.save()
+                obj.main_geodata = data
+                obj.save()
                 changed.append(
-                    ["geovectordata", data.name, data.pk, "Point opération"])
-    if not quiet and nb:
-        sys.stdout.write(f"\r[{get_time()}] Operation migrated\n")
-        sys.stdout.flush()
-
+                    [
+                        "geovectordata",
+                        data.name,
+                        data.pk,
+                        f"Multi-polygone {model_name}",
+                    ]
+                )
+            if obj.point_source == "P" and obj.point_2d:
+                if obj.x and obj.y:
+                    attrs = {
+                        "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
+                        "source_content_type": model_content_type,
+                        "source_id": obj.pk,
+                        "data_type": data_type_center,
+                        "x": obj.x,
+                        "y": obj.y,
+                        "z": obj.z,
+                    }
+                    data = models_common.GeoVectorData.objects.create(**attrs)
+                    obj.main_geodata = data
+                    obj.save()
+                    changed.append(
+                        [
+                            "geovectordata",
+                            data.name,
+                            data.pk,
+                            f"Coordonnées {model_name}",
+                        ]
+                    )
+                elif obj.point_2d:
+                    attrs = {
+                        "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
+                        "source_content_type": model_content_type,
+                        "source_id": obj.pk,
+                        "data_type": data_type_center,
+                    }
+                    if obj.point:
+                        attrs["point_3d"] = obj.point
+                    else:
+                        attrs["point_2d"] = obj.point_2d
+                    data = models_common.GeoVectorData.objects.create(**attrs)
+                    obj.main_geodata = data
+                    obj.save()
+                    changed.append(
+                        ["geovectordata", data.name, data.pk, f"Point {model_name}"]
+                    )
+        if not quiet and nb:
+            sys.stdout.write(f"\r[{get_time()}] {model_name.capitalize()} migrated\n")
+            sys.stdout.flush()
 
     if log and changed:
         filename = f"geo_migration-created-{get_time().replace(':', '')}.csv"
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index 833851b5c..fbab13010 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -2298,6 +2298,7 @@ class Area(HierarchicalType):
             defaults={"label": str(_("Communal area boundaries"))}
         )
         attrs["data_type"] = data_type
+        attrs["name"] = name
         geo = GeoVectorData.objects.create(**attrs)
     else:
         geo = q.all()[0]
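For reference, `migrate_to_geo_v4` lives under `ishtar_common/management/commands/`, so it is a regular Django management command and would normally be run with `python manage.py migrate_to_geo_v4` or through `call_command`. A hedged usage sketch (the command's `Command` class is not shown in this diff, so no options are assumed):

```python
# Hedged usage sketch: trigger the migrate_to_geo_v4 management command from
# Python code running with the project's Django settings configured.
# No command-line options are passed because the Command class is not visible
# in this diff.
from django.core.management import call_command

call_command("migrate_to_geo_v4")
```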