#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Management command: migrate legacy geographic fields to GeoVectorData.

Walks Towns, Operations, ArchaeologicalSites, ContextRecords, BaseFinds,
Warehouses and Containers, creating ``GeoVectorData`` entries from their
legacy polygon/point fields, optionally in parallel, and writes a CSV log
of everything created.
"""
import csv
import datetime
import os
import sys
from multiprocessing import Pool, Process

from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from django.db import connection

from ishtar_common.utils import ugettext_lazy as _, get_log_time, get_percent, get_eta
from ishtar_common import models_common, models
from archaeological_operations.models import Operation, ArchaeologicalSite
from archaeological_context_records.models import ContextRecord
from archaeological_finds.models import BaseFind
from archaeological_warehouse.models import Warehouse, Container

debug = False

log_path = os.sep.join([settings.ROOT_PATH, "logs"])
if not os.path.exists(log_path):
    os.mkdir(log_path, mode=0o770)

# import-time DB lookups: this module is only ever loaded as a management
# command, after Django is fully initialised
town_content_type = ContentType.objects.get(app_label="ishtar_common", model="town")
data_type, __ = models_common.GeoDataType.objects.get_or_create(
    txt_idx="town-limit", defaults={"label": "Limites commune"}
)
provider, __ = models_common.GeoProviderType.objects.get_or_create(
    txt_idx="france-ign", defaults={"label": "IGN"}
)

# rows [model slug, name, pk, context] later written to the CSV log.
# NOTE: only ever mutated in the parent process — worker functions *return*
# their rows and write_output() collects them (appending from a Pool child
# would be lost on fork boundaries).
changed = []


def _process_town(town_id):
    """Create a GeoVectorData for one town (limit polygon or center point).

    :param town_id: primary key of the Town to process.
    :return: list of change rows for the CSV log (empty if nothing created).

    Runs in a worker process: the inherited DB connection is closed first so
    Django transparently opens a fresh one.
    """
    connection.close()
    town = models_common.Town.objects.get(pk=town_id)
    attrs = {
        "name": town._generate_cached_label(),
        "source_content_type": town_content_type,
        "source_id": town.pk,
        "data_type": data_type,
        "provider": provider,
    }
    # prefer the full communal limit polygon; fall back to the center point
    if town.limit:
        attrs["multi_polygon"] = town.limit
    else:
        attrs["point_2d"] = town.center
    data, created = models_common.GeoVectorData.objects.get_or_create(**attrs)
    town.main_geodata = data
    town._post_save_geo_ok = False  # prevent the save hook from redoing geo work
    town.save()
    if created:
        return [["geovectordata", data.name, data.pk, "Création commune"]]
    return []


# per-model context shared with worker functions; set by migrate() before each
# launch_job() call (workers inherit them through fork, as the Pool is created
# after they are assigned)
model_slug, model_name, model_full_name, model = None, None, None, None
model_content_type, data_type_area, data_type_center = None, None, None

# verbose names used to recognise which model a legacy geo source refers to
cls_labels = {
    "ContextRecord": ["Context Record", "Unité d'Enregistrement", "", None],
    "BaseFind": ["Base find", "Mobilier d'origine", "", None],
    "Operation": ["Operation", "Opération", "", None],
    "ArchaeologicalSite": [
        "Entité (EA)",
        "Entité archéologique",
        "Archaeological site",
        "",
        None,
    ],
}


def _process_site_ope(obj):
    """Migrate legacy geo fields of an Operation or ArchaeologicalSite.

    :param obj: the Operation/ArchaeologicalSite instance to migrate.
    :return: list of change rows for the CSV log.
    """
    connection.close()
    rows = []
    if debug:
        base_debug = "_process_site_ope - l. {} - " + f"{obj} ({obj.pk}) - " + "{}"
    obj._no_move = True
    obj.skip_history_when_saving = True
    obj.save()  # save hooks auto-manage the geo/town association
    # auto manage geo town association
    q_towns = obj.towns.filter(main_geodata__multi_polygon__isnull=False)
    nb_towns = q_towns.count()  # hoisted: was two COUNT queries
    if nb_towns > 1:
        rows.append(
            [model_slug, str(obj), obj.pk, "Association géo de zone communale"]
        )
    elif nb_towns == 1:
        rows.append([model_slug, str(obj), obj.pk, "Association géo de commune"])
    obj_verbose_names = cls_labels[obj.__class__.__name__]
    has_poly = False
    # "P" source means the geometry was set directly on this item
    if obj.multi_polygon_source == "P" and obj.multi_polygon \
            and obj.multi_polygon_source_item in obj_verbose_names:
        if debug:
            print(base_debug.format(95, "multi_polygon"))
        attrs = {
            "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
            "source_content_type": model_content_type,
            "source_id": obj.pk,
            "multi_polygon": obj.multi_polygon,
            "data_type": data_type_area,
        }
        data = models_common.GeoVectorData.objects.create(**attrs)
        obj.main_geodata = data
        obj._post_save_geo_ok = False
        obj.save()
        has_poly = True
        rows.append(
            ["geovectordata", data.name, data.pk, f"Multi-polygone {model_name}"]
        )
    if obj.point_source == "P" and obj.point_2d \
            and obj.point_source_item in obj_verbose_names:
        if obj.x and obj.y:
            # explicit coordinates take precedence over the stored point
            if debug:
                print(base_debug.format(95, "point - coordinates"))
            attrs = {
                "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
                "source_content_type": model_content_type,
                "source_id": obj.pk,
                "data_type": data_type_center,
                "x": obj.x,
                "y": obj.y,
                "z": obj.z,
                "spatial_reference_system": obj.spatial_reference_system,
            }
            data = models_common.GeoVectorData.objects.create(**attrs)
            if not has_poly:
                obj.main_geodata = data
                obj._post_save_geo_ok = False
                obj.save()
            else:
                # the polygon stays the main geodata; the point is secondary
                obj.geodata.add(data)
            rows.append(
                ["geovectordata", data.name, data.pk, f"Coordonnées {model_name}"]
            )
        elif obj.point_2d:
            if debug:
                print(base_debug.format(95, "point_2d"))
            attrs = {
                "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
                "source_content_type": model_content_type,
                "source_id": obj.pk,
                "data_type": data_type_center,
            }
            if obj.point:
                attrs["point_3d"] = obj.point
            else:
                attrs["point_2d"] = obj.point_2d
            data = models_common.GeoVectorData.objects.create(**attrs)
            if not has_poly:
                obj.main_geodata = data
                obj._post_save_geo_ok = False
                obj.save()
            else:
                obj.geodata.add(data)
            rows.append(
                ["geovectordata", data.name, data.pk, f"Point {model_name}"]
            )
    return rows


data_type_outline = None


def _process_main(obj):
    """Migrate legacy geo fields of a ContextRecord or BaseFind.

    Same shape as :func:`_process_site_ope` but uses the "outline" data type
    and also accepts empty/None source items.

    :param obj: the instance to migrate.
    :return: list of change rows for the CSV log.
    """
    connection.close()
    rows = []
    obj._no_move = True
    obj.skip_history_when_saving = True
    obj.save()
    # auto manage geo town association
    if obj.main_geodata:
        rows.append(
            [model_slug, str(obj), obj.pk, "Association géo de zone communale"]
        )
    obj_verbose_names = cls_labels[obj.__class__.__name__]
    has_poly = False
    if obj.multi_polygon_source == "P" and obj.multi_polygon \
            and (obj.multi_polygon_source_item in obj_verbose_names
                 or obj.multi_polygon_source_item in (None, "")):
        attrs = {
            "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
            "source_content_type": model_content_type,
            "source_id": obj.pk,
            "multi_polygon": obj.multi_polygon,
            "data_type": data_type_outline,
        }
        data = models_common.GeoVectorData.objects.create(**attrs)
        obj.main_geodata = data
        obj._post_save_geo_ok = False
        has_poly = True
        obj.save()
        rows.append(
            ["geovectordata", data.name, data.pk, f"Multi-polygone {model_name}"]
        )
    if obj.point_source == "P" and obj.point_2d \
            and (obj.point_source_item in obj_verbose_names
                 or obj.point_source_item in (None, "")):
        if obj.x and obj.y:
            attrs = {
                "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
                "source_content_type": model_content_type,
                "source_id": obj.pk,
                "data_type": data_type_center,
                "x": obj.x,
                "y": obj.y,
                "z": obj.z,
                "spatial_reference_system": obj.spatial_reference_system,
            }
            data = models_common.GeoVectorData.objects.create(**attrs)
            if not has_poly:
                obj.main_geodata = data
                obj._post_save_geo_ok = False
                obj.save()
            else:
                obj.geodata.add(data)
            rows.append(
                ["geovectordata", data.name, data.pk, f"Coordonnées {model_name}"]
            )
        elif obj.point_2d:
            attrs = {
                "name": f"{_(model_name.capitalize())}{_(':')} {str(obj)}",
                "source_content_type": model_content_type,
                "source_id": obj.pk,
                "data_type": data_type_center,
            }
            if obj.point:
                attrs["point_3d"] = obj.point
            else:
                attrs["point_2d"] = obj.point_2d
            data = models_common.GeoVectorData.objects.create(**attrs)
            if not has_poly:
                obj.main_geodata = data
                obj._post_save_geo_ok = False
                obj.save()
            else:
                obj.geodata.add(data)
            rows.append(
                ["geovectordata", data.name, data.pk, f"Point {model_name}"]
            )
    return rows


def _process_simple(obj):
    """Re-save a Warehouse/Container so its save hooks rebuild geo data."""
    connection.close()
    obj._post_save_geo_ok = False
    obj._no_move = True
    obj.skip_history_when_saving = True
    obj.save()
    return []


# progress state, (re)set by launch_job() for each batch
idx = 0
total = 0
ref_time = None


def write_output(result):
    """Collect a worker's change rows and display migration progress.

    Used both as the ``Pool.apply_async`` callback (runs in the parent
    process) and directly on the synchronous path, so rows produced by
    child processes reach the parent's ``changed`` list.

    :param result: list of change rows returned by a ``_process_*`` function
        (or None/empty when nothing changed).
    """
    global idx
    # collect BEFORE the quiet early-return, so --quiet does not lose log rows
    if result:
        changed.extend(result)
    idx = idx + 1
    if quiet:
        return
    # idx already counts the finished item: display idx, not idx + 1
    lbl = f"\r[{get_percent(idx, total)}] Migrate {model_name}s {idx}/{total}"
    if ref_time:
        lbl += f" ({get_eta(idx, total, ref_time, datetime.datetime.now())} left)"
    sys.stdout.write(lbl)
    sys.stdout.flush()


def launch_job(lst, name, process_number, process_func):
    """Run ``process_func`` on every item, optionally through a process pool.

    :param lst: iterable (list or queryset) of items to process.
    :param name: label used in progress output.
    :param process_number: number of worker processes; 1 means synchronous.
    :param process_func: one of the ``_process_*`` functions; must return its
        change rows, which are funnelled through :func:`write_output`.
    """
    global idx, total, model_name, ref_time
    lst = list(lst)  # normalize: callers pass lists or querysets
    idx, total, model_name, ref_time = 0, len(lst), name, datetime.datetime.now()
    pool = Pool(processes=process_number) if process_number > 1 else None
    for item in lst:
        if pool:
            # callback executes in the parent and harvests returned rows
            pool.apply_async(process_func, (item,), callback=write_output)
        else:
            write_output(process_func(item))
    if pool:
        pool.close()
        pool.join()


quiet = False


def migrate(log=True, process_number=1):
    """Run the full geo data migration.

    :param log: when True, write the CSV log of created entries.
    :param process_number: number of parallel worker processes.
    :return: list of error strings (currently always empty).
    """
    # towns first: model geo data may reference town geo data
    q = models_common.Town.objects.exclude(
        center__isnull=True, limit__isnull=True
    ).exclude(main_geodata__isnull=False).distinct()
    town_ids = list(q.values_list("id", flat=True))
    launch_job(town_ids, "town", process_number, _process_town)

    model_list = [
        ("operation", "opération", "de l'opération", Operation),
        ("archaeologicalsite", "site", "du site", ArchaeologicalSite),
    ]
    # globals: worker functions read this per-model context (inherited on fork)
    global model_slug, model_name, model_full_name, model
    global model_content_type, data_type_area, data_type_center, data_type_outline
    for model_slug, model_name, model_full_name, model in model_list:
        connection.close()
        # manage operation vector sources
        model_content_type = ContentType.objects.get(
            app_label="archaeological_operations", model=model_slug
        )
        data_type_area, __ = models_common.GeoDataType.objects.get_or_create(
            txt_idx=f"{model_slug}-area",
            defaults={"label": f"Emprise {model_full_name}"},
        )
        data_type_center, __ = models_common.GeoDataType.objects.get_or_create(
            txt_idx=f"{model_slug}-center",
            defaults={"label": f"Centre {model_full_name}"},
        )
        q = model.objects.exclude(main_geodata__isnull=False)
        launch_job(list(q.all()), model_name, process_number, _process_site_ope)
        if not quiet:
            sys.stdout.write(
                f"\r[{get_log_time()}] {model_name.capitalize()} migrated"
                + " " * 20 + "\n"
            )
            sys.stdout.flush()

    model_list = [
        (
            "archaeological_context_records",
            "contextrecord",
            "unité d'enregistrement",
            "de l'unité d'enregistrement",
            ContextRecord,
        ),
        (
            "archaeological_finds",
            "basefind",
            "mobilier d'origine",
            "du mobilier d'origine",
            BaseFind,
        ),
    ]
    for app, model_slug, model_name, model_full_name, model in model_list:
        connection.close()
        model_content_type = ContentType.objects.get(app_label=app, model=model_slug)
        data_type_outline, __ = models_common.GeoDataType.objects.get_or_create(
            txt_idx=f"{model_slug}-outline",
            defaults={"label": f"Contour {model_full_name}"},
        )
        lbl = f"Centre {model_full_name}"
        if model == BaseFind:
            lbl = "Point mobilier"
        data_type_center, __ = models_common.GeoDataType.objects.get_or_create(
            txt_idx=f"{model_slug}-center",
            defaults={"label": lbl},
        )
        q = model.objects.exclude(main_geodata__isnull=False)
        launch_job(q.all(), model_name, process_number, _process_main)
        if not quiet:
            sys.stdout.write(
                f"\r[{get_log_time()}] {model_name.capitalize()} migrated"
                + " " * 20 + "\n"
            )
            sys.stdout.flush()

    model_list = [Warehouse, Container]
    for model in model_list:
        connection.close()
        q = model.objects.exclude(main_geodata__isnull=False)
        launch_job(q.all(), model.__name__, process_number, _process_simple)
        if not quiet:
            sys.stdout.write(
                f"\r[{get_log_time()}] {model.__name__.capitalize()} migrated"
                + " " * 20 + "\n"
            )
            sys.stdout.flush()

    if log and changed:
        filename = f"geo_migration-created-{get_log_time().replace(':', '')}.csv"
        path = os.sep.join([log_path, filename])
        with open(path, "w+") as fle:
            writer = csv.writer(fle)
            writer.writerow(["model", "name", "id", "context"])
            for change in changed:
                writer.writerow(change)
        if not quiet:
            sys.stdout.write(f"[{get_log_time()}] Log: {path} written\n")
    return []


class Command(BaseCommand):
    help = "Migrate to new geo data management"

    def add_arguments(self, parser):
        parser.add_argument(
            "--process", dest="process", help="Number of process"
        )
        parser.add_argument(
            "--quiet", dest="quiet", action="store_true", help="Quiet output"
        )
        # store_false: passing --log DISABLES the file log (default is on)
        parser.add_argument(
            "--log", dest="log", action="store_false",
            help="Do not log into a file"
        )

    def handle(self, *args, **options):
        log = options["log"]
        global quiet
        quiet = options["quiet"]
        if not quiet:
            sys.stdout.write(f"[{get_log_time()}] Processing migration\n")
        errors = migrate(log=log, process_number=int(options["process"] or 1))
        if not errors:
            if not quiet:
                sys.stdout.write(f"[{get_log_time()}] Migration finished\n")
            sys.exit()
        if not quiet:
            sys.stdout.write("\n".join(errors))
        sys.exit(1)