author     Étienne Loks <etienne.loks@iggdrasil.net>    2025-05-27 16:30:21 +0200
committer  Étienne Loks <etienne.loks@iggdrasil.net>    2025-05-27 16:30:21 +0200
commit     c2d836b31da08412b12e67fc440b95a110a7aa55 (patch)
tree       0b9f5444493a4e6fcbe8b0eaa173dccf680bc120
parent     131973f319d9ec11ac1509d7921e0d6c39d10305 (diff)
🐛 admin town: fix GeoJSON import/export - ♻️ management commands: refactor dump/load towns
-rw-r--r--  ishtar_common/admin.py                                165
-rw-r--r--  ishtar_common/management/commands/dump_towns.py        50
-rw-r--r--  ishtar_common/management/commands/load_towns.py       158
-rw-r--r--  ishtar_common/models_common.py                        245
-rw-r--r--  ishtar_common/templates/admin/import_from_file.html     6
5 files changed, 365 insertions, 259 deletions
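
Note on the data format: the refactoring moves the dump/load logic into a new Town.geodata_export property and Town.geodata_import class method (see models_common.py below), which exchange a plain list of serialized records rather than GeoJSON, each GeoVectorData record being emitted just before the town that references it. A minimal sketch of one exported town, written as a Python literal; the INSEE code, provider, coordinates, labels and WKT are illustrative, not taken from a real dump:

    [
        {
            "model": "ishtar_common.geovectordata",
            "fields": {
                "name": "Rennes",
                "source_content_type": ["ishtar_common", "town"],
                "source": "35238",                    # INSEE code of the source town
                "data_type": ["town-limit"],
                "provider": "ign",                    # GeoProviderType txt_idx
                "comment": "",
                "cached_x": -1.68,
                "cached_y": 48.11,
                "spatial_reference_system": None,
                "multi_polygon": "MULTIPOLYGON (((...)))"  # WKT of the town limit
            }
        },
        {
            "model": "ishtar_common.town",
            "fields": {
                "name": "Rennes",
                "surface": 5039.0,
                "numero_insee": "35238",
                "notice": "",
                "year": None,
                "cached_label": "Rennes (35238)",
                "main_geodata": ["ishtar_common", "town", "35238"],
                "geodata": [["ishtar_common", "town", "35238"]],
                "children": []                        # INSEE codes of child towns
            }
        }
    ]
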
diff --git a/ishtar_common/admin.py b/ishtar_common/admin.py
index 50dbbe25a..36331e2f7 100644
--- a/ishtar_common/admin.py
+++ b/ishtar_common/admin.py
@@ -229,7 +229,7 @@ def export_as_csv_action(
def export_as_geojson_action(
geometry_field,
- description=_("Export selected as GeoJSON file"),
+ description=_("Export selected as GeoJSON/JSON file"),
fields=None,
exclude=None,
):
@@ -243,24 +243,29 @@ def export_as_geojson_action(
Generic zipped geojson export admin action.
"""
opts = modeladmin.model._meta
- field_names = set(
- [field.name for field in opts.fields if field.name != geometry_field]
- )
- if fields:
- fieldset = set(fields)
- field_names = field_names & fieldset
- if exclude:
- excludeset = set(exclude)
- field_names = field_names - excludeset
+ if hasattr(modeladmin.model, "geodata_export"):
+ geojson = []
+ for item in queryset.order_by("pk").all():
+ geojson += item.geodata_export
+ geojson = json.dumps(geojson, indent=4)
+ else:
+ field_names = set(
+ [field.name for field in opts.fields if field.name != geometry_field]
+ )
+ if fields:
+ fieldset = set(fields)
+ field_names = field_names & fieldset
+ if exclude:
+ excludeset = set(exclude)
+ field_names = field_names - excludeset
+ geojson = serialize(
+ "geojson",
+ queryset.order_by("pk"),
+ geometry_field=geometry_field,
+ fields=field_names,
+ ).encode("utf-8")
basename = str(opts).replace(".", "_")
-
- geojson = serialize(
- "geojson",
- queryset.order_by("pk"),
- geometry_field=geometry_field,
- fields=field_names,
- ).encode("utf-8")
in_memory = BytesIO()
czip = zipfile.ZipFile(in_memory, "a")
czip.writestr(basename + ".geojson", geojson)
@@ -1234,7 +1239,7 @@ class ImportActionAdmin(admin.ModelAdmin):
class ImportGeoJsonForm(forms.Form):
json_file = forms.FileField(
- label=_("Geojson file"),
+ label=_("Geojson/JSON file"),
help_text=_(
"Only unicode encoding is managed - convert your"
" file first. The file must be a geojson file or a zip "
@@ -1312,11 +1317,11 @@ class ImportGEOJSONActionAdmin(object):
self.message_user(request, error, level=messages.ERROR)
return False
if geom.geom_type == "Point":
- values["center"] = geom
+ values["point_2d"] = geom
elif geom.geom_type == "MultiPolygon":
- values["limit"] = geom
+ values["multi_polygon"] = geom
elif geom.geom_type == "Polygon":
- values["limit"] = MultiPolygon(geom)
+ values["multi_polygon"] = MultiPolygon(geom)
else:
if trace_error:
error = str(_("Geometry {} not managed for towns - feature {}")).format(
@@ -1355,7 +1360,8 @@ class ImportGEOJSONActionAdmin(object):
if form.is_valid():
json_file_obj = request.FILES["json_file"]
- base_dct = {"file_form": form, "current_action": "import_geojson"}
+ base_dct = {"file_form": form, "current_action": "import_geojson",
+ "is_town": True}
tempdir = tempfile.mkdtemp()
tmpfilename = tempdir + os.sep + "dest_file"
@@ -1389,53 +1395,78 @@ class ImportGEOJSONActionAdmin(object):
"insee_prefix": request.POST.get("numero_insee_prefix", None) or "",
"surface_unit": int(request.POST.get("surface_unit")),
}
+ town_content_type = ContentType.objects.get(
+ app_label="ishtar_common", model="town"
+ )
+ town_data_type, __ = models.GeoDataType.objects.get_or_create(
+ txt_idx="town-limit", defaults={"label": _("Town limit")}
+ )
with open(json_filename) as json_file_obj:
json_file = json_file_obj.read()
- try:
- dct = json.loads(json_file)
- if "features" not in dct or not dct["features"]:
- raise ValueError()
- except (ValueError, AssertionError):
- error = _("Bad geojson file")
- return self.import_geojson_error(
- request, error, base_dct, tempdir
- )
-
- error_count = 0
- created = 0
- updated = 0
- for idx, feat in enumerate(dct["features"]):
- trace_error = True
- if error_count == 6:
- self.message_user(
- request, _("Too many errors..."), level=messages.ERROR
+ dct = json.loads(json_file)
+ if "features" not in dct or not dct["features"]:
+ # probably Ishtar JSON
+ created, __, updated, __ = models.Town.geodata_import(dct)
+ else:
+ error_count = 0
+ created = 0
+ updated = 0
+ for idx, feat in enumerate(dct["features"]):
+ trace_error = True
+ if error_count == 6:
+ self.message_user(
+ request, _("Too many errors..."), level=messages.ERROR
+ )
+ if error_count > 5:
+ trace_error = False
+ values = self.geojson_values(
+ request, idx + 1, feat, keys, trace_error
)
- if error_count > 5:
- trace_error = False
- values = self.geojson_values(
- request, idx + 1, feat, keys, trace_error
- )
- if not values:
- error_count += 1
- continue
- num_insee = values.pop("numero_insee")
- year = values.pop("year") or None
- t, c = models_common.Town.objects.get_or_create(
- numero_insee=num_insee, year=year, defaults=values
- )
- if c:
- created += 1
- else:
- modified = False
- for k in values:
- if keys["update"] and k not in ["center", "limit"]:
- continue
- if values[k] != getattr(t, k):
- setattr(t, k, values[k])
- modified = True
- if modified:
- updated += 1
- t.save()
+ if not values:
+ error_count += 1
+ continue
+ num_insee = values.pop("numero_insee")
+ year = values.pop("year") or None
+ geo_values = {}
+ for k in ["point_2d", "multi_polygon"]:
+ if k in values:
+ geo_values[k] = values.pop(k)
+ t, c = models_common.Town.objects.get_or_create(
+ numero_insee=num_insee, year=year, defaults=values
+ )
+ if c:
+ created += 1
+ else:
+ modified = False
+ if not keys["update"]:
+ for k in values:
+ if not getattr(t, k) or \
+ values[k] != getattr(t, k):
+ setattr(t, k, values[k])
+ modified = True
+ if modified:
+ updated += 1
+ t.save()
+ if geo_values:
+ if t.main_geodata: # update
+ for k in geo_values:
+ c_value = getattr(t.main_geodata, k)
+ if not c_value or \
+ geo_values[k] != c_value.wkt:
+ setattr(t.main_geodata, k, geo_values[k])
+ t.main_geodata.save()
+ else:
+ gd, __ = models.GeoVectorData.objects.get_or_create(
+ source_id=t.pk,
+ source_content_type=town_content_type,
+ data_type=town_data_type,
+ defaults={"name": t.cached_label}
+ )
+ for k in geo_values:
+ setattr(gd, k, geo_values[k])
+ gd.save()
+ t.main_geodata = gd
+ t.save()
if created:
self.message_user(
request, str(_("%d item(s) created.")) % created
@@ -1455,7 +1486,7 @@ class ImportGEOJSONActionAdmin(object):
return render(
request,
"admin/import_from_file.html",
- {"file_form": form, "current_action": "import_geojson"},
+ {"file_form": form, "current_action": "import_geojson", "is_town": True},
)
diff --git a/ishtar_common/management/commands/dump_towns.py b/ishtar_common/management/commands/dump_towns.py
index 31465aa13..aa5270aef 100644
--- a/ishtar_common/management/commands/dump_towns.py
+++ b/ishtar_common/management/commands/dump_towns.py
@@ -44,7 +44,8 @@ class Command(BaseCommand):
def handle(self, *args, **options):
quiet = options['quiet']
query = options["query"]
- q = Town.objects.filter(main_geodata__isnull=False, main_geodata__multi_polygon__isnull=False)
+ q = Town.objects.filter(main_geodata__isnull=False,
+ main_geodata__multi_polygon__isnull=False)
if query:
try:
query = json.loads(query)
@@ -61,52 +62,7 @@ class Command(BaseCommand):
get_progress("processing town", idx, nb_lines, started)
)
sys.stdout.flush()
- geo = town.main_geodata
- town_dct = {
- "model": "ishtar_common.town",
- "fields": {
- "name": town.name,
- "surface": town.surface,
- "numero_insee": town.numero_insee,
- "notice": town.notice,
- "year": town.year,
- "cached_label": town.cached_label,
- "main_geodata": [
- "ishtar_common",
- "town",
- town.numero_insee
- ],
- "geodata": [
- ["ishtar_common", "town", town.numero_insee]
- ],
- "children": [
- t.numero_insee
- for t in town.children.filter(numero_insee__isnull=False).all()
- ]
- }
- }
- geo_dct = {
- "model": "ishtar_common.geovectordata",
- "fields": {
- "name": geo.name,
- "source_content_type": [
- "ishtar_common",
- "town"
- ],
- "source": town.numero_insee,
- "data_type": [
- "town-limit"
- ],
- "provider": geo.provider.txt_idx,
- "comment": geo.comment,
- "cached_x": geo.cached_x,
- "cached_y": geo.cached_y,
- "spatial_reference_system": None,
- "multi_polygon": geo.multi_polygon.wkt
- }
- }
- result.append(geo_dct)
- result.append(town_dct)
+ result += town.geodata_export
today = datetime.date.today()
result_file = f"ishtar-towns-{today.strftime('%Y-%m-%d')}.json"
with open(result_file, "w") as r:
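
For completeness, a sketch of driving the slimmed-down dump command from code. It assumes the command's argument definitions (not shown in this hunk) expose "query" and "quiet" options matching the keys read in handle(), and that the query JSON is applied as queryset filters; the filter key below is illustrative:

    from django.core.management import call_command

    # Dump only one department's towns; the value is parsed with json.loads()
    # in handle() as seen above.
    call_command(
        "dump_towns",
        query='{"numero_insee__startswith": "35"}',
        quiet=False,
    )
    # Expected output file, per the code above: ishtar-towns-<YYYY-MM-DD>.json
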
diff --git a/ishtar_common/management/commands/load_towns.py b/ishtar_common/management/commands/load_towns.py
index 1ab73d677..86066c4d8 100644
--- a/ishtar_common/management/commands/load_towns.py
+++ b/ishtar_common/management/commands/load_towns.py
@@ -17,24 +17,16 @@
# See the file COPYING for details.
-import datetime
import json
import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
-from django.contrib.contenttypes.models import ContentType
from django.db import transaction
-from ishtar_common.utils import BColors, get_log_time, get_progress
-from ishtar_common.models import Town, GeoVectorData, GeoDataType, GeoProviderType
-
-
-town_content_type = ContentType.objects.get(app_label="ishtar_common", model="town")
-town_data_type, __ = GeoDataType.objects.get_or_create(
- txt_idx="town-limit", defaults={"label": "Limites commune"}
-)
+from ishtar_common.utils import BColors, get_log_time
+from ishtar_common.models import Town
class Command(BaseCommand):
@@ -61,143 +53,23 @@ class Command(BaseCommand):
with open(towns_file, "r") as t:
src = json.loads(t.read())
- nb_lines = len(src)
- started = datetime.datetime.now()
- self.geo_updated, self.geo_created = 0, 0
- self.town_created = 0
log_filename = f"load_towns-{get_log_time().replace(':', '')}.csv"
log_path = os.sep.join([log_path, log_filename])
- towns, geo_datas, children = {}, {}, {}
- for idx, values in enumerate(src):
- sys.stdout.write(get_progress("processing", idx, nb_lines, started))
- sys.stdout.flush()
- fields = values["fields"]
- if values["model"] == "ishtar_common.town":
- if self.limit and not values["numero_insee"].startswith(self.limit):
- continue
- c_children = fields.pop("children")
- if c_children:
- children[fields["numero_insee"]] = c_children
- towns[fields["numero_insee"]], created = self.update_town(
- fields, geo_datas
- )
- if values["model"] == "ishtar_common.geovectordata":
- self.update_geodata(fields, geo_datas)
- # manage geo sources
- for insee in geo_datas:
- if insee not in towns:
- sys.stdout.write(
- f"\n{BColors.FAIL}geodata source : INSEE manquant {insee}{BColors.ENDC}\n"
- )
- else:
- g = geo_datas[insee]
- if g.source_id != towns[insee].pk:
- g.source_id = towns[insee].pk
- g.save()
- nb_lines = len(children)
- started = datetime.datetime.now()
- self.nb_rels = 0
- print()
- # management childrens
- for idx, insee in enumerate(children):
- sys.stdout.write(get_progress("update children", idx, nb_lines, started))
- sys.stdout.flush()
- self.get_children(insee, towns, children)
+ town_created, geo_created, geo_updated, nb_rels = Town.geodata_import(
+ src, limit=self.limit, log_path=log_path, verbose=not quiet
+ )
+ if quiet:
+ return
sys.stdout.write(BColors.OKGREEN)
- if self.town_created:
- sys.stdout.write(f'\n* {self.town_created} town created')
- if self.geo_created:
- sys.stdout.write(f'\n* {self.geo_created} geo created')
- if self.geo_updated:
- sys.stdout.write(f'\n* {self.geo_updated} geo updated')
- if self.nb_rels:
- sys.stdout.write(f'\n* {self.nb_rels} relations updated')
+ if town_created:
+ sys.stdout.write(f'\n* {town_created} town created')
+ if geo_created:
+ sys.stdout.write(f'\n* {geo_created} geo created')
+ if geo_updated:
+ sys.stdout.write(f'\n* {geo_updated} geo updated')
+ if nb_rels:
+ sys.stdout.write(f'\n* {nb_rels} relations updated')
sys.stdout.write(BColors.ENDC + "\n")
sys.stdout.flush()
-
- def update_town(self, fields, geo_datas):
- values = fields.copy()
- geos = []
- for geo in values.pop("geodata"):
- geo_id = geo[2]
- if geo_id not in geo_datas:
- sys.stdout.write(f"\n{BColors.FAIL}geodata : Geo INSEE manquant {geo_id}{BColors.ENDC}\n")
- else:
- geos.append(geo_datas[geo_id])
- main_geo = values["main_geodata"][2]
- if main_geo not in geo_datas:
- sys.stdout.write(f"\n{BColors.FAIL}main_geodata : Geo INSEE manquant {main_geo}{BColors.ENDC}\n")
- values.pop(main_geo)
- else:
- values["main_geodata"] = geo_datas[main_geo]
-
- q = Town.objects.filter(numero_insee=values["numero_insee"])
- created = False
- if q.count():
- q.update(**values)
- town = q.all()[0]
- else:
- created = True
- self.town_created += 1
- town = Town.objects.create(**values)
- for geo in geos:
- town.geodata.add(geo)
- return town, created
-
- def update_geodata(self, fields, geo_datas):
- numero_insee = fields.pop('source')
- if self.limit and not numero_insee.startswith(self.limit):
- return
- q = Town.objects.filter(numero_insee=numero_insee)
- values = {
- "provider": GeoProviderType.objects.get(txt_idx=fields["provider"]),
- "comment": fields["comment"],
- 'multi_polygon': fields["multi_polygon"]
- }
- if q.count():
- source_id = q.all()[0].pk
- q2 = GeoVectorData.objects.filter(
- source_id=source_id,
- source_content_type=town_content_type,
- data_type=town_data_type
- )
- if q2.count():
- geo = q2.all()[0]
- changed = False
- for k in values:
- if k == "multi_polygon":
- if geo.multi_polygon.wkt != values[k]:
- setattr(geo, k, values[k])
- changed = True
- elif getattr(geo, k) != values[k]:
- setattr(geo, k, values[k])
- changed = True
- if changed:
- self.geo_updated += 1
- geo.save()
- geo_datas[numero_insee] = geo
- return
- values.update({
- "source_content_type": town_content_type,
- "data_type": town_data_type
- })
- self.geo_created += 1
- geo = GeoVectorData.objects.create(**values)
- geo_datas[numero_insee] = geo
-
- def get_children(self, insee, towns, children):
- if insee not in towns:
- sys.stdout.write(f"\n{BColors.FAIL}children : INSEE manquant {insee}{BColors.ENDC}\n")
- return
- town = towns[insee]
- current_children = list(town.children.values_list("id", flat=True))
- for child in children[insee]:
- if child not in towns:
- sys.stdout.write(f"\n{BColors.FAIL}children-child : INSEE manquant {insee}{BColors.ENDC}\n")
- continue
- if towns[child].id in current_children:
- continue
- self.nb_rels += 1
- town.children.add(towns[child])
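
Since the command is now a thin wrapper, the same import can be driven directly from a script or Django shell through the new class method; a minimal sketch (the file name, department prefix and log path are illustrative):

    import json
    from ishtar_common.models import Town

    with open("ishtar-towns-2025-05-27.json") as f:
        src = json.load(f)

    # Returns the counters printed by load_towns.py above, in this order.
    town_created, geo_created, geo_updated, nb_rels = Town.geodata_import(
        src, limit="35", log_path="load_towns.csv", verbose=True
    )
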
diff --git a/ishtar_common/models_common.py b/ishtar_common/models_common.py
index 949cab832..bfbcb2748 100644
--- a/ishtar_common/models_common.py
+++ b/ishtar_common/models_common.py
@@ -16,6 +16,7 @@ import os
import pyqrcode
import re
import shutil
+import sys
import tempfile
import time
from unidecode import unidecode
@@ -46,6 +47,7 @@ from django.template.defaultfilters import slugify
from django.utils.safestring import SafeText, mark_safe
from django.utils.translation import activate, deactivate
from ishtar_common.utils import (
+ BColors,
ugettext_lazy as _,
pgettext_lazy,
get_image_path,
@@ -53,7 +55,8 @@ from ishtar_common.utils import (
human_date,
HistoryError,
SearchAltName,
- SheetItem
+ SheetItem,
+ get_progress
)
from simple_history.models import HistoricalRecords as BaseHistoricalRecords
from simple_history.signals import (
@@ -3638,6 +3641,243 @@ class Town(GeographicItem, Imported, DocumentItem, MainItem, models.Model):
return 0
return round(self.surface / 10000.0, 5)
+ @classmethod
+ def _geodata_import_update_town(cls, fields, geo_datas, verbose=False, limit=None):
+ """
+ specific geodata_import method - import town
+ """
+ values = fields.copy()
+ geos = []
+ missing_lbl = _("INSEE code is missing")
+ missing_geo_lbl = _("Geo INSEE is missing")
+ for geo in values.pop("geodata"):
+ geo_id = geo[2]
+ if geo_id not in geo_datas:
+ if verbose:
+ sys.stdout.write(f"\n{BColors.FAIL}geodata: {missing_lbl} ")
+ sys.stdout.write(f"{geo_id}{BColors.ENDC}\n")
+ else:
+ geos.append(geo_datas[geo_id])
+ main_geo = values["main_geodata"][2]
+ if main_geo not in geo_datas:
+ if verbose:
+ sys.stdout.write(f"\n{BColors.FAIL}main_geodata: {missing_geo_lbl}")
+ sys.stdout.write(f" {main_geo}{BColors.ENDC}\n")
+ values.pop("main_geodata")
+ else:
+ values["main_geodata"] = geo_datas[main_geo]
+
+ q = Town.objects.filter(numero_insee=values["numero_insee"])
+ created = False
+ if q.count():
+ q.update(**values)
+ town = q.all()[0]
+ else:
+ created = True
+ cls.__town_created += 1
+ town = Town.objects.create(**values)
+ for geo in geos:
+ town.geodata.add(geo)
+ return town, created
+
+ @classmethod
+ def _geodata_import_update_geodata(cls, fields, geo_datas, limit=None):
+ """
+ specific geodata_import method - import geodata
+ """
+ if not getattr(cls, "_town_content_type", None):
+ cls._town_content_type = ContentType.objects.get(
+ app_label="ishtar_common", model="town"
+ )
+ if not getattr(cls, "_town_town_data_type", None):
+ cls._town_data_type, __ = GeoDataType.objects.get_or_create(
+ txt_idx="town-limit", defaults={"label": _("Town limit")}
+ )
+
+ numero_insee = fields.pop('source')
+ if limit and not numero_insee.startswith(limit):
+ return
+ q = Town.objects.filter(numero_insee=numero_insee)
+ values = {
+ "provider": GeoProviderType.objects.get(txt_idx=fields["provider"]),
+ "comment": fields["comment"],
+ 'multi_polygon': fields["multi_polygon"]
+ }
+ if q.count():
+ source_id = q.all()[0].pk
+ q2 = GeoVectorData.objects.filter(
+ source_id=source_id,
+ source_content_type=cls._town_content_type,
+ data_type=cls._town_data_type
+ )
+ if q2.count():
+ geo = q2.all()[0]
+ changed = False
+ for k in values:
+ if k == "multi_polygon":
+ if geo.multi_polygon.wkt != values[k]:
+ setattr(geo, k, values[k])
+ changed = True
+ elif getattr(geo, k) != values[k]:
+ setattr(geo, k, values[k])
+ changed = True
+ if changed:
+ cls.__geo_updated += 1
+ geo.save()
+ geo_datas[numero_insee] = geo
+ return
+ values.update({
+ "source_content_type": cls._town_content_type,
+ "data_type": cls._town_data_type
+ })
+ cls.__geo_created += 1
+ geo = GeoVectorData.objects.create(**values)
+ geo_datas[numero_insee] = geo
+
+ @classmethod
+ def _geodata_import_get_children(cls, insee, towns, children, verbose=False):
+ """
+ specific geodata_import method - make link between towns
+ """
+ missing_lbl = _("INSEE code is missing")
+ if insee not in towns:
+ if verbose:
+ sys.stdout.write(f"\n{BColors.FAIL}children: {missing_lbl}")
+ sys.stdout.write(f" {insee}{BColors.ENDC}\n")
+ return
+ town = towns[insee]
+ current_children = list(town.children.values_list("id", flat=True))
+ for child in children[insee]:
+ if child not in towns:
+ q = Town.objects.filter(numero_insee=child)
+ if not q.count():
+ sys.stdout.write(f"\n{BColors.FAIL}children-child: {missing_lbl}")
+ sys.stdout.write(f" {child}{BColors.ENDC}\n")
+ continue
+ towns[child] = q.all()[0]
+ if towns[child].id in current_children:
+ continue
+ cls.__nb_rels += 1
+ town.children.add(towns[child])
+
+ @classmethod
+ def geodata_import(cls, src, limit=None, log_path=None, verbose=False):
+ """
+ Import custom geodata format
+ - src: geodata dict
+        - limit: if provided, only import towns whose code starts with this prefix
+ - verbose: verbose output
+ """
+ nb_lines = len(src)
+ started = datetime.datetime.now()
+ cls.__geo_updated, cls.__geo_created = 0, 0
+ cls.__town_created = 0
+
+ towns, geo_datas, children = {}, {}, {}
+ for idx, values in enumerate(src):
+ if verbose:
+ sys.stdout.write(get_progress("processing", idx, nb_lines, started))
+ sys.stdout.flush()
+ fields = values["fields"]
+ if values["model"] == "ishtar_common.town":
+ if limit and not values["numero_insee"].startswith(limit):
+ continue
+ c_children = fields.pop("children")
+ if c_children:
+ children[fields["numero_insee"]] = c_children
+ towns[fields["numero_insee"]], created = cls._geodata_import_update_town(
+ fields, geo_datas, verbose, limit
+ )
+ if values["model"] == "ishtar_common.geovectordata":
+ cls._geodata_import_update_geodata(fields, geo_datas, limit)
+
+ # manage geo sources
+ missing_lbl = _("INSEE code is missing")
+ for insee in geo_datas:
+ if insee not in towns:
+ if verbose:
+ sys.stdout.write(
+ f"\n{BColors.FAIL}geodata source: {missing_lbl} "
+ )
+ sys.stdout.write(
+ f" {insee}{BColors.ENDC}\n"
+ )
+ else:
+ g = geo_datas[insee]
+ if g.source_id != towns[insee].pk:
+ g.source_id = towns[insee].pk
+ g.save()
+
+ nb_lines = len(children)
+ started = datetime.datetime.now()
+ cls.__nb_rels = 0
+ if verbose:
+ print()
+            # manage children
+ for idx, insee in enumerate(children):
+ if verbose:
+ sys.stdout.write(get_progress("update children", idx, nb_lines, started))
+ sys.stdout.flush()
+ cls._geodata_import_get_children(insee, towns, children, verbose=verbose)
+ return cls.__town_created, cls.__geo_created, cls.__geo_updated, cls.__nb_rels
+
+ @property
+ def geodata_export(self):
+ """
+        Custom geodata export format to easily manage parent and main geodata
+ """
+ main_geodata, geodata = None, []
+ if self.main_geodata:
+ main_geodata = ["ishtar_common", "town", self.numero_insee]
+ geodata = [main_geodata]
+ geo = self.main_geodata
+ result = [
+ {
+ "model": "ishtar_common.town",
+ "fields": {
+ "name": self.name,
+ "surface": self.surface,
+ "numero_insee": self.numero_insee,
+ "notice": self.notice,
+ "year": self.year,
+ "cached_label": self.cached_label,
+ "main_geodata": main_geodata,
+ "geodata": geodata,
+ "children": [
+ t.numero_insee
+ for t in self.children.filter(numero_insee__isnull=False).all()
+ ]
+ }
+ }
+ ]
+ if not self.main_geodata:
+ return result
+ # put the geodata before the town
+ result = [
+ {
+ "model": "ishtar_common.geovectordata",
+ "fields": {
+ "name": geo.name if geo.name and geo.name != "-"
+ else self.cached_label,
+ "source_content_type": [
+ "ishtar_common",
+ "town"
+ ],
+ "source": self.numero_insee,
+ "data_type": [
+ "town-limit"
+ ],
+ "provider": geo.provider.txt_idx,
+ "comment": geo.comment,
+ "cached_x": geo.cached_x,
+ "cached_y": geo.cached_y,
+ "spatial_reference_system": None,
+ "multi_polygon": geo.multi_polygon.wkt
+ }
+ }
+ ] + result
+ return result
+
def get_filename(self):
if self.numero_insee:
return f"{self.numero_insee} - {slugify(self.name)}"
@@ -3768,7 +4008,8 @@ class Town(GeographicItem, Imported, DocumentItem, MainItem, models.Model):
def _generate_cached_label(self):
cached_label = self.name
- if settings.COUNTRY == "fr" and self.numero_insee:
+ if settings.COUNTRY == "fr" and self.numero_insee \
+ and "X" not in self.numero_insee:
dpt_len = 2
if (
self.numero_insee.startswith("97")
diff --git a/ishtar_common/templates/admin/import_from_file.html b/ishtar_common/templates/admin/import_from_file.html
index 578a489d6..d520b9532 100644
--- a/ishtar_common/templates/admin/import_from_file.html
+++ b/ishtar_common/templates/admin/import_from_file.html
@@ -1,9 +1,15 @@
{% extends "admin/base_site.html" %}
+{% load i18n %}
{% block content %}
<form action="." method="post" enctype="multipart/form-data">
{% csrf_token %}
+{% if is_town %}
+<ul class="messagelist">
+ <li class="warning">{% trans "If your json file is from an Ishtar export, provide the json file and do not change the other fields." %}</li>
+ </ul>
+{% endif %}
<table>
{{ file_form }}
</table>