author     Étienne Loks <etienne.loks@iggdrasil.net>  2025-06-24 14:18:32 +0200
committer  Étienne Loks <etienne.loks@iggdrasil.net>  2025-07-29 08:49:03 +0200
commit     63a6b5021626e956e6151ba3cc1e358aabf29e8a (patch)
tree       7d57dcac6d2fa675c1981443b6a81cddb686313b
parent     283c594606b5c69ef2cc750f4eabdafcc904f0b4 (diff)
download   Ishtar-63a6b5021626e956e6151ba3cc1e358aabf29e8a.tar.bz2
           Ishtar-63a6b5021626e956e6151ba3cc1e358aabf29e8a.zip
First steps in the creation of new export format and new version of the .gpkg creator
-rw-r--r--  archaeological_finds/tests.py  92
-rw-r--r--  archaeological_finds/tests/Finds.csv  3
-rw-r--r--  archaeological_finds/tests/Finds.gpkg  bin  98304 -> 98304 bytes
-rw-r--r--  ishtar_common/migrations/0263_alter_historicaldocument_options_and_more.py  479
-rw-r--r--  ishtar_common/migrations/0264_alter_importertype_export_format.py  18
-rw-r--r--  ishtar_common/migrations/0265_alter_importertype_export_format.py  18
-rw-r--r--  ishtar_common/migrations/0266_alter_importertype_type.py  18
-rw-r--r--  ishtar_common/migrations/0267_alter_documenttemplate_export_format_and_more.py  23
-rw-r--r--  ishtar_common/migrations/0268_alter_documenttemplate_export_format.py  18
-rw-r--r--  ishtar_common/models_imports.py  7
-rw-r--r--  ishtar_common/tests/Finds.gpkg  bin  0 -> 98304 bytes
-rw-r--r--  ishtar_common/views.py  3
-rw-r--r--  ishtar_common/views_item.py  83
13 files changed, 710 insertions, 52 deletions
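
The core technique throughout this commit is building a GeoPackage with the GDAL/OGR Python bindings (osgeo), the same API the QField tests already use. As a reference point for the hunks below, here is a minimal, self-contained sketch of that write path; the file and layer names mirror the test code, the sample field values come from the Finds.csv fixture, and none of this is part of the patch itself.

# Minimal GeoPackage write sketch (illustrative, not part of the patch).
# Assumes the GDAL Python bindings (osgeo) are installed; file and layer
# names follow the tests below, the sample row follows the Finds.csv fixture.
import os
from osgeo import ogr, osr

def write_minimal_gpkg(path="Finds.gpkg"):
    # OGR will not overwrite an existing GeoPackage, so remove it first
    if os.path.exists(path):
        os.remove(path)
    driver = ogr.GetDriverByName("GPKG")
    datasource = driver.CreateDataSource(path)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)  # WGS 84, as in the tests
    layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
    for name in ("identifiant", "operation", "description"):
        layer.CreateField(ogr.FieldDefn(name, ogr.OFTString))
    feature = ogr.Feature(layer.GetLayerDefn())
    feature.SetField("identifiant", "123")
    feature.SetField("operation", "CR")
    feature.SetField("description", "Test")
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(-2.26868, 47.38494)  # longitude, latitude
    feature.SetGeometry(point)
    layer.CreateFeature(feature)
    # Dropping the references flushes the file to disk
    feature = None
    datasource = None
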
diff --git a/archaeological_finds/tests.py b/archaeological_finds/tests.py
index 46f11e3e6..87ed1f179 100644
--- a/archaeological_finds/tests.py
+++ b/archaeological_finds/tests.py
@@ -24,8 +24,8 @@ import os
import shutil
import tempfile
from zipfile import ZipFile
-import subprocess
from osgeo import ogr, osr
+import shutil
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token
@@ -1014,6 +1014,7 @@ class ImportFindTest(BaseImportFindTest):
# check errors
self.assertEqual(len(impt.errors), 0)
+
def test_verify_qfield_zip(self):
"""
:function: Test if all the files of the QField zipped folder are correct
@@ -1030,6 +1031,7 @@ class ImportFindTest(BaseImportFindTest):
# Closing of the .zip
zip_file.close()
+
def test_add_file_qfield_zip(self):
"""
:function: Try the addition of a file in the zip for QField that will be downloaded
@@ -1037,8 +1039,10 @@ class ImportFindTest(BaseImportFindTest):
# Definition of the path to test importer data for GIS data
root = settings.LIB_BASE_PATH + "archaeological_finds/tests/"
filename = os.path.join(root, "qfield-prospection.zip")
+ copy = os.path.join(root, "qfield-prospection-copy.zip")
+ shutil.copyfile(filename, copy)
# Opening of the .zip
- with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'a') as zip_file:
+ with ZipFile(copy, 'a') as zip_file:
# Verification of the number of files in the .zip before adding a new one
self.assertEqual(len(zip_file.namelist()), 2)
# Recovery of the .csv to add for the test
@@ -1050,22 +1054,10 @@ class ImportFindTest(BaseImportFindTest):
# Verification of the names of the files in the .zip
list_files = ["Qfield_prospection.qgs", "Qfield_prospection_attachments.zip","Finds.csv"]
self.assertEqual(zip_file.namelist(), list_files)
- # Cloning and deletion of the .zip to have 2 files once again
- zip_temp = filename+".temp"
- with ZipFile(zip_temp, 'w') as zip_new:
- for item in zip_file.infolist():
- if item.filename != "Finds.csv" :
- zip_new.writestr(item, zip_file.read(item.filename))
# Closing of the old .zip
zip_file.close()
- # Squashing the old .zip with the new one
- os.replace(zip_temp,filename)
- # Opening of the new .zip
- with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'r') as zip_file:
- # Verification of the number of files in the .zip after deleting the .csv
- self.assertEqual(len(zip_file.namelist()), 2)
- # Closing of the new .zip
- zip_file.close()
+ os.remove(copy)
+
def test_qfield_import_finds(self):
"""
@@ -1138,6 +1130,7 @@ class ImportFindTest(BaseImportFindTest):
self.assertTrue(geo.z)
self.assertEqual(geo.z, 2000)
+
def test_qfield_import_group(self):
"""
:function: Try the importation of data from a QField project (context record, finds and documents)
@@ -1190,6 +1183,7 @@ class ImportFindTest(BaseImportFindTest):
self.assertEqual(Document.objects.count(), nb_docs + 1)
self.assertFalse(any(imp.error_file for imp in impt.imports.all()), msg="Error on group import")
+
def test_csv_to_gpkg(self):
"""
:function: Creation of a .gpkg file from the data of an imported .csv
@@ -1231,15 +1225,10 @@ class ImportFindTest(BaseImportFindTest):
operation=ope,
label="CR"
)
- # Getting referential values (nb objects, containers,docs, etc.)
- nb_base_find = models.BaseFind.objects.count()
- nb_find = models.Find.objects.count()
- nb_docs = Document.objects.count()
# Beginning of importation
impt.importation()
# Step 2: Conversion to .gpkg
gpkg = os.path.join(root, "Finds.gpkg")
- layer_name = "Finds"
# Deletion of the .gpkg if already existing
if os.path.exists(gpkg):
os.remove(gpkg)
@@ -1250,47 +1239,52 @@ class ImportFindTest(BaseImportFindTest):
srs.ImportFromEPSG(4326)
# Layer creation
layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
- # Attributes creation
- layer.CreateField(ogr.FieldDefn("identifiant", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("operation", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("date", ogr.OFTDate))
- layer.CreateField(ogr.FieldDefn("x", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("y", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("z", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("materiau(x)", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("description", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("wkt", ogr.OFTString))
+ col_names = ["identifiant",
+ "operation",
+ "date",
+ "x",
+ "y",
+ "z",
+ "materiau(x)",
+ "description",
+ "wkt"]
+ for name in col_names :
+ # Attributes creation
+ layer.CreateField(ogr.FieldDefn(name, ogr.OFTString))
# Importation of the data
feature = ogr.Feature(layer.GetLayerDefn())
+ datas = []
new = models.BaseFind.objects.order_by("-pk").all()[:1]
+ finds = models.Find.objects.order_by("-pk").all()[:1]
+ crs = ContextRecord.objects.order_by("-pk").all()[:1]
+ geos = GeoVectorData.objects.order_by("-pk").all()[:1]
for find in new :
- feature.SetField("identifiant", find.label)
- feature.SetField("date",
- int(find.discovery_date.year),
- int(find.discovery_date.month),
- int(find.discovery_date.day),
- 0, 0, 0, 0)
- feature.SetField("wkt", str(find.point_2d))
- new = models.Find.objects.order_by("-pk").all()[:1]
+ datas.append(find.label)
+ for cr in crs:
+ datas.append(cr.label)
for find in new:
- feature.SetField("materiau(x)", str(find.material_types))
- feature.SetField("description", str(find.description))
- new = ContextRecord.objects.order_by("-pk").all()[:1]
- for cr in new:
- feature.SetField("operation", cr.label)
- new = GeoVectorData.objects.order_by("-pk").all()[:1]
- for geo in new:
- feature.SetField("x", geo.x)
- feature.SetField("y", geo.y)
- feature.SetField("z", geo.z)
+ datas.append(find.discovery_date)
+ for geo in geos:
+ datas.append(geo.x)
+ datas.append(geo.y)
+ datas.append(geo.z)
# Geometry creation
point = ogr.Geometry(ogr.wkbPoint)
point.AddPoint(geo.x, geo.y)
feature.SetGeometry(point)
+ for find in finds:
+ datas.append(find.material_types)
+ datas.append(find.description)
+ for find in new:
+ datas.append(str(find.point_2d))
+ max = len(datas)
+ for n in range(0, max):
+ feature.SetField(col_names[n], str(datas[n]))
layer.CreateFeature(feature)
feature = None
datasource = None
+
class ExportTest(FindInit, TestCase):
fixtures = FIND_TOWNS_FIXTURES
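
test_csv_to_gpkg above only writes the GeoPackage. A natural complement, sketched here as a suggestion rather than as part of the patch, is to reopen the file with OGR and check its contents; the layer and field names follow the test, everything else is assumed.

# Read-back check sketch for the generated Finds.gpkg (suggestion only,
# not part of the patch). Layer and field names follow the test above.
from osgeo import ogr

def check_gpkg(path, expected_count=1):
    datasource = ogr.Open(path)
    layer = datasource.GetLayerByName("Finds")
    assert layer.GetFeatureCount() == expected_count
    for feature in layer:
        # every exported row should carry an identifier and a point geometry
        assert feature.GetField("identifiant")
        geometry = feature.GetGeometryRef()
        assert geometry is not None and geometry.GetGeometryName() == "POINT"
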
diff --git a/archaeological_finds/tests/Finds.csv b/archaeological_finds/tests/Finds.csv
index 1ac6e2be1..d123f01de 100644
--- a/archaeological_finds/tests/Finds.csv
+++ b/archaeological_finds/tests/Finds.csv
@@ -1 +1,2 @@
-id,id_unique,ue,date,x,y,z,materiau(x),descr,media,wkt
+fid,identifiant,operation,date,x,y,z,materiau(x),description,wkt
+"1","123",CR,2025/04/07,14,3,2000,archaeological_finds.MaterialType.None,Test,"SRID=4326;POINT (-2.26868001391598 47.3849390721505)"
diff --git a/archaeological_finds/tests/Finds.gpkg b/archaeological_finds/tests/Finds.gpkg
index bad041f4d..1b71e2aa6 100644
--- a/archaeological_finds/tests/Finds.gpkg
+++ b/archaeological_finds/tests/Finds.gpkg
Binary files differ
diff --git a/ishtar_common/migrations/0263_alter_historicaldocument_options_and_more.py b/ishtar_common/migrations/0263_alter_historicaldocument_options_and_more.py
new file mode 100644
index 000000000..3d8564d69
--- /dev/null
+++ b/ishtar_common/migrations/0263_alter_historicaldocument_options_and_more.py
@@ -0,0 +1,479 @@
+# Generated by Django 4.2.19 on 2025-04-17 08:20
+
+import django.core.validators
+from django.db import migrations, models
+import django.db.models.deletion
+import django.utils.timezone
+import re
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0262_migrate_custom_form_slug'),
+ ]
+
+ operations = [
+ migrations.AlterModelOptions(
+ name='historicaldocument',
+ options={'get_latest_by': ('history_date', 'history_id'), 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical Document', 'verbose_name_plural': 'historical Documents'},
+ ),
+ migrations.AlterModelOptions(
+ name='historicalorganization',
+ options={'get_latest_by': ('history_date', 'history_id'), 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical Organization', 'verbose_name_plural': 'historical Organizations'},
+ ),
+ migrations.AlterModelOptions(
+ name='historicalperson',
+ options={'get_latest_by': ('history_date', 'history_id'), 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical Person', 'verbose_name_plural': 'historical Persons'},
+ ),
+ migrations.AddField(
+ model_name='importertype',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[], default='', max_length=4, verbose_name='Export format'),
+ ),
+ migrations.AlterField(
+ model_name='administrationtask',
+ name='creation_date',
+ field=models.DateTimeField(default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='area',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='authortype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='backgroundtask',
+ name='creation_date',
+ field=models.DateTimeField(default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='ishtar_users',
+ field=models.ManyToManyField(blank=True, related_name='%(class)s_associated', to='ishtar_common.ishtaruser'),
+ ),
+ migrations.AlterField(
+ model_name='biographicalnote',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='duplicate',
+ field=models.BooleanField(blank=True, null=True, verbose_name='Has a duplicate'),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='ishtar_users',
+ field=models.ManyToManyField(blank=True, related_name='%(class)s_associated', to='ishtar_common.ishtaruser'),
+ ),
+ migrations.AlterField(
+ model_name='document',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='documenttag',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='documenttemplate',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[('', '---')], default='', max_length=4, verbose_name='Export format'),
+ ),
+ migrations.AlterField(
+ model_name='exporttask',
+ name='creation_date',
+ field=models.DateTimeField(default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='format',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='gdprlog',
+ name='date',
+ field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='Date'),
+ ),
+ migrations.AlterField(
+ model_name='gdprlog',
+ name='persons',
+ field=models.ManyToManyField(blank=True, related_name='logs', to='ishtar_common.gdprperson', verbose_name='Persons'),
+ ),
+ migrations.AlterField(
+ model_name='geobuffertype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='geodatatype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='geoorigintype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='geoprovidertype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='geovectordata',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='geovectordata',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='duplicate',
+ field=models.BooleanField(blank=True, null=True, verbose_name='Has a duplicate'),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='history_date',
+ field=models.DateTimeField(db_index=True),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicaldocument',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='archived',
+ field=models.BooleanField(blank=True, default=False, null=True),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='history_date',
+ field=models.DateTimeField(db_index=True),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicalorganization',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='archived',
+ field=models.BooleanField(blank=True, default=False, null=True),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='history_date',
+ field=models.DateTimeField(db_index=True),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='historicalperson',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='import',
+ name='encoding',
+ field=models.CharField(choices=[('', ''), ('ISO-8859-15', 'ISO-8859-15'), ('utf-8', 'utf-8')], default='utf-8', help_text='Only required for CSV file', max_length=15, verbose_name='Encoding'),
+ ),
+ migrations.AlterField(
+ model_name='importgroup',
+ name='encoding',
+ field=models.CharField(choices=[('', ''), ('ISO-8859-15', 'ISO-8859-15'), ('utf-8', 'utf-8')], default='utf-8', help_text='Only required for CSV file', max_length=15, verbose_name='Encoding'),
+ ),
+ migrations.AlterField(
+ model_name='importtask',
+ name='creation_date',
+ field=models.DateTimeField(default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='language',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='licensetype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='operationtype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='archived',
+ field=models.BooleanField(blank=True, default=False, null=True),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='ishtar_users',
+ field=models.ManyToManyField(blank=True, related_name='%(class)s_associated', to='ishtar_common.ishtaruser'),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='merge_candidate',
+ field=models.ManyToManyField(blank=True, to='ishtar_common.organization'),
+ ),
+ migrations.AlterField(
+ model_name='organization',
+ name='merge_exclusion',
+ field=models.ManyToManyField(blank=True, to='ishtar_common.organization'),
+ ),
+ migrations.AlterField(
+ model_name='organizationtype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='archived',
+ field=models.BooleanField(blank=True, default=False, null=True),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='created',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='data',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='history_m2m',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='ishtar_users',
+ field=models.ManyToManyField(blank=True, related_name='%(class)s_associated', to='ishtar_common.ishtaruser'),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='last_modified',
+ field=models.DateTimeField(blank=True, default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='merge_candidate',
+ field=models.ManyToManyField(blank=True, to='ishtar_common.person'),
+ ),
+ migrations.AlterField(
+ model_name='person',
+ name='merge_exclusion',
+ field=models.ManyToManyField(blank=True, to='ishtar_common.person'),
+ ),
+ migrations.AlterField(
+ model_name='persontype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='profiletype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='shootingangle',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='sourcetype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='spatialreferencesystem',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='statscache',
+ name='updated',
+ field=models.DateTimeField(default=django.utils.timezone.now),
+ ),
+ migrations.AlterField(
+ model_name='statscache',
+ name='values',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='supporttype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='titletype',
+ name='txt_idx',
+ field=models.TextField(help_text='The slug is the standardized version of the name. It contains only lowercase letters, numbers and hyphens. Each slug must be unique.', unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid')], verbose_name='Textual ID'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='geodata',
+ field=models.ManyToManyField(blank=True, related_name='related_items_%(app_label)s_%(class)s', to='ishtar_common.geovectordata', verbose_name='Geodata'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='imports',
+ field=models.ManyToManyField(blank=True, related_name='imported_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Created by imports'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='imports_updated',
+ field=models.ManyToManyField(blank=True, related_name='import_updated_%(app_label)s_%(class)s', to='ishtar_common.import', verbose_name='Updated by imports'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='main_geodata',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='main_related_items_%(app_label)s_%(class)s', to='ishtar_common.geovectordata', verbose_name='Main geodata'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0264_alter_importertype_export_format.py b/ishtar_common/migrations/0264_alter_importertype_export_format.py
new file mode 100644
index 000000000..628e265be
--- /dev/null
+++ b/ishtar_common/migrations/0264_alter_importertype_export_format.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.19 on 2025-04-17 08:52
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0263_alter_historicaldocument_options_and_more'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='importertype',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[('qfld', 'QFIELD')], default='', max_length=4, verbose_name='Export format'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0265_alter_importertype_export_format.py b/ishtar_common/migrations/0265_alter_importertype_export_format.py
new file mode 100644
index 000000000..62ad8236a
--- /dev/null
+++ b/ishtar_common/migrations/0265_alter_importertype_export_format.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.19 on 2025-04-17 08:53
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0264_alter_importertype_export_format'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='importertype',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[('gpkg', 'QFIELD')], default='', max_length=4, verbose_name='Export format'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0266_alter_importertype_type.py b/ishtar_common/migrations/0266_alter_importertype_type.py
new file mode 100644
index 000000000..0eace8814
--- /dev/null
+++ b/ishtar_common/migrations/0266_alter_importertype_type.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.19 on 2025-04-18 07:30
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0265_alter_importertype_export_format'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='importertype',
+ name='type',
+ field=models.CharField(choices=[('tab', 'Table'), ('gis', 'GIS'), ('qfd', 'QFIELD')], default='tab', max_length=3, verbose_name='Type'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0267_alter_documenttemplate_export_format_and_more.py b/ishtar_common/migrations/0267_alter_documenttemplate_export_format_and_more.py
new file mode 100644
index 000000000..0051f7a0c
--- /dev/null
+++ b/ishtar_common/migrations/0267_alter_documenttemplate_export_format_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.2.19 on 2025-04-18 07:34
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0266_alter_importertype_type'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='documenttemplate',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[('', '---'), ('gpkg', 'QFIELD')], default='', max_length=4, verbose_name='Export format'),
+ ),
+ migrations.AlterField(
+ model_name='importertype',
+ name='type',
+ field=models.CharField(choices=[('tab', 'Table'), ('gis', 'GIS')], default='tab', max_length=3, verbose_name='Type'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0268_alter_documenttemplate_export_format.py b/ishtar_common/migrations/0268_alter_documenttemplate_export_format.py
new file mode 100644
index 000000000..93d1ff05d
--- /dev/null
+++ b/ishtar_common/migrations/0268_alter_documenttemplate_export_format.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.19 on 2025-04-18 07:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0267_alter_documenttemplate_export_format_and_more'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='documenttemplate',
+ name='export_format',
+ field=models.CharField(blank=True, choices=[('', '---')], default='', max_length=4, verbose_name='Export format'),
+ ),
+ ]
diff --git a/ishtar_common/models_imports.py b/ishtar_common/models_imports.py
index 38ac41d38..6a9b8b145 100644
--- a/ishtar_common/models_imports.py
+++ b/ishtar_common/models_imports.py
@@ -56,6 +56,8 @@ from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _, pgettext_lazy
+#from ishtar_common.models import EXPORT_FORMATS
+
UnoCalc = None
ITALIC = None
if settings.USE_LIBREOFFICE:
@@ -153,6 +155,9 @@ IMPORT_TYPES = (
)
IMPORT_TYPES_DICT = dict(IMPORT_TYPES)
+# Try to imitate the EXPORT_FORMAT of models.py
+EXPORT_FORMATS = [("gpkg", _("QFIELD")),]
+EXPORT_FORMATS_DICT = dict(EXPORT_FORMATS)
SPECIFIC_TYPES_COLUMNS = {
"point_2d": "Point2D",
@@ -179,6 +184,8 @@ class ImporterType(models.Model):
type = models.CharField(
_("Type"), max_length=3, choices=IMPORT_TYPES, default="tab"
)
+ # Addition of the export_format
+ export_format = models.CharField(_("Export format"), max_length=4, choices=EXPORT_FORMATS, blank=True, default="")
available = models.BooleanField(_("Available"), default=True)
users = models.ManyToManyField("IshtarUser", verbose_name=_("Users"), blank=True)
associated_models = models.ForeignKey(
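
The new export_format field is a plain Django choices field. A hedged sketch of how calling code might use it (the queryset below is assumed; get_export_format_display() is the accessor Django generates for choices fields):

# Illustrative usage of the new export_format field (not part of the patch).
from ishtar_common.models_imports import ImporterType

gpkg_importer_types = ImporterType.objects.filter(export_format="gpkg", available=True)
for importer_type in gpkg_importer_types:
    # get_export_format_display() returns the human-readable label, e.g. "QFIELD"
    print(importer_type, importer_type.get_export_format_display())
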
diff --git a/ishtar_common/tests/Finds.gpkg b/ishtar_common/tests/Finds.gpkg
new file mode 100644
index 000000000..5000f2234
--- /dev/null
+++ b/ishtar_common/tests/Finds.gpkg
Binary files differ
diff --git a/ishtar_common/views.py b/ishtar_common/views.py
index f89ea865a..a2c041030 100644
--- a/ishtar_common/views.py
+++ b/ishtar_common/views.py
@@ -872,6 +872,9 @@ def get_by_importer(
cols, col_names = importer.get_columns(importer_class=importer_class)
if data_type == "csv" or dct.get("type", "") == "csv":
obj_name = importer.name
+ print(obj_name)
+ elif data_type == "gpkg" or dct.get("type", "") == "gpkg":
+ obj_name = importer.name
else:
obj_name = importer_class.OBJECT_CLS.__name__.lower()
return get_item(importer_class.OBJECT_CLS, "get_" + obj_name, obj_name,
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index 8d2786d17..4126219fd 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -20,6 +20,7 @@
from collections import OrderedDict
from copy import copy, deepcopy
+import os
import csv
import datetime
import json
@@ -29,6 +30,8 @@ import requests
# nosec: no user input used
import subprocess # nosec
from tempfile import NamedTemporaryFile
+from osgeo import ogr, osr
+import shutil
from django.apps import apps
from django.conf import settings
@@ -3215,9 +3218,85 @@ def get_item(
except UnicodeEncodeError:
vals.append(unidecode(v).encode(ENCODING).decode(ENCODING))
writer.writerow(vals)
- return response
+ #return response
+ elif data_type == "gpkg":
+ # Work in progress
+ # Creation of the .gpkg
+ driver = ogr.GetDriverByName("GPKG")
+ root = settings.LIB_BASE_PATH + "ishtar_common/tests/"
+ filename = os.path.join(root, "Finds.gpkg")
+ # Delete the .gpkg if it already exists
+ if os.path.exists(filename):
+ os.remove(filename)
+ datasource = driver.CreateDataSource(filename)
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(4326)
+ # Layer creation
+ layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
+ # Getting all the column names (copy from below)
+ if col_names:
+ col_names = [name for name in col_names]
+ else:
+ col_names = []
+ for field_name in table_cols:
+ if type(field_name) in (list, tuple):
+ field_name = " & ".join(field_name)
+ if hasattr(model, "COL_LABELS") and field_name in model.COL_LABELS:
+ field = model.COL_LABELS[field_name]
+ col_names.append(str(field))
+ continue
+ else:
+ try:
+ field = model._meta.get_field(field_name)
+ except:
+ col_names.append("")
+ logger.warning(
+ "**WARN get_item - csv export**: no col name "
+ "for {}\nadd explicit label to "
+ "COL_LABELS attribute of "
+ "{}".format(field_name, model)
+ )
+ continue
+ col_names.append(str(field.verbose_name))
+ # Creation of the columns
+ for name in col_names:
+ layer.CreateField(ogr.FieldDefn(name, ogr.OFTString))
+ max = len(col_names)
+ # Looping on all the extracted data
+ for data in datas:
+ # Creation of a new feature
+ feature = ogr.Feature(layer.GetLayerDefn())
+ # Looping on the attributes to add them to the feature
+ for n in range (0, max) :
+ # +1 because the first value in the attributes is ''
+ m = n + 1
+ feature.SetField(col_names[n], str(data[m]))
+ # First version to create the geometry of the feature
+ # Work in progress
+ if "x" in col_names[n] or "X" in col_names[n]:
+ try:
+ float(data[m])
+ geom_x = data[m]
+ except:
+ pass
+ if "y" in col_names[n] or "Y" in col_names[n]:
+ try:
+ float(data[m])
+ geom_y = data[m]
+ except:
+ pass
+ try:
+ point = ogr.Geometry(ogr.wkbPoint)
+ point.AddPoint(float(geom_x), float(geom_y))
+ feature.SetGeometry(point)
+ layer.CreateFeature(feature)
+ except:
+ pass
+ feature = None
+ datasource = None
+ # Missing: move the new .gpkg into a copy of the QField folder
+ # Work in progress
return HttpResponse("{}", content_type="text/plain")
-
return func
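
The gpkg branch added to get_item builds one OGR feature per exported row, guesses the x/y columns by name, and only creates the feature when a numeric coordinate pair is found. The same row-to-feature logic, extracted into a standalone sketch (helper name, exact column detection and error handling are illustrative assumptions, not the view's actual API):

# Standalone sketch of the row-to-feature conversion performed by the new
# gpkg branch (helper name and column detection are assumptions).
from osgeo import ogr

def row_to_feature(layer, col_names, row):
    feature = ogr.Feature(layer.GetLayerDefn())
    x = y = None
    for name, value in zip(col_names, row):
        feature.SetField(name, str(value))
        if name.lower() == "x":
            x = value
        elif name.lower() == "y":
            y = value
    try:
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(float(x), float(y))
        feature.SetGeometry(point)
        layer.CreateFeature(feature)
    except (TypeError, ValueError):
        # no usable coordinate pair: the feature is dropped, as in the view
        pass
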