summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorThomas André <thomas.andre@iggdrasil.net>2025-04-17 08:58:48 +0200
committerÉtienne Loks <etienne.loks@iggdrasil.net>2025-07-29 08:49:03 +0200
commit283c594606b5c69ef2cc750f4eabdafcc904f0b4 (patch)
tree61091917fbb7b4d1213b4f16ec367b6400fb6588
parentaf13ebbe3d2314cef33bd8f9e372f62518db4465 (diff)
downloadIshtar-283c594606b5c69ef2cc750f4eabdafcc904f0b4.tar.bz2
Ishtar-283c594606b5c69ef2cc750f4eabdafcc904f0b4.zip
Tests for the conversion of data from the database to a .gpkg + Modifications to keep specific importers in Ishtar
-rw-r--r--archaeological_context_records/tests.py13
-rw-r--r--archaeological_finds/tests.py214
-rw-r--r--archaeological_finds/tests/Finds.csv2
-rw-r--r--archaeological_finds/tests/Finds.gpkgbin0 -> 98304 bytes
-rw-r--r--archaeological_finds/tests/qfield-csv-test.zipbin0 -> 5877 bytes
-rw-r--r--archaeological_finds/tests/qfield-importeur-data.csv2
-rw-r--r--archaeological_finds/tests/qfield-importeur-test.zipbin5768 -> 0 bytes
-rw-r--r--ishtar_common/fixtures/initial_importtypes-qfield.json483
-rw-r--r--ishtar_common/tests.py2
9 files changed, 651 insertions, 65 deletions
diff --git a/archaeological_context_records/tests.py b/archaeological_context_records/tests.py
index 1716920e9..4cdf676b6 100644
--- a/archaeological_context_records/tests.py
+++ b/archaeological_context_records/tests.py
@@ -20,6 +20,7 @@ import csv
import json
from io import StringIO
import locale
+import os
from django.apps import apps
from django.conf import settings
@@ -230,12 +231,12 @@ class ImportContextRecordTest(ImportTest, TestCase):
"""
# Definition of the path to test importer data for GIS data
root = settings.LIB_BASE_PATH + "archaeological_context_records/tests/"
- filename = root + "qfield-ue-test.zip"
+ filename = os.path.join(root, "qfield-ue-test.zip")
self.restore_serialized(filename)
# Uses of a class in ishtar_commons.model_import to retrieve a model via its slug (?)
imp_type = ImporterType.objects.get(slug="qfield-ue-test") # Change the name with the slug of the importeur !!!
# Opening of the CSV
- with open(root + "qfield-importeur-data.csv", "rb") as imp_file :
+ with open(os.path.join(root, "qfield-importeur-data.csv"), "rb") as imp_file :
file_dict = {
"imported_file": SimpleUploadedFile(imp_file.name, imp_file.read())
}
@@ -252,20 +253,22 @@ class ImportContextRecordTest(ImportTest, TestCase):
)
self.assertTrue(form.is_valid())
impt = form.save(self.ishtar_user)
- # Initialisation de l'import
+ # Initialisation of the importation
impt.initialize()
# Creation of an operation for the importation
ope, __ = models.Operation.objects.get_or_create(
code_patriarche="OP",
operation_type=models_ope.OperationType.objects.all()[0])
-
# Getting referential values (nb objects, containers,docs, etc.)
nb_cr = models.ContextRecord.objects.count()
# Beggining of importation
impt.importation()
# Getting values after modifications
self.assertEqual(models.ContextRecord.objects.count(), nb_cr + 1)
-
+ new = models.ContextRecord.objects.order_by("-pk").all()[:1]
+ for cr in new:
+ print(cr.main_geodata)
+ #self.assertTrue(cr.main_geodata.point_3d) # To modify in future
class ContextRecordInit(OperationInitTest):
diff --git a/archaeological_finds/tests.py b/archaeological_finds/tests.py
index 8d3f3607d..46f11e3e6 100644
--- a/archaeological_finds/tests.py
+++ b/archaeological_finds/tests.py
@@ -24,6 +24,8 @@ import os
import shutil
import tempfile
from zipfile import ZipFile
+import subprocess
+from osgeo import ogr, osr
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token
@@ -1017,62 +1019,60 @@ class ImportFindTest(BaseImportFindTest):
:function: Test if all the files of the QField zipped folder are correct
"""
# Definition of the path to test importer data for GIS data
- root = settings.LIB_BASE_PATH + "archaeological_finds/tests/" # Ne pas ch
- filename = os.path.join(root, "qfield-prospection.zip")
+ root = settings.LIB_BASE_PATH + "archaeological_finds/tests/"
# Opening of the .zip
- zip_file = ZipFile(filename, 'r')
- # Verification of the number of files in the .zip
- self.assertEqual(len(zip_file.namelist()),2)
- # Verification of the names of the files in the .zip
- list_files=["Qfield_prospection.qgs","Qfield_prospection_attachments.zip"]
- self.assertEqual(zip_file.namelist(), list_files)
- # Closing of the .zip
- zip_file.close()
+ with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'r') as zip_file:
+ # Verification of the number of files in the .zip
+ self.assertEqual(len(zip_file.namelist()),2)
+ # Verification of the names of the files in the .zip
+ list_files = ["Qfield_prospection.qgs","Qfield_prospection_attachments.zip"]
+ self.assertEqual(zip_file.namelist(), list_files)
+ # Closing of the .zip
+ zip_file.close()
def test_add_file_qfield_zip(self):
"""
:function: Try the addition of a file in the zip for QField that will be dowloaded
"""
# Definition of the path to test importer data for GIS data
- root = settings.LIB_BASE_PATH + "archaeological_finds/tests/" # Ne pas ch
+ root = settings.LIB_BASE_PATH + "archaeological_finds/tests/"
filename = os.path.join(root, "qfield-prospection.zip")
# Opening of the .zip
- zip_file = ZipFile(filename, 'a')
- # Verification of the number of files in the .zip before adding a new one
- self.assertEqual(len(zip_file.namelist()), 2)
- # Recovery of the .csv to add for the test
- csv=os.path.join(root, "Finds.csv")
- # Adding the .csv to the .zip
- zip_file.write(csv, os.path.basename(csv))
- # Verification of the number of files in the .zip after adding the .csv
- self.assertEqual(len(zip_file.namelist()), 3)
- # Verification of the names of the files in the .zip
- list_files = ["Qfield_prospection.qgs", "Qfield_prospection_attachments.zip","Finds.csv"]
- self.assertEqual(zip_file.namelist(), list_files)
- # Cloning and deletion of the .zip to have 2 files once again
- zip_temp=filename+".temp"
- with ZipFile(filename, 'r') as zip_orig:
+ with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'a') as zip_file:
+ # Verification of the number of files in the .zip before adding a new one
+ self.assertEqual(len(zip_file.namelist()), 2)
+ # Recovery of the .csv to add for the test
+ data = os.path.join(root, "Finds.csv")
+ # Adding the .csv to the .zip
+ zip_file.write(data, os.path.basename(data))
+ # Verification of the number of files in the .zip after adding the .csv
+ self.assertEqual(len(zip_file.namelist()), 3)
+ # Verification of the names of the files in the .zip
+ list_files = ["Qfield_prospection.qgs", "Qfield_prospection_attachments.zip","Finds.csv"]
+ self.assertEqual(zip_file.namelist(), list_files)
+ # Cloning and deletion of the .zip to have 2 files once again
+ zip_temp = filename+".temp"
with ZipFile(zip_temp, 'w') as zip_new:
- for item in zip_orig.infolist():
- if item.filename!= "Finds.csv" :
- zip_new.writestr(item,zip_orig.read(item.filename))
- # Closing of the old .zip
- zip_file.close()
+ for item in zip_file.infolist():
+ if item.filename != "Finds.csv" :
+ zip_new.writestr(item, zip_file.read(item.filename))
+ # Closing of the old .zip
+ zip_file.close()
# Squashing the old .zip with the new one
os.replace(zip_temp,filename)
# Opening of the new .zip
- zip_file = ZipFile(filename, 'r')
- # Verification of the number of files in the .zip after deleting the .csv
- self.assertEqual(len(zip_file.namelist()), 2)
- # Closing of the new .zip
- zip_file.close()
+ with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'r') as zip_file:
+ # Verification of the number of files in the .zip after deleting the .csv
+ self.assertEqual(len(zip_file.namelist()), 2)
+ # Closing of the new .zip
+ zip_file.close()
def test_qfield_import_finds(self):
"""
- :function: Try the importation of finds link to QField
+ :function: Try the importation of finds linked to QField
"""
# Definition of the path to test importer data for GIS data
- root = settings.LIB_BASE_PATH + "archaeological_finds/tests/" # Ne pas ch
+ root = settings.LIB_BASE_PATH + "archaeological_finds/tests/"
filename = os.path.join(root, "qfield-mobilier-test.zip")
self.restore_serialized(filename)
# Uses of a class in ishtar_commons.model_import to retrieve a model via its slug (?)
@@ -1116,24 +1116,36 @@ class ImportFindTest(BaseImportFindTest):
self.assertEqual(models.Find.objects.count(), nb_find + 1)
self.assertEqual(Document.objects.count(), nb_docs + 1)
# Verification of the imported values
+ new = models.BaseFind.objects.order_by("-pk").all()[:1]
+ for data in new:
+ self.assertEqual(data.label, "123")
+ self.assertEqual(str(data.discovery_date), "2025-04-07")
+ self.assertEqual(data.point_2d, "SRID=4326;POINT (-2.26868001391598 47.3849390721505)")
+ new = models.Find.objects.order_by("-pk").all()[:1]
+ for data in new:
+ self.assertEqual(data.label, "123")
+ self.assertEqual(str(data.material_types), "archaeological_finds.MaterialType.None")
+ self.assertEqual(data.description, "Test")
+ new = ContextRecord.objects.order_by("-pk").all()[:1]
+ for cr in new:
+ self.assertEqual(cr.label, "CR")
new = GeoVectorData.objects.order_by("-pk").all()[:1]
for geo in new:
self.assertTrue(geo.x)
+ self.assertEqual(geo.x, 14)
self.assertTrue(geo.y)
+ self.assertEqual(geo.y, 3)
self.assertTrue(geo.z)
- self.assertEqual(new[0].x, 14)
- self.assertEqual(new[0].y, 3)
- self.assertEqual(new[0].z, 2000)
+ self.assertEqual(geo.z, 2000)
def test_qfield_import_group(self):
"""
:function: Try the importation of datas from a QField prodject (context record, finds and documents)
- CURRENTLY BUGGED
"""
# Definition of the path to test importer data for GIS data
root = os.path.join(settings.LIB_BASE_PATH, "archaeological_finds", "tests")
self.root = root
- importer_filename = os.path.join(root, "qfield-importeur-test.zip")
+ importer_filename = os.path.join(root, "qfield-csv-test.zip")
restore_serialized(importer_filename)
# Uses of a class in ishtar_commons.model_import to retrieve a model via its slug (?)
imp_group = ImporterGroup.objects.get(slug="qfield-csv-test")
@@ -1152,14 +1164,8 @@ class ImportFindTest(BaseImportFindTest):
"skip_lines": 1,
"csv_sep": ",",
}
- # Initialization of error values
- form = forms_common.NewImportGroupForm(
- data=post_dict, files=file_dict, user=self.user
- )
- self.assertFalse(form.is_valid())
- self.assertIn(str(_("This importer need a document archive.")),
- form.errors["__all__"])
file_dict["imported_images"] = imp_media
+ # Initialization of error values
form = forms_common.NewImportGroupForm(
data=post_dict, files=file_dict, user=self.user
)
@@ -1172,25 +1178,119 @@ class ImportFindTest(BaseImportFindTest):
ope, __ = Operation.objects.get_or_create(
code_patriarche="OP",
operation_type=OperationType.objects.all()[0])
- cr, __ = ContextRecord.objects.get_or_create(
- operation=ope,
- label="CR"
- )
-
# Getting referential values (nb objects, containers,docs, etc.)
nb_base_find = models.BaseFind.objects.count()
nb_find = models.Find.objects.count()
nb_docs = Document.objects.count()
-
# Beggining of importation
impt.importation()
-
# Getting values after modifications
self.assertEqual(models.BaseFind.objects.count(), nb_base_find + 1)
self.assertEqual(models.Find.objects.count(), nb_find + 1)
self.assertEqual(Document.objects.count(), nb_docs + 1)
self.assertFalse(any(imp.error_file for imp in impt.imports.all()), msg="Error on group import")
+ def test_csv_to_gpkg(self):
+ """
+ :function: Creation of a .gpkg file from the data of an imported .csv
+ """
+ # Step 1 : Importation of data
+ # Definition of the path to test importer data for GIS data
+ root = os.path.join(settings.LIB_BASE_PATH, "archaeological_finds", "tests")
+ self.root = root
+ filename = os.path.join(root, "qfield-mobilier-test.zip")
+ self.restore_serialized(filename)
+ # Uses of a class in ishtar_commons.model_import to retrieve a model via its slug (?)
+ imp_type = ImporterType.objects.get(
+ slug="qfield-mobilier-test") # Change the name with the slug of the importeur !!!
+ # Opening of the CSV
+ with open(os.path.join(root, "qfield-importeur-data.csv"), "rb") as imp_file:
+ file_dict = {
+ "imported_file": SimpleUploadedFile(imp_file.name, imp_file.read())
+ }
+ post_dict = {
+ "importer_type": imp_type.pk,
+ "name": "find_geo_import",
+ "encoding": "utf-8",
+ "skip_lines": 1,
+ "csv_sep": ",",
+ }
+ # Preparation of the data import
+ form = forms_common.NewImportGISForm(
+ data=post_dict, files=file_dict, user=self.user
+ )
+ self.assertTrue(form.is_valid())
+ impt = form.save(self.ishtar_user)
+ # Import initialization
+ impt.initialize()
+ # Creation of an operation and a context record for the importation
+ ope, __ = Operation.objects.get_or_create(
+ code_patriarche="GOA",
+ operation_type=OperationType.objects.all()[0])
+ cr, __ = ContextRecord.objects.get_or_create(
+ operation=ope,
+ label="CR"
+ )
+ # Getting referential values (nb objects, containers,docs, etc.)
+ nb_base_find = models.BaseFind.objects.count()
+ nb_find = models.Find.objects.count()
+ nb_docs = Document.objects.count()
+ # Beggining of importation
+ impt.importation()
+ # Step 2 : Convertion to .gpkg
+ gpkg = os.path.join(root, "Finds.gpkg")
+ layer_name = "Finds"
+ # Deletion of the .gpkg if already existing
+ if os.path.exists(gpkg):
+ os.remove(gpkg)
+ # Getting necessary information from OsGeo to create the .gpkg
+ driver = ogr.GetDriverByName("GPKG")
+ datasource = driver.CreateDataSource(gpkg)
+ srs = osr.SpatialReference()
+ srs.ImportFromEPSG(4326)
+ # Layer creation
+ layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
+ # Attributes creation
+ layer.CreateField(ogr.FieldDefn("identifiant", ogr.OFTString))
+ layer.CreateField(ogr.FieldDefn("operation", ogr.OFTString))
+ layer.CreateField(ogr.FieldDefn("date", ogr.OFTDate))
+ layer.CreateField(ogr.FieldDefn("x", ogr.OFTReal))
+ layer.CreateField(ogr.FieldDefn("y", ogr.OFTReal))
+ layer.CreateField(ogr.FieldDefn("z", ogr.OFTReal))
+ layer.CreateField(ogr.FieldDefn("materiau(x)", ogr.OFTString))
+ layer.CreateField(ogr.FieldDefn("description", ogr.OFTString))
+ layer.CreateField(ogr.FieldDefn("wkt", ogr.OFTString))
+ # Importation of the data
+ feature = ogr.Feature(layer.GetLayerDefn())
+ new = models.BaseFind.objects.order_by("-pk").all()[:1]
+ for find in new :
+ feature.SetField("identifiant", find.label)
+ feature.SetField("date",
+ int(find.discovery_date.year),
+ int(find.discovery_date.month),
+ int(find.discovery_date.day),
+ 0, 0, 0, 0)
+ feature.SetField("wkt", str(find.point_2d))
+ new = models.Find.objects.order_by("-pk").all()[:1]
+ for find in new:
+ feature.SetField("materiau(x)", str(find.material_types))
+ feature.SetField("description", str(find.description))
+ new = ContextRecord.objects.order_by("-pk").all()[:1]
+ for cr in new:
+ feature.SetField("operation", cr.label)
+ new = GeoVectorData.objects.order_by("-pk").all()[:1]
+ for geo in new:
+ feature.SetField("x", geo.x)
+ feature.SetField("y", geo.y)
+ feature.SetField("z", geo.z)
+ # Geometry creation
+ point = ogr.Geometry(ogr.wkbPoint)
+ point.AddPoint(geo.x, geo.y)
+ feature.SetGeometry(point)
+ layer.CreateFeature(feature)
+ feature = None
+ datasource = None
+
class ExportTest(FindInit, TestCase):
fixtures = FIND_TOWNS_FIXTURES
diff --git a/archaeological_finds/tests/Finds.csv b/archaeological_finds/tests/Finds.csv
index 41c4bfb63..1ac6e2be1 100644
--- a/archaeological_finds/tests/Finds.csv
+++ b/archaeological_finds/tests/Finds.csv
@@ -1 +1 @@
-id,id_unique,ue,date,x,y,z,materiau(x),descr,media,WKT
+id,id_unique,ue,date,x,y,z,materiau(x),descr,media,wkt
diff --git a/archaeological_finds/tests/Finds.gpkg b/archaeological_finds/tests/Finds.gpkg
new file mode 100644
index 000000000..bad041f4d
--- /dev/null
+++ b/archaeological_finds/tests/Finds.gpkg
Binary files differ
diff --git a/archaeological_finds/tests/qfield-csv-test.zip b/archaeological_finds/tests/qfield-csv-test.zip
new file mode 100644
index 000000000..e89d1536f
--- /dev/null
+++ b/archaeological_finds/tests/qfield-csv-test.zip
Binary files differ
diff --git a/archaeological_finds/tests/qfield-importeur-data.csv b/archaeological_finds/tests/qfield-importeur-data.csv
index ee28f0043..858b181e5 100644
--- a/archaeological_finds/tests/qfield-importeur-data.csv
+++ b/archaeological_finds/tests/qfield-importeur-data.csv
@@ -1,2 +1,2 @@
-id,id_unique,ue,date,x,y,z,materiau(x),descr,media,WKT
+id,id_unique,ue,date,x,y,z,materiau(x),descr,media,wkt
OP,123,CR,2025-04-07,14,3,2000,Indéterminé,Test,A.jpg,POINT (-2.26868001391598 47.3849390721505)
diff --git a/archaeological_finds/tests/qfield-importeur-test.zip b/archaeological_finds/tests/qfield-importeur-test.zip
deleted file mode 100644
index 35850620b..000000000
--- a/archaeological_finds/tests/qfield-importeur-test.zip
+++ /dev/null
Binary files differ
diff --git a/ishtar_common/fixtures/initial_importtypes-qfield.json b/ishtar_common/fixtures/initial_importtypes-qfield.json
new file mode 100644
index 000000000..0461f4b63
--- /dev/null
+++ b/ishtar_common/fixtures/initial_importtypes-qfield.json
@@ -0,0 +1,483 @@
+[
+ {
+ "model": "ishtar_common.importertype",
+ "fields": {
+ "name": "QField - Export - Mobilier",
+ "slug": "qfield-export-mobilier",
+ "description": "test",
+ "associated_models": [
+ "archaeological_finds.models_finds.Find"
+ ],
+ "is_template": true,
+ "unicity_keys": "label",
+ "available": true,
+ "users": [],
+ "created_models": [
+ [
+ "archaeological_finds.models_finds.Find"
+ ],
+ [
+ "archaeological_finds.models.BaseFind"
+ ],
+ [
+ "archaeological_operations.models.Parcel"
+ ],
+ [
+ "archaeological_context_records.models.ContextRecord"
+ ]
+ ]
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "id",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 1,
+ "description": "",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+ },
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "id_unique",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 2,
+ "description": "Identifiant de l'entit\u00e9 cr\u00e9\u00e9e",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": true,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "ue",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 3,
+ "description": "",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": true,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "date",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 4,
+ "description": "Moment d'enregistrement de l'entit\u00e9",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "x",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 5,
+ "description": "Coordonn\u00e9e X de l'entit\u00e9",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "y",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 6,
+ "description": "Coordonn\u00e9e Y de l'entit\u00e9",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "z",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 7,
+ "description": "Coordonn\u00e9e Z de l'entit\u00e9",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "materiau(x)",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 8,
+ "description": "Mat\u00e9riau(x) de l'entit\u00e9 cr\u00e9\u00e9e",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "descr",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 9,
+ "description": "Description et commentaires sur l'entit\u00e9 cr\u00e9\u00e9e",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "media",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 10,
+ "description": "",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": false,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importercolumn",
+ "fields": {
+ "label": "wkt",
+ "importer_type": [
+ "qfield-export-mobilier"
+ ],
+ "col_number": 11,
+ "description": "G\u00e9om\u00e9trie au format WKT de l'entit\u00e9 cr\u00e9\u00e9e",
+ "regexp_pre_filter": null,
+ "value_format": null,
+ "required": true,
+ "export_field_name": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 2
+ ],
+ "field_name": "base_finds__label",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 2
+ ],
+ "field_name": "documents__title",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 5
+ ],
+ "field_name": "base_finds__geodata__x",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 6
+ ],
+ "field_name": "base_finds__geodata__y",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 7
+ ],
+ "field_name": "base_finds__geodata__z",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importerduplicatefield",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 11
+ ],
+ "field_name": "base_finds__geodata__point_2d",
+ "force_new": false,
+ "concat": false,
+ "concat_str": null
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 1
+ ],
+ "target": "index",
+ "formater_type": [
+ "IntegerFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 2
+ ],
+ "target": "label",
+ "formater_type": [
+ "UnicodeFormater",
+ "200",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 3
+ ],
+ "target": "base_finds__context_record__label",
+ "formater_type": [
+ "UnicodeFormater",
+ "200",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 4
+ ],
+ "target": "base_finds__discovery_date",
+ "formater_type": [
+ "DateFormater",
+ "%d/%m/%Y | %Y-%m-%d",
+ " | "
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 5
+ ],
+ "target": "base_finds__x",
+ "formater_type": [
+ "FloatFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 6
+ ],
+ "target": "base_finds__y",
+ "formater_type": [
+ "FloatFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 7
+ ],
+ "target": "base_finds__z",
+ "formater_type": [
+ "FloatFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 8
+ ],
+ "target": "material_types",
+ "formater_type": [
+ "TypeFormater",
+ "archaeological_finds.models.MaterialType",
+ "&"
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 9
+ ],
+ "target": "description",
+ "formater_type": [
+ "UnicodeFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 10
+ ],
+ "target": "documents_image",
+ "formater_type": [
+ "FileFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+},
+{
+ "model": "ishtar_common.importtarget",
+ "fields": {
+ "column": [
+ "qfield-export-mobilier",
+ 11
+ ],
+ "target": "base_finds__point_2d",
+ "formater_type": [
+ "UnicodeFormater",
+ "",
+ ""
+ ],
+ "force_new": false,
+ "concat": false,
+ "concat_str": null,
+ "comment": ""
+ }
+ }
+] \ No newline at end of file
diff --git a/ishtar_common/tests.py b/ishtar_common/tests.py
index 8d3fd837a..d14c861bd 100644
--- a/ishtar_common/tests.py
+++ b/ishtar_common/tests.py
@@ -2736,7 +2736,7 @@ class BaseImportTest(TestCase):
root = os.path.join(settings.LIB_BASE_PATH, "archaeological_finds", "tests")
self.root = root
# Path to zip
- importer_filename = os.path.join(root, "qfield-importeur-test.zip")
+ importer_filename = os.path.join(root, "qfield-csv-test.zip")
restore_serialized(importer_filename)
imp_group = models.ImporterGroup.objects.get(slug="qfield-csv-test") # Must change the name !!!
# Opening of the csv