summaryrefslogtreecommitdiff
path: root/archaeological_finds/tests.py
diff options
context:
space:
mode:
Diffstat (limited to 'archaeological_finds/tests.py')
-rw-r--r-- archaeological_finds/tests.py 92
1 files changed, 43 insertions, 49 deletions
diff --git a/archaeological_finds/tests.py b/archaeological_finds/tests.py
index 46f11e3e6..87ed1f179 100644
--- a/archaeological_finds/tests.py
+++ b/archaeological_finds/tests.py
@@ -24,8 +24,8 @@ import os
import shutil
import tempfile
from zipfile import ZipFile
-import subprocess
from osgeo import ogr, osr
+import shutil
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token
@@ -1014,6 +1014,7 @@ class ImportFindTest(BaseImportFindTest):
# check errors
self.assertEqual(len(impt.errors), 0)
+
def test_verify_qfield_zip(self):
"""
:function: Test if all the files of the QField zipped folder are correct
@@ -1030,6 +1031,7 @@ class ImportFindTest(BaseImportFindTest):
# Closing of the .zip
zip_file.close()
+
def test_add_file_qfield_zip(self):
"""
:function: Try the addition of a file in the zip for QField that will be dowloaded
@@ -1037,8 +1039,10 @@ class ImportFindTest(BaseImportFindTest):
# Definition of the path to test importer data for GIS data
root = settings.LIB_BASE_PATH + "archaeological_finds/tests/"
filename = os.path.join(root, "qfield-prospection.zip")
+ copy = os.path.join(root, "qfield-prospection-copy.zip")
+ shutil.copyfile(filename, copy)
# Opening of the .zip
- with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'a') as zip_file:
+ with ZipFile(copy, 'a') as zip_file:
# Verification of the number of files in the .zip before adding a new one
self.assertEqual(len(zip_file.namelist()), 2)
# Recovery of the .csv to add for the test
@@ -1050,22 +1054,10 @@ class ImportFindTest(BaseImportFindTest):
# Verification of the names of the files in the .zip
list_files = ["Qfield_prospection.qgs", "Qfield_prospection_attachments.zip","Finds.csv"]
self.assertEqual(zip_file.namelist(), list_files)
- # Cloning and deletion of the .zip to have 2 files once again
- zip_temp = filename+".temp"
- with ZipFile(zip_temp, 'w') as zip_new:
- for item in zip_file.infolist():
- if item.filename != "Finds.csv" :
- zip_new.writestr(item, zip_file.read(item.filename))
# Closing of the old .zip
zip_file.close()
- # Squashing the old .zip with the new one
- os.replace(zip_temp,filename)
- # Opening of the new .zip
- with ZipFile(os.path.join(root, "qfield-prospection.zip"), 'r') as zip_file:
- # Verification of the number of files in the .zip after deleting the .csv
- self.assertEqual(len(zip_file.namelist()), 2)
- # Closing of the new .zip
- zip_file.close()
+ os.remove(copy)
+
def test_qfield_import_finds(self):
"""
@@ -1138,6 +1130,7 @@ class ImportFindTest(BaseImportFindTest):
self.assertTrue(geo.z)
self.assertEqual(geo.z, 2000)
+
def test_qfield_import_group(self):
"""
:function: Try the importation of datas from a QField prodject (context record, finds and documents)
@@ -1190,6 +1183,7 @@ class ImportFindTest(BaseImportFindTest):
self.assertEqual(Document.objects.count(), nb_docs + 1)
self.assertFalse(any(imp.error_file for imp in impt.imports.all()), msg="Error on group import")
+
def test_csv_to_gpkg(self):
"""
:function: Creation of a .gpkg file from the data of an imported .csv
@@ -1231,15 +1225,10 @@ class ImportFindTest(BaseImportFindTest):
operation=ope,
label="CR"
)
- # Getting referential values (nb objects, containers,docs, etc.)
- nb_base_find = models.BaseFind.objects.count()
- nb_find = models.Find.objects.count()
- nb_docs = Document.objects.count()
# Beggining of importation
impt.importation()
# Step 2 : Convertion to .gpkg
gpkg = os.path.join(root, "Finds.gpkg")
- layer_name = "Finds"
# Deletion of the .gpkg if already existing
if os.path.exists(gpkg):
os.remove(gpkg)
@@ -1250,47 +1239,52 @@ class ImportFindTest(BaseImportFindTest):
srs.ImportFromEPSG(4326)
# Layer creation
layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
- # Attributes creation
- layer.CreateField(ogr.FieldDefn("identifiant", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("operation", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("date", ogr.OFTDate))
- layer.CreateField(ogr.FieldDefn("x", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("y", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("z", ogr.OFTReal))
- layer.CreateField(ogr.FieldDefn("materiau(x)", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("description", ogr.OFTString))
- layer.CreateField(ogr.FieldDefn("wkt", ogr.OFTString))
+ col_names = ["identifiant",
+ "operation",
+ "date",
+ "x",
+ "y",
+ "z",
+ "materiau(x)",
+ "description",
+ "wkt"]
+ for name in col_names :
+ # Attributes creation
+ layer.CreateField(ogr.FieldDefn(name, ogr.OFTString))
# Importation of the data
feature = ogr.Feature(layer.GetLayerDefn())
+ datas = []
new = models.BaseFind.objects.order_by("-pk").all()[:1]
+ finds = models.Find.objects.order_by("-pk").all()[:1]
+ crs = ContextRecord.objects.order_by("-pk").all()[:1]
+ geos = GeoVectorData.objects.order_by("-pk").all()[:1]
for find in new :
- feature.SetField("identifiant", find.label)
- feature.SetField("date",
- int(find.discovery_date.year),
- int(find.discovery_date.month),
- int(find.discovery_date.day),
- 0, 0, 0, 0)
- feature.SetField("wkt", str(find.point_2d))
- new = models.Find.objects.order_by("-pk").all()[:1]
+ datas.append(find.label)
+ for cr in crs:
+ datas.append(cr.label)
for find in new:
- feature.SetField("materiau(x)", str(find.material_types))
- feature.SetField("description", str(find.description))
- new = ContextRecord.objects.order_by("-pk").all()[:1]
- for cr in new:
- feature.SetField("operation", cr.label)
- new = GeoVectorData.objects.order_by("-pk").all()[:1]
- for geo in new:
- feature.SetField("x", geo.x)
- feature.SetField("y", geo.y)
- feature.SetField("z", geo.z)
+ datas.append(find.discovery_date)
+ for geo in geos:
+ datas.append(geo.x)
+ datas.append(geo.y)
+ datas.append(geo.z)
# Geometry creation
point = ogr.Geometry(ogr.wkbPoint)
point.AddPoint(geo.x, geo.y)
feature.SetGeometry(point)
+ for find in finds:
+ datas.append(find.material_types)
+ datas.append(find.description)
+ for find in new:
+ datas.append(str(find.point_2d))
+ max = len(datas)
+ for n in range(0, max):
+ feature.SetField(col_names[n], str(datas[n]))
layer.CreateFeature(feature)
feature = None
datasource = None
+
class ExportTest(FindInit, TestCase):
fixtures = FIND_TOWNS_FIXTURES