summaryrefslogtreecommitdiff
path: root/ishtar_common/views_item.py
diff options
context:
space:
mode:
Diffstat (limited to 'ishtar_common/views_item.py')
-rw-r--r--ishtar_common/views_item.py522
1 files changed, 407 insertions, 115 deletions
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index bd8780ea9..e697a2435 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -3221,38 +3221,9 @@ def get_item(
writer.writerow(vals)
return response
elif data_type == "gpkg":
- # Work in progress
# I. Preparations
- driver = ogr.GetDriverByName("GPKG")
root = settings.LIB_BASE_PATH + "ishtar_common/qfield/"
- # 1) Creation of the .gpkg
- filename = os.path.join(root, "export", "Finds.gpkg")
- # Verification to delete it if already existing
- if os.path.exists(filename):
- os.remove(filename)
- datasource = driver.CreateDataSource(filename)
- srs = osr.SpatialReference()
- srs.ImportFromEPSG(4326)
- # 2) Preparations for the modification of the style in the .qgs file
- qgs_path = os.path.join(root, "model", "Prospections.qgs")
- new_qgs = os.path.join(root, "export", "Prospections.qgs")
- if os.path.exists(new_qgs):
- os.remove(new_qgs)
- # 3) Duplication of the .zip for export
- project = os.path.join(root, "model", "Prospections_qfield.zip")
- duplicate = os.path.join(root, "export", "Prospections_qfield_export.zip")
- if os.path.exists(duplicate):
- os.remove(duplicate)
- shutil.copyfile(project, duplicate)
- # II. Populating of the .gpkg
- # 1) Layer creation with verification of the type of geometry to create
- if "base_finds__point_2d" in table_cols:
- layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint)
- elif any(elem in table_cols for elem in ["base_finds__point_3d", "_z"]):
- layer = datasource.CreateLayer("Finds", srs, ogr.wkbPoint25D)
- else:
- layer = datasource.CreateLayer("Finds", srs, ogr.wkbPolygon)
- # 2) Getting all the column names (copy from below)
+ # 1) Getting all the column names (copy from below)
if col_names:
col_names = [name for name in col_names]
else:
@@ -3277,105 +3248,426 @@ def get_item(
)
continue
col_names.append(str(field.verbose_name))
- # 3) Creation of the attributes
- print("II.3)")
- for idx in range(0, len(col_names)):
- if any(elem in table_cols[idx] for elem in ["index", "order", "quantity", "taq", "tpq", "year"]):
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTInteger64))
- elif any(elem in table_cols[idx] for elem in ["_x", "_y", "_z", "circumference", "cost", "depth", "diameter", "height", "length", "number", "surface", "side", "thickness", "value", "volume", "weight", "width"]):
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTReal))
- elif "_date" in table_cols[idx]:
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTDate))
- elif "_datetime" in table_cols[idx]:
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTDateTime))
- elif any(elem in table_cols[idx] for elem in ["large_area_prescription", "is_complete", "executed"]):
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTBinary))
- else:
- layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTString))
- idx += 1
- max = len(col_names)
- # Looping on all the datas extracted to create features
- for data in datas:
- # Creation of a new feature
- feature = ogr.Feature(layer.GetLayerDefn())
- # Preparations for the geometry
- point = ""
- geom_x = ""
- geom_y = ""
- geom_z = ""
- # Looping on the attributes to add them to the feature
- for idx in range (0, max) :
- # 4) Completion of the attributes
- if any(elem == table_cols[idx] for elem in ["_date", "_datetime"]):
- # Preparations for specific values for the date and date_time
- try:
- # First version if it has all the data necessary for an ogr.OFTDateTime
- # +1 because the first value in the attributes is ''
- feature.SetField(col_names[idx], data[idx + 1])
- except:
- # Second version if some values are missing
- # +1 because the first value in the attributes is ''
- feature.SetField(col_names[idx], data[idx + 1].year, data[idx + 1].month, data[idx + 1].day, 0, 0, 0)
- else:
- # +1 because the first value in the attributes is ''
- feature.SetField(col_names[idx], str(data[idx + 1]))
- # 5) Gestion of the geometry
- if any(elem in table_cols for elem in ["base_finds__point_2d", "base_finds__point_3d", "_line", "_points", "_polygon"]):
- if table_cols[idx] in ["base_finds__point_2d", "base_finds__point_3d", "_line", "_points", "_polygon"]:
- try:
- point = ogr.CreateGeometryFromWkt(data[idx + 1].split(";")[1])
- except:
- pass
- else:
- if "base_finds__x" and "base_finds__y" in table_cols:
- if table_cols[idx] == "base_finds__x":
- geom_x = data[idx + 1]
- elif table_cols[idx] == "base_finds__y":
- geom_y = data[idx + 1]
- if "base_finds__z" in table_cols:
- if table_cols[idx] == "base_finds__z":
- geom_z = data[idx + 1]
- # Prevent problems when both x,y and geometry are present
- if point == "" and geom_x != "" and geom_y != "":
- if geom_z != "":
- point = ogr.Geometry(ogr.wkbPoint25D)
- point.AddPoint(float(geom_x), float(geom_y), float(geom_z))
- else:
- point = ogr.Geometry(ogr.wkbPoint)
- point.AddPoint(float(geom_x), float(geom_y))
- if point != "":
- feature.SetGeometry(point)
- layer.CreateFeature(feature)
- feature = None
- datasource = None
- # 6) Modification of the style
- list_ref = ["champ_id", "champ_date", "champ_datetime", "champ_x", "champ_y", "champ_z", "champ_media", "champ_wkt_2d", "champ_wkt_3d"]
- list_search = ["label", "_date", "_datetime", "base_finds__x", "base_finds__y", "base_finds__z", "_image", "__point_2d", "__point_2d"]
- text = open(qgs_path, encoding='utf-8').read()
- for elem in list_search:
- for col in table_cols:
- if elem in col:
- id_old = list_search.index(elem)
- id_new = table_cols.index(col)
- text = text.replace(list_ref[id_old], col_names[id_new])
- else:
- pass
+ # 2) Creation of the .gpkg
+ finds, cr, list_ope, list_crea = gpkg_creation(root, table_cols, col_names, datas)
+ # 3) Preparations for the modification of the style in the .qgs file
+ qgs_path = os.path.join(root, 'model', 'Prospections.qgs')
+ new_qgs = os.path.join(root, 'export', 'Prospections.qgs')
+ if os.path.exists(new_qgs):
+ os.remove(new_qgs)
+ text = modification_style(qgs_path, table_cols, col_names, list_ope, list_crea)
with open(new_qgs, 'w', encoding='utf-8') as file:
file.write(text)
+ # II. Duplication of the .zip for export
+ project = os.path.join(root, 'model', 'Prospections_qfield.zip')
+ duplicate = os.path.join(root, 'export', 'Prospections_qfield_export.zip')
+ if os.path.exists(duplicate):
+ os.remove(duplicate)
+ shutil.copyfile(project, duplicate)
# III. Moving the .gpkg in a copy of the Qfield test project
with ZipFile(duplicate, 'a') as zip_file:
# Adding the .gpkg to the .zip
- zip_file.write(filename, os.path.basename(filename))
+ zip_file.write(finds, os.path.basename(finds))
+ zip_file.write(cr, os.path.basename(cr))
zip_file.write(new_qgs, os.path.basename(new_qgs))
# Closing of the .zip
zip_file.close()
response = HttpResponse(open(duplicate, 'rb'), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename="Qfield_prospections.zip"'
return response
- return HttpResponse("{}", content_type="text/plain")
+ return HttpResponse('{}', content_type='text/plain')
return func
def gpkg_creation(root, table_cols, col_names, datas):
    """Create the Finds and Context_records GeoPackages for a QField export.

    :param root: base path of the qfield resources (model/ and export/ dirs)
    :param table_cols: column identifiers of the extracted data
    :param col_names: human-readable column names (parallel to table_cols)
    :param datas: extracted rows; the first value of each row is ''
    :return: (finds_path, cr_path, list_ope, list_crea) where list_crea is
        ['cr'|'finds', attribute-name list] consumed by modification_style
    """
    driver = ogr.GetDriverByName('GPKG')
    # 1) Creation of the finds GeoPackage (replacing any previous export)
    finds = os.path.join(root, 'export', 'Finds.gpkg')
    datasource, srs = _new_gpkg_datasource(driver, finds)
    # 2) Verification of the origin of the export
    origin = geometry_attribution(table_cols, datas)
    cr = os.path.join(root, 'export', 'Context_records.gpkg')
    if origin == 'finds':
        # 3a) Finds layer built from the extracted columns
        layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D)
        layer = attributes_creation_finds_a(layer, col_names, table_cols)
        # 4a) Populate and collect the labels of the related context records
        list_cr = populating_layer_finds(layer, table_cols, col_names, datas)
        datasource = None  # dereference to flush the GeoPackage to disk
        # 5a) Context records GeoPackage with a default set of attributes
        datasource, srs = _new_gpkg_datasource(driver, cr)
        layer = datasource.CreateLayer('Context_records', srs, ogr.wkbMultiPolygon)
        list_crea = ['Unité_Enregistrement', 'Opération', 'INSEE_Commune', 'Parcelle', 'Type', 'Interprétation',
                     'Description', 'Localisation', 'Media', 'Periode', 'Type_Activité', 'WKT']
        layer = attributes_creation_cr_a(layer, list_crea)
        # 6a) Populate from the context records of the extracted finds
        list_ope = populating_layer_cr_a(layer, list_crea, list_cr)
        datasource = None  # flush
        # 7a) Attribute names forwarded to the style modification
        list_crea = ['cr', list_crea]
    else:
        # 3b) Finds layer with a default set of attributes
        layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D)
        list_crea = ['Identifiant', 'UE', 'Date', 'X', 'Y', 'Z', 'Matériaux', 'Description', 'Media', 'WKT_point']
        attributes_creation_finds_b(layer, list_crea)
        datasource = None  # flush
        # 4b) Context records GeoPackage built from the extracted columns
        datasource, srs = _new_gpkg_datasource(driver, cr)
        layer = datasource.CreateLayer('Context_records', srs, ogr.wkbMultiPolygon)
        layer = attributes_creation_cr_b(layer, col_names, table_cols)
        # 5b) Populating the cr layer with the datas
        list_ope = populating_layer_cr_b(layer, table_cols, col_names, datas)
        # BUGFIX: the Context_records datasource was never dereferenced in
        # this branch, so the .gpkg could be zipped before being flushed.
        datasource = None
        # 6b) Attribute names forwarded to the style modification
        list_crea = ['finds', list_crea]
    return finds, cr, list_ope, list_crea


def _new_gpkg_datasource(driver, path):
    """(Re)create a GeoPackage at *path*; return (datasource, WGS84 srs)."""
    if os.path.exists(path):
        os.remove(path)
    datasource = driver.CreateDataSource(path)
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    return datasource, srs
+
+
def geometry_attribution(table_cols, datas):
    """Return 'finds' when the extraction comes from BaseFind, else 'cr'.

    Context Record extractions wrap every column identifier in a list
    (e.g. ['label'] instead of 'label') and every value in a list as well,
    hence the two lookup strategies.

    :param table_cols: column identifiers of the extracted data
    :param datas: extracted rows; the first value of each row is ''
    :return: 'finds' or 'cr'
    """
    # Getting the name (label) of the first extracted element
    try:
        id_label = table_cols.index('label')
        name = datas[0][id_label + 1]  # +1: first value of a row is ''
    except (ValueError, IndexError):
        id_label = table_cols.index(['label'])
        name = datas[0][id_label + 1][0]
    BaseFind = apps.get_model('archaeological_finds', 'BaseFind')
    # BUGFIX/perf: a direct existence query instead of iterating the whole
    # BaseFind table ordered by -pk.
    if BaseFind.objects.filter(label=name).exists():
        return 'finds'
    return 'cr'
+
+
def attributes_creation_finds_a(layer, col_names, table_cols):
    """Create one field per extracted column on the finds *layer*.

    The OGR field type is inferred from substrings of the column
    identifier; anything unrecognized falls back to a string field.

    :param layer: OGR layer receiving the fields
    :param col_names: field names to create (parallel to table_cols)
    :param table_cols: column identifiers used for type inference
    :return: the layer, with its fields created
    """
    integer_keys = ('index', 'order', 'quantity', 'taq', 'tpq', 'year')
    real_keys = ('_x', '_y', '_z', 'circumference', 'cost', 'depth',
                 'diameter', 'height', 'length', 'number', 'surface', 'side',
                 'thickness', 'value', 'volume', 'weight', 'width')
    bool_keys = ('large_area_prescription', 'is_complete', 'executed')
    for idx in range(len(col_names)):
        col = table_cols[idx]
        # Prevent missing values (case in some .gpkg)
        if col == '':
            continue
        if any(key in col for key in integer_keys):
            field_type = ogr.OFTInteger64
        elif any(key in col for key in real_keys):
            field_type = ogr.OFTReal
        # BUGFIX: test '_datetime' before '_date' — every identifier that
        # contains '_datetime' also contains '_date', so the OFTDateTime
        # branch was unreachable with the previous ordering.
        elif '_datetime' in col:
            field_type = ogr.OFTDateTime
        elif '_date' in col:
            field_type = ogr.OFTDate
        elif any(key in col for key in bool_keys):
            field_type = ogr.OFTBinary
        else:
            field_type = ogr.OFTString
        layer.CreateField(ogr.FieldDefn(col_names[idx], field_type))
    return layer
+
+
def populating_layer_finds(layer, table_cols, col_names, datas):
    """Add one feature per extracted row to the finds *layer*.

    The geometry of each feature is read from the coordinates of the
    matching BaseFind (z defaults to 0.0 when missing).

    :param layer: OGR layer to populate
    :param table_cols: column identifiers of the extracted data
    :param col_names: field names (parallel to table_cols)
    :param datas: extracted rows; the first value of each row is ''
    :return: list of distinct context record labels encountered, used to
        build the Context_records layer afterwards
    """
    n_cols = len(col_names)
    list_cr = []
    id_label = table_cols.index('label')  # hoisted out of the row loop
    BaseFind = apps.get_model('archaeological_finds', 'BaseFind')
    for data in datas:
        feature = ogr.Feature(layer.GetLayerDefn())
        for idx in range(n_cols):
            if col_names[idx] == '':
                continue
            # NOTE(review): this is an equality test, not a substring test —
            # columns such as "discovery_date" never match; confirm intended.
            if table_cols[idx] in ('_date', '_datetime'):
                try:
                    # +1 because the first value of each row is ''
                    feature.SetField(col_names[idx], data[idx + 1])
                except Exception:
                    # Fallback to a date-only value when parts are missing
                    feature.SetField(col_names[idx], data[idx + 1].year,
                                     data[idx + 1].month, data[idx + 1].day,
                                     0, 0, 0)
                continue
            if ('context_record__label' in table_cols[idx]
                    and data[idx + 1] not in list_cr):
                list_cr.append(data[idx + 1])
            feature.SetField(col_names[idx], str(data[idx + 1]))
        # Geometry from the matching BaseFind.
        # BUGFIX: read-only lookup — get_or_create used to insert bare
        # BaseFind rows as a side effect of a simple export.
        base_find = BaseFind.objects.filter(label=data[id_label + 1]).first()
        point = None
        try:
            point = ogr.Geometry(ogr.wkbPoint25D)
            point.AddPoint(float(base_find.x), float(base_find.y),
                           float(base_find.z))
        except (AttributeError, TypeError, ValueError):
            try:
                point = ogr.Geometry(ogr.wkbPoint25D)
                point.AddPoint(float(base_find.x), float(base_find.y), 0.0)
            except (AttributeError, TypeError, ValueError):
                point = None  # no usable coordinates: feature stays unlocated
        if point is not None:
            feature.SetGeometry(point)
        layer.CreateFeature(feature)
        feature = None
    return list_cr
+
+
def attributes_creation_finds_b(layer, list_crea):
    """Create the default finds fields, typing each one by its name.

    'Date' becomes an OGR date field, the coordinate columns become real
    fields and everything else is stored as a string.
    """
    for attribute in list_crea:
        if attribute == 'Date':
            field_type = ogr.OFTDate
        elif attribute in ('X', 'Y', 'Z'):
            field_type = ogr.OFTReal
        else:
            field_type = ogr.OFTString
        layer.CreateField(ogr.FieldDefn(attribute, field_type))
    return layer
+
+
def attributes_creation_cr_a(layer, list_crea):
    """Create one string field per default context record attribute."""
    for attribute in list_crea:
        layer.CreateField(ogr.FieldDefn(attribute, ogr.OFTString))
    return layer
+
+
def populating_layer_cr_a(layer, list_crea, list_cr):
    """Create one Context_records feature per label in *list_cr*.

    Each attribute is fetched best-effort from the related ContextRecord;
    'Null' is written when the related object or field is missing.

    :param layer: OGR layer to populate
    :param list_crea: default attribute names; the last one (WKT) feeds the
        geometry instead of a field
    :param list_cr: context record labels collected from the exported finds
    :return: list of distinct operation codes encountered
    """
    def _safe(getter):
        # Related objects may be absent: fall back to 'Null' like the
        # original per-attribute try/except blocks did.
        try:
            return getter()
        except Exception:
            return 'Null'

    list_ope = []
    ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord')
    for name in list_cr:
        # BUGFIX: read-only lookup — get_or_create used to insert context
        # records as a side effect of the export.  A missing record simply
        # yields 'Null' attributes via _safe.
        cr = ContextRecord.objects.filter(label=name).first()
        list_attributes = [
            _safe(lambda: cr.label),
            _safe(lambda: str(cr.operation.code_patriarche)),
            _safe(lambda: cr.town.numero_insee),
            _safe(lambda: cr.parcel.external_id),
            _safe(lambda: str(cr.unit)),
            _safe(lambda: cr.operation.interpretation),
            _safe(lambda: cr.description),
            _safe(lambda: cr.location),
            _safe(lambda: cr.documents.image),
            _safe(lambda: cr.datings.period),
            _safe(lambda: str(cr.activity)),
            _safe(lambda: str(cr.main_geodata.multi_polygon)),
        ]
        feature = ogr.Feature(layer.GetLayerDefn())
        # The last attribute (WKT) is used for the geometry, not a field.
        for idx in range(len(list_crea) - 1):
            try:
                feature.SetField(list_crea[idx], list_attributes[idx])
            except Exception:
                pass  # unwritable value: leave the field unset
        if list_attributes[1] not in list_ope:
            list_ope.append(list_attributes[1])
        try:
            geom = ogr.CreateGeometryFromWkt(
                str(list_attributes[-1]).split(';')[-1])
            feature.SetGeometry(geom)
        except Exception:
            pass  # no parsable WKT: feature stays unlocated
        layer.CreateFeature(feature)
        feature = None
    return list_ope
+
+
def attributes_creation_cr_b(layer, col_names, table_cols):
    """Create a string field for every extracted column whose identifier
    is not empty (empty slots occur in some .gpkg extractions)."""
    for name, col in zip(col_names, table_cols):
        if col != '':
            layer.CreateField(ogr.FieldDefn(name, ogr.OFTString))
    return layer
+
+
def populating_layer_cr_b(layer, table_cols, col_names, datas):
    """Add one Context_records feature per extracted row.

    The geometry is read from the multi_polygon of the matching
    ContextRecord (geodata first, then main_geodata).

    :param layer: OGR layer to populate
    :param table_cols: column identifiers (wrapped in lists for context
        record extractions, hence the ['label'] lookup)
    :param col_names: field names (parallel to table_cols)
    :param datas: extracted rows; the first value of each row is ''
    :return: list of distinct operation codes encountered
    """
    list_ope = []
    n_cols = len(col_names)
    id_label = table_cols.index(['label'])  # hoisted out of the row loop
    ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord')
    for data in datas:
        feature = ogr.Feature(layer.GetLayerDefn())
        for idx in range(n_cols):
            if col_names[idx] == '':
                continue
            if ('operation__code_patriarche' in table_cols[idx]
                    and data[idx + 1] not in list_ope):
                list_ope.append(data[idx + 1])
            # +1 because the first value of each row is ''
            feature.SetField(col_names[idx], str(data[idx + 1]))
        # BUGFIX: use the current row (data), not datas[0] — previously every
        # feature received the geometry of the first extracted record.
        name = data[id_label + 1]
        # BUGFIX: read-only lookup instead of get_or_create (no DB insert as
        # a side effect of the export).
        cr = ContextRecord.objects.filter(label=name).first()
        # BUGFIX: geom reset per feature — it used to leak from the previous
        # iteration when the current record had no geometry.
        geom = None
        try:
            geom = ogr.CreateGeometryFromWkt(
                str(cr.geodata.multi_polygon).split(';')[-1])
        except Exception:
            try:
                geom = ogr.CreateGeometryFromWkt(
                    str(cr.main_geodata.multi_polygon).split(';')[-1])
            except Exception:
                geom = None
        if geom is not None:
            feature.SetGeometry(geom)
        layer.CreateFeature(feature)
        feature = None
    return list_ope
+
+
def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea):
    """Adapt the model .qgs project text to the exported attribute names.

    Placeholder field names in the QGIS style ('champ_*', 'Test_choice')
    are replaced by the names actually written in the GeoPackages, falling
    back to default names when a column is absent from the export.

    :param qgs_path: path of the model QGIS project file
    :param table_cols: column identifiers of the extracted data
    :param col_names: attribute names written in the GeoPackages
    :param list_ope: distinct operation codes offered as value-map choices
    :param list_crea: ['cr'|'finds', default attribute names] from
        gpkg_creation
    :return: the modified project file content as a string
    """
    # Placeholder names in the style, matching column identifiers, and
    # fallback names used when the column is absent from the export
    list_ref = ['champ_date', 'champ_datetime', 'champ_x', 'champ_y', 'champ_z', 'champ_ue',
                'champ_ope', 'champ_insee', 'champ_parc', 'champ_geom']
    list_search = ['_date', '_datetime', 'base_finds__x', 'base_finds__y', 'base_finds__z', 'context_record__label',
                   'operation__code_patriarche', 'town__numero_insee', 'parcel__external_id',
                   'geodata__multi_polygon']
    list_auto = ['Date', 'Date/Temps', 'X', 'Y', 'Z', 'UE', 'Opération', 'INSEE_Commune', 'Parcelle', 'WKT']
    # BUGFIX: close the file handle (was open(...).read() with no close)
    with open(qgs_path, encoding='utf-8') as style_file:
        text = style_file.read()
    # Expose the operations linked to the exported records as choices
    if list_ope:
        new_text = ""
        for ope in list_ope:
            choice = ' <Option type="Map">\n <Option name="{}" value="{}" type="QString"/>\n </Option>\n'.format(ope, ope)
            new_text += choice
        old_text = ' <Option type="Map">\n <Option type="QString" name="Test_choice" value="Test_choice"/>\n </Option>\n'
        text = text.replace(old_text, new_text)
    else:
        text = text.replace("Test_choice", "Null")
    if list_crea[0] == 'finds':
        # Datas come from Context Records: column identifiers are wrapped
        # in lists, hence the col[0] / ['label'] lookups.
        for ref in list_ref:
            id_ref = list_ref.index(ref)
            new = ''
            for col in table_cols:
                if list_search[id_ref] in col[0]:
                    new = col_names[table_cols.index(col)]
            if new == '':
                new = list_auto[id_ref]
            text = text.replace(ref, new)
        # Default style names matching the default Finds attributes
        list_corr = ['champ_id', 'champ_ue', 'champ_date', 'champ_x', 'champ_y', 'champ_z', 'champ_matériaux',
                     'champ_desc', 'champ_media_finds', 'champ_wkt_modif']
        # Link between the Finds and Context Records layers
        id_label = table_cols.index(['label'])
        text = text.replace("champ_nom", col_names[id_label])
        if ['documents__image'] in table_cols:
            id_media = table_cols.index(['documents__image'])
            text = text.replace("champ_media_cr", col_names[id_media])
        # Replacement of the default names used for the Finds layer
        for old, new in zip(list_corr, list_crea[1]):
            text = text.replace(old, new)
    elif list_crea[0] == 'cr':
        for ref in list_ref:
            id_ref = list_ref.index(ref)
            new = ''
            for col in table_cols:
                if list_search[id_ref] in col:
                    # BUGFIX: keep the matched exported column name — the
                    # fallback test was mis-nested inside this loop and the
                    # match (id_new) was computed but discarded, so col_names
                    # was never used in this branch.
                    new = col_names[table_cols.index(col)]
            if new == '':
                new = list_auto[id_ref]
            text = text.replace(ref, new)
        # Default style names matching the default Context Records attributes
        list_corr = ['champ_nom', 'champ_ope', 'champ_insee', 'champ_parc', 'champ_type', 'champ_occup', 'champ_desc',
                     'champ_loca', 'champ_media_cr', 'champ_periode', 'champ_acti', 'champ_geom']
        # Identifiers may or may not be wrapped in lists
        try:
            id_label = table_cols.index(['label'])
        except ValueError:
            id_label = table_cols.index('label')
        # Link between the Finds and Context Records layers
        text = text.replace('champ_id', col_names[id_label])
        if 'documents__image' in table_cols:
            try:
                id_media = table_cols.index(['documents__image'])
            except ValueError:
                id_media = table_cols.index('documents__image')
            text = text.replace("champ_media_finds", col_names[id_media])
        # Specific case to assure the good registration of the z coordinate
        if 'geodata__point_3d' in table_cols:
            id_new = table_cols.index('geodata__point_3d')
            if any('__z' in elem for elem in table_cols):
                ref = "champ_wkt_modif"
            else:
                ref = "champ_wkt_simple"
            text = text.replace(ref, col_names[id_new])
        # Replacement of the default names used for the Context Records layer
        for old, new in zip(list_corr, list_crea[1]):
            text = text.replace(old, new)
    return text
+
+
def adapt_distant_search(params, src, model):
if "search_vector" in params and params["search_vector"]:
search_vector = params["search_vector"][0]