Diffstat (limited to 'ishtar_common/views_item.py')
-rw-r--r-- | ishtar_common/views_item.py | 582 |
1 file changed, 465 insertions, 117 deletions
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py index 001a1437f..5b7c136fd 100644 --- a/ishtar_common/views_item.py +++ b/ishtar_common/views_item.py @@ -3223,7 +3223,7 @@ def get_item( elif data_type == "gpkg": # I. Preparations root = settings.LIB_BASE_PATH + "ishtar_common/qfield/" - # 1) Getting all the column names (copy from below) + # 1) Getting all the columns names (copy from below) if col_names: col_names = [name for name in col_names] else: @@ -3248,18 +3248,30 @@ def get_item( ) continue col_names.append(str(field.verbose_name)) - # 2) Creation of the .gpkg - finds, cr, list_ope, list_crea = gpkg_creation(root, table_cols, col_names, datas) + # 2) Gestion of the project to use + if str(model._meta) == 'archaeological_operations.archaeologicalsite': + sites, finds, cr, list_ope, list_crea = gpkg_creation_sites(root, table_cols, col_names, datas) + # Modification of the source to access the desired project depending on source of the data + source = 'specific' + else: + finds, cr, list_ope, list_crea = gpkg_creation(model, root, table_cols, col_names, datas) + # Modification of the source to access the desired project depending on source of the data + source = 'model' # 3) Preparations for the modification of the style in the .qgs file - qgs_path = os.path.join(root, 'model', 'Prospections.qgs') + qgs_path = os.path.join(root, source, 'Prospections.qgs') new_qgs = os.path.join(root, 'export', 'Prospections.qgs') if os.path.exists(new_qgs): os.remove(new_qgs) - text = modification_style(qgs_path, table_cols, col_names, list_ope, list_crea) + # Use of a specific style modifier depending on the project / source of the data + if source == 'specific': + text = modification_style_sites(qgs_path, table_cols, col_names, list_ope, list_crea) + else: + text = modification_style(qgs_path, table_cols, col_names, list_ope, list_crea) + # Creation of the new modified project with open(new_qgs, 'w', encoding='utf-8') as file: file.write(text) # II. Duplication of the .zip for export - project = os.path.join(root, 'model', 'Prospections_qfield.zip') + project = os.path.join(root, source, 'Prospections_qfield.zip') duplicate = os.path.join(root, 'export', 'Prospections_qfield_export.zip') if os.path.exists(duplicate): os.remove(duplicate) @@ -3267,100 +3279,175 @@ def get_item( # III. 
Moving the .gpkg in a copy of the Qfield test project with ZipFile(duplicate, 'a') as zip_file: # Adding the .gpkg to the .zip + if source == 'specific': + # Add only if created + zip_file.write(sites, os.path.basename(sites)) zip_file.write(finds, os.path.basename(finds)) zip_file.write(cr, os.path.basename(cr)) zip_file.write(new_qgs, os.path.basename(new_qgs)) # Closing of the .zip zip_file.close() response = HttpResponse(open(duplicate, 'rb'), content_type='application/zip') - response['Content-Disposition'] = 'attachment; filename="Qfield_prospections.zip"' + n = timezone.now() + filename = "Qfield_prospections_{}.zip".format(n.strftime("%Y%m%d-%H%M%S")) + response['Content-Disposition'] = 'attachment; filename={}'.format(filename) return response return HttpResponse('{}', content_type='text/plain') return func -def gpkg_creation(root, table_cols, col_names, datas): - # 1) Creation of the Geopackage - finds = os.path.join(root, 'export', 'Finds.gpkg') - # Verification to delete it if already existing - if os.path.exists(finds): - os.remove(finds) +def gpkg_creation(model, root, table_cols, col_names, datas): + """ + :param model: Table from the query + :param root: Path to the folder to create the geopackage + :param table_cols: List of the columns used in the query + :param col_names: Name of the columns in the new geopackage + :param datas: Data from the query + :function: Creation of the Finds and Context_Records geopackages when the query come from one of these two tables + :return finds: Geopackage for the Finds + :return cr: Geopackage for the Context_Records + :return list_ope: List of the different operations linked to the Finds and Context_Records + :return list_cr: List of the labels/names of the Context_Records used + """ + # Preparation of important values and parameters for the geopackages + finds = '' + cr = '' + list_ope = [] + list_crea = [] driver = ogr.GetDriverByName('GPKG') - datasource = driver.CreateDataSource(finds) srs = osr.SpatialReference() srs.ImportFromEPSG(4326) - # 2) Verufucation of the origin of the export - origin = geometry_attribution(table_cols, datas) - # 3a) Case where the extraction come from Finds - if origin == 'finds': - # 4a) Creation of the finds layer and its attributes + # I. 
Case where the extraction come from Finds + if str(model._meta) == 'archaeological_finds.find': + # 1) Creation of the Finds geopackage + finds = os.path.join(root, 'export', 'Finds.gpkg') + # Verification to delete it if already existing + if os.path.exists(finds): + os.remove(finds) + # 2) Creation of the finds layer and its attributes + datasource = driver.CreateDataSource(finds) layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D) - layer = attributes_creation_finds_manual(layer, col_names, table_cols) - # 5a) Populating the finds layer with the datas - list_cr = populating_layer_finds(layer,table_cols,col_names,datas) + layer = attributes_creation_finds_query(layer, col_names, table_cols) + # 4a) Populating the finds layer with the datas + list_cr = populating_layer_finds_query(layer,table_cols,col_names,datas) datasource = None - # 6a) Creation of the Context Records file + # 3) Creation of the Context Records file cr = os.path.join(root, 'export', 'Context_records.gpkg') # Verification to delete it if already existing if os.path.exists(cr): os.remove(cr) datasource = driver.CreateDataSource(cr) - srs = osr.SpatialReference() - srs.ImportFromEPSG(4326) - # 7a) Creation of the cr's layer and a list of default attrbutes + # 4) Creation of the Context_Records layer and a list of default attributes layer = datasource.CreateLayer('Context_records', srs, ogr.wkbMultiPolygon) - list_crea = ['Unité_Enregistrement', 'Opération', 'INSEE_Commune', 'Section', 'Parcelle', 'Type', 'Interprétation', - 'Description', 'Localisation', 'Media', 'Periode', 'Type_Activité', 'WKT'] - layer = attributes_creation_cr_automatic(layer, list_crea) - # 8a) Populating the cr's layer with datas from the cr of the extracted finds - list_ope = populating_layer_cr_automatic(layer, list_crea, list_cr) + list_crea = ['Unité_Enregistrement', 'Opération', 'INSEE_Commune', 'Type', 'Interprétation', + 'Description', 'Localisation', 'Media', 'Periode', 'Type_Activité', 'WKT', 'Infos_Parcelle'] + layer = attributes_creation_cr_default(layer, list_crea) + # 5) Populating the Context_Records layer with datas from the Context_Records of the extracted finds + list_ope = populating_layer_cr_default(layer, list_crea, list_cr) datasource = None - # 9a) Preparation of a list of the attributes names for the style modifications + # 6) Preparation of a list of the attributes names for the style modifications list_crea = ['cr', list_crea] - else: - # 4b) Creation of the finds layer and a list of default attrbutes - layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D) - list_crea = ['Identifiant', 'UE', 'Date', 'X', 'Y', 'Z', 'Matériaux', 'Description', 'Media', 'WKT_point'] - attributes_creation_finds_automatic(layer, list_crea) - datasource = None - # 5b) Creation of the Context Records file + # II. 
Case where the extraction come from Context_Recods + elif str(model._meta) == 'archaeological_context_records.contextrecord': + # 1) Creation of the Context Records geopackage cr = os.path.join(root, 'export', 'Context_records.gpkg') # Verification to delete it if already existing if os.path.exists(cr): os.remove(cr) datasource = driver.CreateDataSource(cr) - srs = osr.SpatialReference() - srs.ImportFromEPSG(4326) - # 6a) Creation of the cr's layer and its attributes + # 2) Creation of the Context_Records layer and its attributes layer = datasource.CreateLayer('Context_records', srs, ogr.wkbMultiPolygon) - layer = attributes_creation_cr_manual(layer, col_names, table_cols) - # 7b) Populating the cr's layer with the datas - list_ope = populating_layer_cr_manual(layer, table_cols, col_names, datas) - # 8b) Preparation of a list of the attributes names for the style modifications + layer = attributes_creation_cr_query(layer, col_names, table_cols) + # 3) Populating the Finds layer with the datas + list_ope, list_cr = populating_layer_cr_query(layer, table_cols, col_names, datas) + datasource = None + # 4) Creation of the Finds geopackage + finds = os.path.join(root, 'export', 'Finds.gpkg') + # Verification to delete it if already existing + if os.path.exists(finds): + os.remove(finds) + datasource = driver.CreateDataSource(finds) + layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D) + list_crea = ['Identifiant', 'UE', 'Date', 'X', 'Y', 'Z', 'Matériaux', 'Description', 'Media', 'WKT_point', 'Infos_Parcelle'] + attributes_creation_finds_default(layer, list_crea) + # 5) Populating the finds layer with the datas + populating_layer_finds_default(layer, list_crea, list_cr) + # 6) Preparation of a list of the attributes names for the style modifications list_crea = ['finds', list_crea] return finds, cr, list_ope, list_crea -def geometry_attribution (table_cols, datas): - # Getting the name of the first element, two cases because in the case of Context Records, all elements are in lists - try: - id_label = table_cols.index('label') - name = datas[0][id_label + 1] - except: - id_label = table_cols.index(['label']) - name = datas[0][id_label + 1][0] - # Recuperation of the BaseFind datas - BaseFind = apps.get_model('archaeological_finds', 'BaseFind') - references = BaseFind.objects.order_by('-pk') - # Verification if the extracted elements come from BaseFind or, by default, from Context Records - if any(elem.label == name for elem in references): - origin = 'finds' - else: - origin = 'cr' - return origin - - -def attributes_creation_finds_manual(layer, col_names, table_cols): +def gpkg_creation_sites(root, table_cols, col_names, datas): + """ + :param root: Path to the folder to create the geopackage + :param table_cols: List of the columns used in the query + :param col_names: Name of the columns in the new geopackage + :param datas: Data from the query + :function: Specific version for the creation of the needed geopackages when the query come from the + Archaeological_Sites table + :return sites: Geopackage for the Sites + :return finds: Geopackage for the Finds + :return cr: Geopackage for the Context_Records + :return list_ope: List of the different operations linked to the Finds and Context_Records + :return list_cr: List of the labels/names of the Context_Records used + """ + # Preparation of important values and parameters for the geopackages + finds = '' + cr = '' + list_ope = [] + list_crea = [] + driver = ogr.GetDriverByName('GPKG') + srs = osr.SpatialReference() + 
srs.ImportFromEPSG(4326) + # 1) Creation of the sites layer + sites = os.path.join(root, 'export', 'Sites.gpkg') + if os.path.exists(sites): + os.remove(sites) + datasource = driver.CreateDataSource(sites) + layer = datasource.CreateLayer('Sites', srs, ogr.wkbPoint) + # Creation of the attributes + layer = attributes_creation_sites_query(layer, col_names, table_cols) + # Creation of the entities + list_ope, list_cr = populating_layer_sites_query(layer, table_cols, col_names, datas) + # 2) Creation of the Context_Records layer + cr = os.path.join(root, 'export', 'Context_records.gpkg') + # Verification to delete it if already existing + if os.path.exists(cr): + os.remove(cr) + datasource = driver.CreateDataSource(cr) + layer = datasource.CreateLayer('Context_records', srs, ogr.wkbMultiPolygon) + # Creation of the attributes + list_a = ['Unité_Enregistrement', 'Opération', 'INSEE_Commune', 'Type', 'Interprétation', 'Description', + 'Localisation', 'Media', 'Periode', 'Type_Activité', 'WKT', 'Infos_Parcelle'] + layer = attributes_creation_cr_default(layer, list_a) + # # Creation of the entities + populating_layer_cr_default(layer, list_a, list_cr) + datasource = None + # 3) Creation of the finds layer + finds = os.path.join(root, 'export', 'Finds.gpkg') + # Verification to delete it if already existing + if os.path.exists(finds): + os.remove(finds) + datasource = driver.CreateDataSource(finds) + layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D) + # Creation of the attributes + list_b = ['Identifiant', 'UE', 'Date', 'X', 'Y', 'Z', 'Matériaux', 'Description', 'Media', 'WKT_point', 'Infos_Parcelle'] + layer = attributes_creation_finds_default(layer, list_b) + # Creation of the entities + populating_layer_finds_default(layer, list_b, list_cr) + # Recuperation of all created attributes + list_crea = list_a + list_b + return sites, finds, cr, list_ope, list_crea + + +def attributes_creation_finds_query(layer, col_names, table_cols): + """ + :param layer: Finds layer from the linked geopackage + :param col_names: Name of the columns in the new layer + :param table_cols: List of the columns used in the query + :function: Creation of the attributes of the Finds layer with the information from the exporter + :return layer: Finds layer with attributes + """ # print(table_cols) # debugtest # print(col_names) # debugtest # print(datas) # debugtest @@ -3388,7 +3475,15 @@ def attributes_creation_finds_manual(layer, col_names, table_cols): return layer -def populating_layer_finds(layer, table_cols, col_names, datas): +def populating_layer_finds_query(layer, table_cols, col_names, datas): + """ + :param layer: Finds layer from the linked geopackage with attributes + :param table_cols: List of the columns used in the query + :param col_names: Name of the columns in the new layer + :param datas: Data from the query + :function: Population of the Finds layer using all the data from the query + :return list_cr: List of all the Context_Records linked to the Finds from the query + """ max = len(col_names) list_cr = [] # Looping on all the datas extracted to create features @@ -3412,8 +3507,8 @@ def populating_layer_finds(layer, table_cols, col_names, datas): except: # Second version if some values are missing # +1 because the first value in the attributes is '' - feature.SetField(col_names[idx], data[idx + 1].year, data[idx + 1].month, data[idx + 1].day, 0, - 0, 0) + feature.SetField(col_names[idx], data[idx + 1].year, data[idx + 1].month, + data[idx + 1].day, 0, 0, 0) elif 'context_record__label' in 
table_cols[idx] and data[idx + 1] not in list_cr: list_cr.append(data[idx + 1]) feature.SetField(col_names[idx], str(data[idx + 1])) @@ -3435,7 +3530,11 @@ def populating_layer_finds(layer, table_cols, col_names, datas): point = ogr.Geometry(ogr.wkbPoint25D) point.AddPoint(float(object.x), float(object.y), float(0.0)) except: - pass + try: + point = ogr.CreateGeometryFromWkt(str(object.main_geodata.point_3d).split(';')[-1]) + except: + point = '' + print(point) if point != '': feature.SetGeometry(point) # Addition of the new feature @@ -3444,7 +3543,13 @@ def populating_layer_finds(layer, table_cols, col_names, datas): return list_cr -def attributes_creation_finds_automatic(layer, list_crea): +def attributes_creation_finds_default(layer, list_crea): + """ + :param layer: Finds layer from the linked geopackage + :param list_crea: Name of the columns by default + :function: Population of the Finds layer using default attributes + :return layer: Finds layer with attributes + """ # Gestion of specific types of attributes for the default values for attribute in list_crea: if attribute == 'Date': @@ -3456,15 +3561,105 @@ def attributes_creation_finds_automatic(layer, list_crea): return layer -def attributes_creation_cr_automatic(layer, list_crea): +def populating_layer_finds_default(layer, list_crea, list_cr): + """ + :param layer: Finds layer from the linked geopackage with attributes + :param list_crea: Name of the columns by default + :param list_cr: List of all the Context_Records linked to the Finds from the query + :function: Population of the Finds layer using all the data from a specific query + :return layer: Populated Finds layer + """ + ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord') + BaseFind = apps.get_model('archaeological_finds', 'BaseFind') + for name in list_cr: + cr, __ = ContextRecord.objects.get_or_create(label=name) + finds = list(BaseFind.objects.filter(context_record=cr.id)) + for find in finds: + if str(find.context_record).split(' | ')[-1] in list_cr: + list_attributes = [] + try:list_attributes.append(find.label) + except:list_attributes.append('') + try:list_attributes.append(str(find.context_record).split(' | ')[-1]) + except:list_attributes.append('') + try:list_attributes.append(find.discovery_date) + except:list_attributes.append('') + try:list_attributes.append(find.x) + except:list_attributes.append('') + try:list_attributes.append(find.y) + except:list_attributes.append('') + try:list_attributes.append(find.z) + except:list_attributes.append('') + try:list_attributes.append(find.material_types) + except:list_attributes.append('') + try:list_attributes.append(find.description) + except:list_attributes.append('') + try:list_attributes.append(find.document.image) + except:list_attributes.append('') + try:list_attributes.append(find.main_geodata.point_3d) + except:list_attributes.append('') + try:list_attributes.append(find.parcel.external_id) + except:list_attributes.append('') + # Creation of a new feature + feature = ogr.Feature(layer.GetLayerDefn()) + for idx in range(0, len(list_crea)): + if idx == 2 : + # Gestion of the dates + try: + # First version if it has all the data necessary for an ogr.OFTDateTime + feature.SetField(list_crea[idx], list_attributes[idx]) + except: + # Second version if some values are missing + feature.SetField(list_crea[idx], int(list_attributes[idx].year), + int(list_attributes[idx].month), int(list_attributes[idx].day), 0, 0, 0.0, 0) + elif idx in [3,4,5]: + # Gestion of the coordinates + try: + 
feature.SetField(list_crea[idx], float(list_attributes[idx])) + except: + pass + else: + feature.SetField(list_crea[idx], str(list_attributes[idx])) + try: + point = ogr.Geometry(ogr.wkbPoint25D) + point.AddPoint(float(find.x), float(find.y), float(find.z)) + except: + try: + point = ogr.Geometry(ogr.wkbPoint25D) + point.AddPoint(float(find.x), float(find.y), float(0.0)) + except: + try: + point = ogr.CreateGeometryFromWkt(str(find.main_geodata.point_3d).split(';')[-1]) + except: + point = '' + if point != '': + feature.SetGeometry(point) + layer.CreateFeature(feature) + feature = None + return layer + + +def attributes_creation_cr_default(layer, list_crea): + """ + :param layer: Context_Records layer from the linked geopackage + :param list_crea: Name of the columns by default + :function: Population of the Context_Records layer using default attributes + :return layer: Populated Context_Records layer + """ for idx in range(0, len(list_crea)): layer.CreateField(ogr.FieldDefn(list_crea[idx], ogr.OFTString)) return layer -def populating_layer_cr_automatic(layer, list_crea, list_cr): +def populating_layer_cr_default(layer, list_crea, list_cr): + """ + :param layer: Context_Records layer from the linked geopackage with attributes + :param list_crea: Name of the columns by default + :param list_cr: List of all the Context_Records linked to the Finds from the query + :function: Population of the Finds layer using all the data from a specific query + :return list_ope: List of all the Operations linked to the Context_Records from the query + """ list_ope = [] - # Query in the DataBase to get information on the Context Records of the Finds exported + # Query in the DataBase to get information on the Context_Records of the Finds exported ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord') for name in list_cr: cr, __ = ContextRecord.objects.get_or_create( @@ -3472,40 +3667,43 @@ def populating_layer_cr_automatic(layer, list_crea, list_cr): ) list_attributes = [] try:list_attributes.append(cr.label) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(str(cr.operation.code_patriarche)) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(cr.town.numero_insee) - except:list_attributes.append('Null') - try:list_attributes.append(cr.parcel.external_id) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(str(cr.unit)) - except:list_attributes.append('Null') - try:list_attributes.append(cr.operation.interpretation) - except:list_attributes.append('Null') + except:list_attributes.append('') + try:list_attributes.append(cr.interpretation) + except:list_attributes.append('') try:list_attributes.append(cr.description) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(cr.location) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(cr.documents.image) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(cr.datings.period) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(str(cr.activity)) - except:list_attributes.append('Null') + except:list_attributes.append('') try:list_attributes.append(str(cr.main_geodata.multi_polygon)) - except:list_attributes.append('Null') + except:list_attributes.append('') + 
try:list_attributes.append(cr.parcel.external_id) + except:list_attributes.append('') # Creation of a new feature - feature = ogr.Feature(layer.GetLayerDefn()) - for idx in range(0, len(list_crea)-1): + feature = ogr.Feature(layer.GetLayerDefn( + )) + for idx in range(0, len(list_crea)): try: feature.SetField(list_crea[idx], list_attributes[idx]) except: pass - if list_attributes[1] not in list_ope: - list_ope.append(list_attributes[1]) + # Completion of the list of Operations linked to the exported Context_Records + if cr.operation.code_patriarche not in list_ope: + list_ope.append(cr.operation.code_patriarche) + # Gestion of the geometry try: - geom = ogr.CreateGeometryFromWkt(str(list_attributes[-1]).split(';')[-1]) + geom = ogr.CreateGeometryFromWkt(str(cr.main_geodata.multi_polygon).split(';')[-1]) feature.SetGeometry(geom) except: pass @@ -3514,7 +3712,14 @@ def populating_layer_cr_automatic(layer, list_crea, list_cr): return list_ope -def attributes_creation_cr_manual(layer, col_names, table_cols): +def attributes_creation_cr_query(layer, col_names, table_cols): + """ + :param layer: Context_Records layer from the linked geopackage + :param col_names: Name of the columns in the new layer + :param table_cols: List of the columns used in the query + :function: Creation of the attributes of the Context_Records layer with the data from the exporter + :return layer: Layer with attributes + """ for idx in range(0, len(col_names)): if table_cols[idx] != '': # print(table_cols[idx]) # debugtest @@ -3523,13 +3728,23 @@ def attributes_creation_cr_manual(layer, col_names, table_cols): return layer -def populating_layer_cr_manual(layer, table_cols, col_names, datas): +def populating_layer_cr_query(layer, table_cols, col_names, datas): + """ + :param layer: Context_Records layer from the linked geopackage with attributes + :param table_cols: List of the columns used in the query + :param col_names: Name of the columns in the new layer + :param datas: Data from the query + :function: Population of the Context_Records layer using all the data from the query + :return list_ope: List of all the Operations linked to the Context_Records from the query + """ #print(table_cols) #debugtest #print(col_names) #debugtest #print(datas) #debugtest list_ope = [] + list_cr = [] geom = '' max = len(col_names) + # Looping on all the datas extracted to create features for data in datas: # Creation of a new feature feature = ogr.Feature(layer.GetLayerDefn()) @@ -3542,6 +3757,7 @@ def populating_layer_cr_manual(layer, table_cols, col_names, datas): feature.SetField(col_names[idx], str(data[idx + 1])) id_label = table_cols.index(['label']) name = datas[0][id_label + 1] + list_cr.append(name) ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord') cr, __ = ContextRecord.objects.get_or_create( label=name @@ -3557,26 +3773,120 @@ def populating_layer_cr_manual(layer, table_cols, col_names, datas): feature.SetGeometry(geom) layer.CreateFeature(feature) feature = None - return list_ope + return list_ope, list_cr + + +def attributes_creation_sites_query(layer, col_names, table_cols): + """ + :param layer: Sites layer from the linked geopackage + :param col_names: Name of the columns in the new layer + :param table_cols: List of the columns used in the query + :function: Creation of the attributes of the Sites layer with the data from the exporter + :return layer: Layer with attributes + """ + for idx in range(0, len(col_names)): + if table_cols[idx] != '': + # Gestion of the attribute's type + if 
table_cols[idx] in ['geodata__x', 'geodata__y']: + layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTReal)) + else: + layer.CreateField(ogr.FieldDefn(col_names[idx], ogr.OFTString)) + return layer + + +def populating_layer_sites_query(layer, table_cols, col_names, datas): + """ + :param layer: Sites layer from the linked geopackage with attributes + :param table_cols: List of the columns used in the query + :param col_names: Name of the columns in the new layer + :param datas: Data from the query + :function: Population of the Sites layer using all the data from the query + :return list_cr: List of all the Context_Records linked to the Sites from the query + """ + max = len(col_names) + list_ope = [] + # Looping on all the datas extracted to create features + for data in datas: + # Creation of a new feature + feature = ogr.Feature(layer.GetLayerDefn()) + # Looping on the attributes to add them to the feature + for idx in range(0, max): + if col_names[idx] != '': + # print(table_cols[idx]) # debugtest + # print(data[idx + 1]) # debugtest + if table_cols[idx] == ['operations__code_patriarche'] and data[idx + 1] not in list_ope: + list_ope.append(data[idx + 1]) + feature.SetField(col_names[idx], str(data[idx + 1])) + elif table_cols[idx] in [['geodata__x'], ['geodata__y'], ['geodata__point_2d']]: + feature.SetField(col_names[idx], str(data[idx + 1]).split(' & ')[-1]) + else: + # +1 because the first value in the attributes is '' + feature.SetField(col_names[idx], str(data[idx + 1])) + ArchaeologicalSite = apps.get_model("archaeological_operations", "ArchaeologicalSite") + try: + id_label = table_cols.index(['name']) + label = data[id_label + 1] + object = ArchaeologicalSite.objects.filter( + name=label, + ) + except: + id_label = table_cols.index(['reference']) + label = data[id_label + 1] + object = ArchaeologicalSite.objects.filter( + reference=label, + ) + # Preparations for the geometry + print(object[0],object[0].main_geodata.x,object[0].main_geodata.y,object[0].main_geodata.point_2d) + try: + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(float(object[0].main_geodata.x), float(object[0].main_geodata.y)) + except: + try: + point = ogr.CreateGeometryFromWkt(str(object[0].main_geodata.point_2d).split(';')[-1]) + except: + point = '' + if point != '': + feature.SetGeometry(point) + # Addition of the new feature + layer.CreateFeature(feature) + feature = None + ContextRecord = apps.get_model('archaeological_context_records', 'ContextRecord') + # Completion of the list of Context_Records linked to the extracted Sites + list_cr = [] + for elem in list_ope: + if elem != '': + search = ContextRecord.objects.all() + for cr in search: + if elem in str(cr) and cr.label not in list_cr: + list_cr.append(cr.label) + return list_ope, list_cr def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): + """ + :param qgs_path: Path to the QGIS project, containing the layers style + :param table_cols: List of the columns used in the query to spot specific ones + :param col_names: Name of the columns in the new layer to add their name to the style of the layer + :param list_ope: List of the Operations linked to the entities from the query, to add them as a list + :param list_crea: List of created attributes for the Finds or Context_Records layers + :function: Modification of the QGIS project style to assure the autocompletion/automations for some attributes + :return text: Modified QGIS project + """ # Lists of default names in the style, attribut names of the datas and new 
default names - list_ref = ['champ_date', 'champ_datetime', 'champ_x', 'champ_y', 'champ_z', 'champ_ue', - 'champ_ope', 'champ_insee', 'champ_section', 'champ_parc', 'champ_geom'] + list_ref = ['finds_date', 'finds_time', 'finds_x', 'finds_y', 'finds_z', 'finds_cr', 'finds_parcel', + 'cr_operation', 'cr_insee', 'cr_section', 'cr_parcel', 'cr_full_parcel', 'cr_wkt'] list_search = ['_date', '_datetime', 'base_finds__x', 'base_finds__y', 'base_finds__z', 'context_record__label', - 'operation__code_patriarche', 'town__numero_insee', 'parcel__section', 'parcel__parcel_number', - 'geodata__multi_polygon'] - list_auto = ['Date', 'Date/Temps', 'X', 'Y', 'Z', 'UE', 'Opération', 'INSEE_Commune', 'Section', 'Parcelle', 'WKT'] + 'parcel__external_id', 'operation__code_patriarche', 'town__numero_insee', 'parcel__section', + 'parcel__parcel_number', 'parcel__external_id', 'geodata__multi_polygon'] # Opening of the style text = open(qgs_path, encoding='utf-8').read() # Adding the different Operations linked of the Contexts Records and/or Finds exported to a list of possible values if len(list_ope) > 0: new_text = "" for ope in list_ope: - choice = ' <Option type="Map">\n <Option name="{}" value="{}" type="QString"/>\n </Option>\n'.format(ope, ope) + choice = '<Option type="Map">\n <Option name="{}" value="{}" type="QString"/>\n </Option>\n'.format(ope, ope) new_text += choice - old_text = ' <Option type="Map">\n <Option type="QString" value="Test_choice" name="Test_choice"/>\n </Option>\n' + old_text = '<Option type="Map">\n <Option value="Default_value" name="Default_value" type="QString"/>\n </Option>\n' text = text.replace(old_text, new_text) else: text = text.replace("Test_choice", "Null") @@ -3589,21 +3899,19 @@ def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): if col != '' and list_search[id_ref] in col[0]: id_new = table_cols.index(col) new = col_names[id_new] - if new == '': - new = list_auto[id_ref] text = text.replace(ref, new) # List of corresponding default names in the style linked to the default names used for the Finds - list_corr = ['champ_id', 'champ_ue', 'champ_date', 'champ_x', 'champ_y', 'champ_z', 'champ_matériaux', - 'champ_desc', 'champ_media_finds', 'champ_wkt_modif'] + list_corr = ['finds_id', 'finds_cr', 'finds_date', 'finds_x', 'finds_y', 'finds_z', 'find_matériaux', + 'cr_description', 'finds_media', 'finds_wkt_modif', 'finds_parcel'] # Gestion of the link between the Finds and Context Records layers id_label = table_cols.index(['label']) new = col_names[id_label] - text = text.replace("champ_nom", new) + text = text.replace("cr_name", new) if ['documents__image'] in table_cols: id_media = table_cols.index(['documents__image']) # Gestion of the link between the Finds and Context Records layers new = col_names[id_media] - text = text.replace("champ_media_cr", new) + text = text.replace("cr_media", new) # Replacement of the values from the default names used for the Finds n = 0 for elem in list_crea[1]: @@ -3619,12 +3927,10 @@ def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): if col != '' and list_search[id_ref] in col: id_new = table_cols.index(col) new = col_names[id_new] - if new == '': - new = list_auto[id_ref] text = text.replace(ref, new) # List of corresponding default names in the style linked to the default names used for the Finds - list_corr = ['champ_nom', 'champ_ope', 'champ_insee', 'champ_section', 'champ_parc', 'champ_type', 'champ_occup', 'champ_desc', - 'champ_loca', 'champ_media_cr', 'champ_periode', 
'champ_acti', 'champ_geom'] + list_corr = ['cr_name', 'cr_operation', 'cr_insee', 'cr_type', 'cr_occupation', 'cr_description', + 'cr_localisation', 'cr_media', 'cr_periode', 'cr_activity', 'cr_wkt', 'cr_full_parcel'] # Test in case the all names of attributes are in lists try: id_label = table_cols.index(['label']) @@ -3632,7 +3938,7 @@ def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): id_label = table_cols.index('label') # Gestion of the link between the Finds and Context Records layers new = col_names[id_label] - text = text.replace('champ_id', new) + text = text.replace('finds_id', new) if 'documents__image' in table_cols: try: id_media = table_cols.index(['documents__image']) @@ -3640,15 +3946,15 @@ def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): id_media = table_cols.index('documents__image') # Gestion of the link between the Finds and Context Records layers new = col_names[id_media] - text = text.replace("champ_media_finds", new) + text = text.replace("finds_media", new) # Specific case to assure the good registration of the z coordinate if 'geodata__point_3d' in table_cols: id_new = table_cols.index('geodata__point_3d') if any('__z' in elem for elem in table_cols): - ref = "champ_wkt_modif" + ref = "finds_wkt_modif" new = col_names[id_new] else: - ref = "champ_wkt_simple" + ref = "finds_wkt_simple" new = col_names[id_new] text = text.replace(ref, new) # Replacement of the values from the default names used for the Context Records @@ -3662,6 +3968,48 @@ def modification_style(qgs_path, table_cols, col_names, list_ope, list_crea): return text +def modification_style_sites(qgs_path, table_cols, col_names, list_ope, list_crea): + """ + :param qgs_path: Path to the QGIS project, containing the layers style + :param table_cols: List of the columns used in the query to spot specific ones + :param col_names: Name of the columns in the new layer to add their name to the style of the layer + :param list_ope: List of the Operations linked to the entities from the query, to add them as a list + :param list_crea: List of created attributes for the Finds and Context_Records layers + :function: Modification of the QGIS project style to assure the autocompletion/automations for some attributes + :return text: Modified QGIS project + """ + list_ref = ['sites_operation', 'sites_parcel', 'sites_insee', 'sites_x', 'sites_y', 'sites_wkt'] + list_search = ['operations__code_patriarche', 'parcel__external_id', 'towns__numero_insee', 'geodata__x', + 'geodata__y', 'geodata__point_2d'] + # Opening of the style + text = open(qgs_path, encoding='utf-8').read() + # Adding the different Operations linked of the Contexts Records and/or Finds exported to a list of possible values + if len(list_ope) > 0: + new_text = "" + for ope in list_ope: + choice = '<Option type="Map">\n <Option name="{}" value="{}" type="QString"/>\n </Option>\n'.format(ope, ope) + new_text += choice + old_text = '<Option type="Map">\n <Option value="Default_value" name="Default_value" type="QString"/>\n </Option>\n' + text = text.replace(old_text, new_text) + else: + text = text.replace("Test_choice", 'None') + for ref in list_ref: + id_ref = list_ref.index(ref) + new = '' + for col in table_cols: + if col != '' and list_search[id_ref] in col: + id_new = table_cols.index(col) + new = col_names[id_new] + text = text.replace(ref, new) + list_ref = ['cr_name', 'cr_operation', 'cr_insee', 'cr_type', 'cr_occupation', 'cr_description', 'cr_localisation', + 'cr_media', 'cr_periode', 
'cr_activity', 'cr_wkt', 'cr_full_parcel', 'finds_id', 'finds_cr', + 'finds_date', 'finds_x', 'finds_y', 'finds_z', 'find_matériaux', 'cr_description', 'finds_media', + 'finds_wkt_modif', 'finds_parcel'] + for id in range(0, len(list_crea)): + text = text.replace(list_ref[id], list_crea[id]) + return text + + def adapt_distant_search(params, src, model): if "search_vector" in params and params["search_vector"]: search_vector = params["search_vector"][0] |
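The new gpkg_creation* helpers all follow the same OGR pattern: create a GeoPackage datasource, add a layer with an EPSG:4326 spatial reference, declare fields, then write one feature per record and release the datasource. A minimal standalone sketch of that pattern (the path and field name are illustrative and do not appear in the patch):

    from osgeo import ogr, osr

    driver = ogr.GetDriverByName('GPKG')
    datasource = driver.CreateDataSource('/tmp/Finds.gpkg')
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    layer = datasource.CreateLayer('Finds', srs, ogr.wkbPoint25D)
    layer.CreateField(ogr.FieldDefn('Identifiant', ogr.OFTString))

    feature = ogr.Feature(layer.GetLayerDefn())
    feature.SetField('Identifiant', 'F-001')
    point = ogr.Geometry(ogr.wkbPoint25D)
    point.AddPoint(2.35, 48.85, 0.0)
    feature.SetGeometry(point)
    layer.CreateFeature(feature)
    feature = None
    datasource = None  # releasing the datasource flushes the .gpkg to disk

Setting the datasource to None, as the patch does after each layer, is what actually closes and writes the GeoPackage.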
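Parts II and III of the view duplicate a template QField .zip, append the generated layers and the rewritten .qgs, and stream the archive back with a timestamped filename so successive exports do not collide. A condensed sketch of that flow, assuming the template is copied with shutil (the helper name and paths are hypothetical, not from the patch):

    import os
    import shutil
    from zipfile import ZipFile

    from django.http import HttpResponse
    from django.utils import timezone

    def package_and_respond(template_zip, export_zip, extra_files):
        # Duplicate the template project, then append the freshly generated files
        if os.path.exists(export_zip):
            os.remove(export_zip)
        shutil.copyfile(template_zip, export_zip)
        with ZipFile(export_zip, 'a') as zip_file:
            for path in extra_files:
                zip_file.write(path, os.path.basename(path))
        # Timestamped attachment name, mirroring the patch
        filename = "Qfield_prospections_{}.zip".format(timezone.now().strftime("%Y%m%d-%H%M%S"))
        with open(export_zip, 'rb') as handle:
            response = HttpResponse(handle.read(), content_type='application/zip')
        response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
        return response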
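modification_style and modification_style_sites work by plain string substitution in the project's .qgs XML: placeholder field names baked into the template (finds_x, cr_operation, ...) are swapped for the actual column names of the export, and the <Option> block holding the default operation value is replaced by one entry per operation found in the data. Reduced to its core, the placeholder step looks roughly like this (function name and mapping are illustrative):

    def substitute_placeholders(qgs_path, mapping):
        # mapping: template placeholder -> real column name from the export
        with open(qgs_path, encoding='utf-8') as handle:
            text = handle.read()
        for placeholder, column in mapping.items():
            if column:  # leave the placeholder untouched when the column was not exported
                text = text.replace(placeholder, column)
        return text

    # e.g. substitute_placeholders(qgs_path, {'finds_x': 'X', 'finds_y': 'Y', 'cr_operation': 'Operation'})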