-rw-r--r--  archaeological_context_records/tests.py             | 33
-rw-r--r--  archaeological_operations/tests.py                   | 15
-rw-r--r--  ishtar_common/data_importer.py                       |  6
-rw-r--r--  ishtar_common/fixtures/initial_importtypes-fr.json   | 14
-rw-r--r--  ishtar_common/models.py                              |  8
-rw-r--r--  ishtar_common/views.py                               | 38
6 files changed, 95 insertions(+), 19 deletions(-)
diff --git a/archaeological_context_records/tests.py b/archaeological_context_records/tests.py
index 2e1355572..c9795fce7 100644
--- a/archaeological_context_records/tests.py
+++ b/archaeological_context_records/tests.py
@@ -17,7 +17,9 @@
# See the file COPYING for details.
+import csv
import json
+from StringIO import StringIO
from django.conf import settings
from django.core.exceptions import ValidationError, ImproperlyConfigured
@@ -26,6 +28,7 @@ from django.test.client import Client
from ishtar_common.models import IshtarSiteProfile, ImporterModel
from ishtar_common.tests import create_superuser, TestCase
+
from archaeological_operations.tests import OperationInitTest, \
ImportTest, ImportOperationTest
from archaeological_operations import models as models_ope
@@ -183,6 +186,35 @@ class ContextRecordInit(OperationInitTest):
super(ContextRecordInit, self).tearDown()
+class ExportTest(ContextRecordInit, TestCase):
+ fixtures = ImportContextRecordTest.fixtures
+
+ def setUp(self):
+ self.username, self.password, self.user = create_superuser()
+
+ def test_ishtar_export_ue(self):
+ ope = self.create_operation()[0]
+ ope.code_patriarche = "45000"
+ ope.save()
+ cr = self.create_context_record(data={"label": u"CR 1"})[0]
+ c = Client()
+ url = reverse('get-by-importer',
+ kwargs={'slug': 'ishtar-context-record',
+ 'type': 'csv'})
+ response = c.get(url)
+ # no result when no authentication
+ self.assertTrue(not response.content)
+ c.login(username=self.username, password=self.password)
+ response = c.get(url)
+ rows = list(csv.reader(StringIO(response.content)))
+ # one header + one context record
+ self.assertEqual(len(rows), 2)
+ row_cr = rows[1]
+ self.assertEqual(row_cr[0], '45000')
+ self.assertEqual(row_cr[1], '12345')
+ self.assertEqual(row_cr[2], 'A1')
+
+
class ContextRecordTest(ContextRecordInit, TestCase):
fixtures = ImportContextRecordTest.fixtures
@@ -254,7 +286,6 @@ class ContextRecordTest(ContextRecordInit, TestCase):
self.assertEqual(ope_id, 'OP2017-1')
-
class ContextRecordSearchTest(ContextRecordInit, TestCase):
fixtures = ImportContextRecordTest.fixtures
diff --git a/archaeological_operations/tests.py b/archaeological_operations/tests.py
index 63f572643..c2d5aad83 100644
--- a/archaeological_operations/tests.py
+++ b/archaeological_operations/tests.py
@@ -332,7 +332,20 @@ class ImportOperationTest(ImportTest, TestCase):
impt.delete()
self.assertEqual(parcel_count - 3, models.Parcel.objects.count())
- def testParseParcels(self):
+
+class ParcelTest(ImportTest, TestCase):
+ fixtures = [settings.ROOT_PATH +
+ '../fixtures/initial_data-auth-fr.json',
+ settings.ROOT_PATH +
+ '../ishtar_common/fixtures/initial_data-fr.json',
+ settings.ROOT_PATH +
+ '../ishtar_common/fixtures/test_towns.json',
+ settings.ROOT_PATH +
+ '../ishtar_common/fixtures/initial_importtypes-fr.json',
+ settings.ROOT_PATH +
+ '../archaeological_operations/fixtures/initial_data-fr.json']
+
+ def test_parse_parcels(self):
# the database needs to be initialised before importing
from archaeological_operations.import_from_csv import parse_parcels
# default_town = Town.objects.create(numero_insee="12345",
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 34394341c..61fae2721 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -47,8 +47,12 @@ class ImportFormater(object):
through_unicity_keys=None, duplicate_fields=[], regexp=None,
regexp_formater_args=[], force_value=None,
post_processing=False, concat=False, concat_str=False,
- comment="", force_new=None):
+ comment="", force_new=None, export_field_name=None):
self.field_name = field_name
+ if export_field_name:
+ self.export_field_name = export_field_name
+ else:
+ self.export_field_name = field_name
self.formater = formater
self.required = required
self.through = through
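
Note: the new export_field_name keyword above defaults to the import field name when it is not given. A minimal standalone sketch of that fallback (FormaterSketch is hypothetical, not the actual ishtar_common.data_importer.ImportFormater class):

    class FormaterSketch(object):
        def __init__(self, field_name, export_field_name=None):
            self.field_name = field_name
            # exports reuse the import field name unless an explicit
            # export name is provided
            self.export_field_name = export_field_name or field_name

    plain = FormaterSketch("code_patriarche")
    joined = FormaterSketch(
        "parcel", export_field_name="parcel__section|parcel__parcel_number")
    assert plain.export_field_name == "code_patriarche"
    assert joined.export_field_name == "parcel__section|parcel__parcel_number"
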
diff --git a/ishtar_common/fixtures/initial_importtypes-fr.json b/ishtar_common/fixtures/initial_importtypes-fr.json
index 9ee6710c4..cef19d374 100644
--- a/ishtar_common/fixtures/initial_importtypes-fr.json
+++ b/ishtar_common/fixtures/initial_importtypes-fr.json
@@ -158,7 +158,7 @@
"is_template": true,
"unicity_keys": "external_id",
"users": [],
- "slug": "ishtar_context_record",
+ "slug": "ishtar-context-record",
"associated_models": 7,
"name": "ISHTAR-UE"
}
@@ -172,7 +172,7 @@
"is_template": true,
"unicity_keys": "external_id",
"users": [],
- "slug": "ishtar_parcels",
+ "slug": "ishtar-parcels",
"associated_models": 9,
"name": "ISHTAR-PARCELLES"
}
@@ -186,7 +186,7 @@
"is_template": true,
"unicity_keys": "code_patriarche",
"users": [],
- "slug": "ishtar_operations",
+ "slug": "ishtar-operations",
"associated_models": 6,
"name": "ISHTAR-OP\u00c9RATIONS"
}
@@ -200,7 +200,7 @@
"is_template": true,
"unicity_keys": "external_id",
"users": [],
- "slug": "ishtar_finds",
+ "slug": "ishtar-finds",
"associated_models": 3,
"name": "ISHTAR-MOBILIER"
}
@@ -1460,7 +1460,8 @@
"regexp_pre_filter": null,
"required": true,
"label": "INSEE",
- "importer_type": 18
+ "importer_type": 18,
+ "export_field_name": "parcel__town__numero_insee"
}
},
{
@@ -1472,7 +1473,8 @@
"regexp_pre_filter": null,
"required": true,
"label": "Parcelle",
- "importer_type": 18
+ "importer_type": 18,
+ "export_field_name": "parcel__section|parcel__parcel_number"
}
},
{
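
Note: in the fixture columns above, a pipe ("|") inside export_field_name separates several ORM lookups whose values are concatenated into a single exported column (see the views.py change below). A minimal sketch of that convention, with expand_export_field as a hypothetical helper rather than Ishtar code:

    def expand_export_field(name):
        # one exported column may map to several query lookups
        return name.split('|')

    assert expand_export_field("parcel__town__numero_insee") == \
        ["parcel__town__numero_insee"]
    assert expand_export_field("parcel__section|parcel__parcel_number") == \
        ["parcel__section", "parcel__parcel_number"]
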
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index 53f0871f5..5b21a9a49 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -1831,6 +1831,9 @@ class ImporterType(models.Model):
for field in column.duplicate_fields.all()]
formater_kwargs['required'] = column.required
formater_kwargs['force_new'] = force_news
+ if column.export_field_name:
+ formater_kwargs['export_field_name'] = [
+ column.export_field_name]
formater = ImportFormater(targets, formater_types,
**formater_kwargs)
LINE_FORMAT.append(formater)
@@ -1952,6 +1955,11 @@ class ImporterColumn(models.Model):
description = models.TextField(_("Description"), blank=True, null=True)
regexp_pre_filter = models.ForeignKey("Regexp", blank=True, null=True)
required = models.BooleanField(_(u"Required"), default=False)
+ export_field_name = models.CharField(
+ _(u"Export field name"), blank=True, null=True, max_length=200,
+ help_text=_(u"Fill this field if the field name is ambiguous for "
+ u"export. For instance: concatenated fields.")
+ )
class Meta:
verbose_name = _(u"Importer - Column")
diff --git a/ishtar_common/views.py b/ishtar_common/views.py
index a4ad130be..94d754938 100644
--- a/ishtar_common/views.py
+++ b/ishtar_common/views.py
@@ -880,15 +880,23 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
else:
table_cols = model.TABLE_COLS
+ query_table_cols = []
+ for cols in table_cols:
+ if type(cols) not in (list, tuple):
+ cols = [cols]
+ for col in cols:
+ query_table_cols += col.split('|')
+
# contextual (full, simple, etc.) col
contxt = full and 'full' or 'simple'
if hasattr(model, 'CONTEXTUAL_TABLE_COLS') and \
contxt in model.CONTEXTUAL_TABLE_COLS:
for idx, col in enumerate(table_cols):
if col in model.CONTEXTUAL_TABLE_COLS[contxt]:
- table_cols[idx] = model.CONTEXTUAL_TABLE_COLS[contxt][col]
+ query_table_cols[idx] = \
+ model.CONTEXTUAL_TABLE_COLS[contxt][col]
if full == 'shortcut':
- table_cols = ['cached_label']
+ query_table_cols = ['cached_label']
# manage sort tables
manual_sort_key = None
@@ -912,7 +920,7 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
orders.append(sign + k)
items = items.order_by(*orders)
elif q:
- for ke in table_cols:
+ for ke in query_table_cols:
if type(ke) in (list, tuple):
ke = ke[0]
if ke.endswith(q):
@@ -957,7 +965,7 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
continue
c_ids.append(item.pk)
data = [item.pk]
- for keys in table_cols:
+ for keys in query_table_cols:
if type(keys) not in (list, tuple):
keys = [keys]
my_vals = []
@@ -1022,10 +1030,10 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
if manual_sort_key:
# +1 because the id is added as a first col
idx_col = None
- if manual_sort_key in table_cols:
- idx_col = table_cols.index(manual_sort_key) + 1
+ if manual_sort_key in query_table_cols:
+ idx_col = query_table_cols.index(manual_sort_key) + 1
else:
- for idx, col in enumerate(table_cols):
+ for idx, col in enumerate(query_table_cols):
if type(col) in (list, tuple) and \
manual_sort_key in col:
idx_col = idx + 1
@@ -1114,8 +1122,18 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
unicode(field.verbose_name).encode(ENCODING))
writer.writerow(col_names)
for data in datas:
- writer.writerow([val.encode(ENCODING, errors='replace')
- for val in data[1:]])
+ row, delta = [], 0
+ # regroup cols with join "|"
+ for idx, col_name in enumerate(table_cols):
+ val = data[1:][idx + delta].encode(
+ ENCODING, errors='replace')
+ if "|" in col_name[0]:
+ for delta_idx in range(len(col_name[0].split('|')) - 1):
+ delta += 1
+ val += data[1:][idx + delta].encode(
+ ENCODING, errors='replace')
+ row.append(val)
+ writer.writerow(row)
return response
return HttpResponse('{}', mimetype='text/plain')
@@ -1135,7 +1153,7 @@ def get_by_importer(request, slug, data_type='json', full=False,
for formater in imp.LINE_FORMAT:
if not formater:
continue
- cols.append(formater.field_name)
+ cols.append(formater.export_field_name)
obj_name = imp.OBJECT_CLS.__name__.lower()
return get_item(
imp.OBJECT_CLS, 'get_' + obj_name, obj_name, own_table_cols=cols
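
Note: the CSV-writing hunk above splits piped column names for the query and then glues their values back into a single cell per exported column. A simplified sketch of that regrouping, assuming each entry of table_cols is a one-element list holding the export field name (as built in get_by_importer) and values holds one already-encoded string per expanded lookup (regroup_row is hypothetical, not the actual view code):

    def regroup_row(table_cols, values):
        row, delta = [], 0
        for idx, col_name in enumerate(table_cols):
            val = values[idx + delta]
            if "|" in col_name[0]:
                # piped columns were split for the query; concatenate
                # their values back into a single exported cell
                for _ in range(len(col_name[0].split('|')) - 1):
                    delta += 1
                    val += values[idx + delta]
            row.append(val)
        return row

    cols = [["code_patriarche"], ["parcel__town__numero_insee"],
            ["parcel__section|parcel__parcel_number"]]
    assert regroup_row(cols, ["45000", "12345", "A", "1"]) == \
        ["45000", "12345", "A1"]

This matches the row checked by test_ishtar_export_ue in archaeological_context_records/tests.py.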