summaryrefslogtreecommitdiff
path: root/ishtar_common/data_importer.py
diff options
context:
space:
mode:
author: Étienne Loks <etienne.loks@iggdrasil.net> 2018-03-21 17:55:48 +0100
committer: Étienne Loks <etienne.loks@iggdrasil.net> 2018-03-27 17:53:27 +0200
commit: 951b9e9ba349459f5b1abbdd092d5910c346cd9f (patch)
tree: fddb75df476ad2322feaf0395d25822d8bf25390 /ishtar_common/data_importer.py
parent: 54daa385a241ad272abe9e06a63a0da36ff17749 (diff)
download: Ishtar-951b9e9ba349459f5b1abbdd092d5910c346cd9f.tar.bz2
download: Ishtar-951b9e9ba349459f5b1abbdd092d5910c346cd9f.zip
Step by step import: present object to be created, updated, etc. (refs #3975)
Diffstat (limited to 'ishtar_common/data_importer.py')
-rw-r--r--  ishtar_common/data_importer.py  116
1 file changed, 89 insertions, 27 deletions
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 420e38008..4519241af 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -864,13 +864,18 @@ class Importer(object):
self.test = test
self.errors = [] # list of (line, col, message)
self.validity = [] # list of (line, col, message)
+
+ # list used for simulation
+ self.simulate = False
+ self.new_objects, self.updated_objects = [], []
+ self.ambiguous_objects, self.not_find_objects = [], []
+
self.number_updated = 0
self.number_created = 0
self.check_col_num = check_col_num
self.line_format = copy.copy(self.LINE_FORMAT)
self.import_instance = import_instance
self.archive = None
- self.simulate = False
self.current_csv_line = None
self.conservative_import = conservative_import
# for a conservative_import UNICITY_KEYS should be defined
@@ -1129,13 +1134,17 @@ class Importer(object):
n2 = n
if self.test:
return
- if self.simulate:
- return data
# manage unicity of items (mainly for updates)
if 'history_modifier' in get_all_field_names(self.OBJECT_CLS):
data['history_modifier'] = self.history_modifier
+ self.new_objects, self.updated_objects = [], []
+ self.ambiguous_objects, self.not_find_objects = [], []
+
obj, created = self.get_object(self.OBJECT_CLS, data)
+ if self.simulate:
+ return data
+
if self.import_instance and hasattr(obj, 'imports') \
and created:
obj.imports.add(self.import_instance)
@@ -1657,7 +1666,10 @@ class Importer(object):
if self.MODEL_CREATION_LIMIT and \
cls not in self.MODEL_CREATION_LIMIT:
raise self._get_improperly_conf_error(cls)
- obj = cls.objects.create(**new_dct)
+ if not self.simulate:
+ obj = cls.objects.create(**new_dct)
+ else:
+ self.new_objects.append((path, cls, new_dct))
else:
# manage UNICITY_KEYS - only level 1
if not path and self.UNICITY_KEYS:
@@ -1665,20 +1677,49 @@ class Importer(object):
if k not in self.UNICITY_KEYS \
and k != 'defaults':
defaults[k] = dct.pop(k)
- if not self.MODEL_CREATION_LIMIT or \
- cls in self.MODEL_CREATION_LIMIT:
- dct['defaults'] = defaults.copy()
- obj, created = cls.objects.get_or_create(**dct)
+
+ if self.simulate:
+ q = cls.objects.filter(**dct)
+ if not q.count():
+ if self.MODEL_CREATION_LIMIT and \
+ cls not in self.MODEL_CREATION_LIMIT:
+ self.not_find_objects.append(
+ (path, cls, dct)
+ )
+ return _(u"* match not find *"), False
+ dct.update(defaults)
+ self.new_objects.append([path, cls, dct])
+ created = True
+ elif q.count() > 1:
+ self.ambiguous_objects.append(
+ (path, list(q.all()), dct)
+ )
+ if q.count() > 10:
+ return _(u"* the query match more than 10 "
+ u"results*"), False
+ else:
+ return unicode(_(u" or ")).join(
+ [unicode(item) for item in q.all()]
+ ), False
+ else:
+ self.updated_objects.append(
+ [path, q.all()[0], dct, {}])
+ dct['defaults'] = defaults.copy()
else:
- try:
- obj = cls.objects.get(**dct)
+ if not self.MODEL_CREATION_LIMIT or \
+ cls in self.MODEL_CREATION_LIMIT:
dct['defaults'] = defaults.copy()
- except cls.DoesNotExist:
- raise self._get_does_not_exist_in_db_error(
- cls, dct)
+ obj, created = cls.objects.get_or_create(**dct)
+ else:
+ try:
+ obj = cls.objects.get(**dct)
+ dct['defaults'] = defaults.copy()
+ except cls.DoesNotExist:
+ raise self._get_does_not_exist_in_db_error(
+ cls, dct)
if not created and not path and self.UNICITY_KEYS:
- changed = False
+ updated_dct = {}
if self.conservative_import:
for k in dct['defaults']:
new_val = dct['defaults'][k]
@@ -1686,23 +1727,26 @@ class Importer(object):
continue
val = getattr(obj, k)
if val is None or val == '':
- changed = True
- setattr(obj, k, new_val)
+ updated_dct[k] = new_val
elif k in self.concats \
and type(val) == unicode \
and type(new_val) == unicode:
- setattr(obj, k, val + u"\n" + new_val)
+ updated_dct[k] = val + u"\n" + new_val
else:
for k in dct['defaults']:
new_val = dct['defaults'][k]
if new_val is None or new_val == '':
continue
- changed = True
- setattr(obj, k, new_val)
- if changed:
- obj.save()
- if self.import_instance and hasattr(obj, 'imports') \
- and created:
+ updated_dct[k] = new_val
+ if updated_dct:
+ if self.simulate:
+ self.updated_objects[-1][-1] = updated_dct
+ else:
+ for k in updated_dct:
+ setattr(obj, k, updated_dct[k])
+ obj.save()
+ if not self.simulate and self.import_instance and \
+ hasattr(obj, 'imports') and created:
obj.imports.add(self.import_instance)
except ValueError as e:
raise IntegrityError(e.message)
@@ -1720,10 +1764,28 @@ class Importer(object):
values = [value]
if type(value) in (list, tuple):
values = value
- for v in values:
- getattr(obj, attr).add(v)
- # force post save script
- v.save()
+ if self.simulate:
+ if created:
+ obj_dct = self.new_objects[-1][-1]
+ else:
+ obj_dct = self.updated_objects[-1][-1]
+ obj_dct[attr] = values
+ else:
+ for v in values:
+ getattr(obj, attr).add(v)
+ # force post save script
+ v.save()
+ if self.simulate:
+ if created:
+ for k in dct.keys():
+ # do not present empty value
+ if dct[k] in (None, ''):
+ dct.pop(k)
+ return _(u"* created item *"), True
+ else:
+ # defaults are not presented as matching data
+ dct.pop('defaults')
+ return self.updated_objects[-1][1], False
if m2ms:
# force post save script
obj.save()