-rw-r--r--   archaeological_context_records/models.py    2
-rw-r--r--   archaeological_finds/models_finds.py         3
-rw-r--r--   archaeological_operations/models.py          2
-rw-r--r--   archaeological_warehouse/models.py          10
-rw-r--r--   ishtar_common/data_importer.py              19
-rw-r--r--   ishtar_common/models_common.py              22
-rw-r--r--   ishtar_common/wizards.py                     2
7 files changed, 46 insertions, 14 deletions
diff --git a/archaeological_context_records/models.py b/archaeological_context_records/models.py
index 23b6644ed..9be9789d9 100644
--- a/archaeological_context_records/models.py
+++ b/archaeological_context_records/models.py
@@ -1441,6 +1441,8 @@ def context_record_post_save(sender, **kwargs):
profile = get_current_profile()
if profile.parent_relations_engine == "T":
ContextRecordTree._update_self_relation(instance.pk) # on creation: manage self relation
+ if getattr(instance, "_no_down_model_update", None):
+ return
BaseFind = apps.get_model("archaeological_finds", "BaseFind")
Find = apps.get_model("archaeological_finds", "Find")
for bf in instance.base_finds.all():
diff --git a/archaeological_finds/models_finds.py b/archaeological_finds/models_finds.py
index 67c4a414a..bd0c42619 100644
--- a/archaeological_finds/models_finds.py
+++ b/archaeological_finds/models_finds.py
@@ -3285,6 +3285,7 @@ class Find(
_("No container have been set - the localisation cannot " "be set.")
)
+ container.no_post_process(history=True)
localisation, error = container.set_localisation(
place, value, static=static, return_errors=True
)
@@ -3651,7 +3652,7 @@ class Find(
if base_find.update_external_id():
modified = True
if modified:
- base_find.skip_history_when_saving = True
+ base_find.no_post_process()
base_find._cached_label_checked = False
base_find.save()
# if not base_find.material_index:
diff --git a/archaeological_operations/models.py b/archaeological_operations/models.py
index cadb806dc..b7b955b80 100644
--- a/archaeological_operations/models.py
+++ b/archaeological_operations/models.py
@@ -2665,6 +2665,8 @@ def operation_post_save(sender, **kwargs):
operation.fnap_financing = fnap_percent
operation.save()
cached_label_changed(sender, **kwargs)
+ if getattr(operation, "_no_down_model_update", None):
+ return
if operation.associated_file:
operation.associated_file.update_short_menu_class()
# manage parcel association
diff --git a/archaeological_warehouse/models.py b/archaeological_warehouse/models.py
index 2c6feaffc..d692f3dd9 100644
--- a/archaeological_warehouse/models.py
+++ b/archaeological_warehouse/models.py
@@ -1717,8 +1717,12 @@ class Container(
return None, error_msg
return
current_localisation = Container.objects.create(**dct)
+ current_localisation_id = current_localisation.id
+ # do not use save(): a queryset update() is more query friendly
+ self.__class__.objects.filter(pk=self.id).update(
+ parent_id=current_localisation_id)
+ # update the cached value of the parent - safer for post-processing
self.parent = current_localisation
- self.save()
if return_errors:
return current_localisation, None
return current_localisation
@@ -1990,12 +1994,12 @@ class Container(
for find in self.finds.all():
updated = find.update_current_full_location(full_location)
if updated:
- find.skip_history_when_saving = True
+ find.no_post_process()
find.save()
for find in self.finds_ref.all():
updated = find.update_ref_full_location(full_location)
if updated:
- find.skip_history_when_saving = True
+ find.no_post_process()
find.save()
def pre_save(self):
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 9d470a084..9329fbfac 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -945,6 +945,8 @@ class Importer(object):
item = cls.objects.get(pk=pk)
except cls.DoesNotExist:
continue
+ if cls != self.OBJECT_CLS:
+ cls._no_down_model_update = True
item._timestamp = self.timestamp
item._queue = "low_priority"
item.save()
@@ -1186,6 +1188,8 @@ class Importer(object):
def _create_item(self, cls, dct, idx_line):
obj = cls(**dct)
obj._no_post_save = True # delayed at the end of the import
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj._queue = "low_priority"
obj.save()
self._add_to_post_save(cls, obj.pk, idx_line)
@@ -1329,6 +1333,9 @@ class Importer(object):
else:
self.number_updated += 1
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
+
if not created and "defaults" in data:
for k in data["defaults"]:
setattr(obj, k, data["defaults"][k])
@@ -1375,6 +1382,8 @@ class Importer(object):
setattr(item, k, geodata[k])
item._timestamp = self.timestamp
item._queue = "low_priority"
+ if hasattr(item, "no_post_process"):
+ item.no_post_process(history=True)
item.save()
else:
item = GeoVectorData.objects.create(**geodata)
@@ -1393,6 +1402,8 @@ class Importer(object):
obj._no_move = True
obj.skip_history_when_saving = True
obj._queue = "low_priority"
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj.save()
n = datetime.datetime.now()
@@ -1466,6 +1477,8 @@ class Importer(object):
t_obj._no_post_save = True
t_obj._timestamp = self.timestamp
t_obj._queue = "low_priority"
+ if hasattr(t_obj, "no_post_process"):
+ t_obj.no_post_process(history=True)
t_obj.save()
self._add_to_post_save(t_obj.__class__, t_obj.pk, idx_line)
if self.import_instance and hasattr(t_obj, "imports") and created:
@@ -1808,6 +1821,8 @@ class Importer(object):
if changed:
v._timestamp = self.timestamp
v._queue = "low_priority"
+ if hasattr(v, "no_post_process"):
+ v.no_post_process(history=True)
v.save()
for att, objs in m2m_m2ms:
if type(objs) not in (list, tuple):
@@ -2157,6 +2172,8 @@ class Importer(object):
setattr(obj, k, updated_dct[k])
obj._timestamp = self.timestamp
obj._queue = "low_priority"
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj.save()
if (
not self.simulate
@@ -2227,6 +2244,8 @@ class Importer(object):
try:
v._timestamp = self.timestamp
v._queue = "low_priority"
+ if hasattr(v, "no_post_process"):
+ v.no_post_process(history=True)
v.save()
except DatabaseError as import_error:
msg = str(import_error)
diff --git a/ishtar_common/models_common.py b/ishtar_common/models_common.py
index e749580f4..19d04ea17 100644
--- a/ishtar_common/models_common.py
+++ b/ishtar_common/models_common.py
@@ -1133,7 +1133,8 @@ class FullSearch(models.Model):
m2m_search_vector.key, config=m2m_search_vector.language
)
).values("search")
- search_vectors.append(q.all()[0]["search"])
+ if q.count():
+ search_vectors.append(q.all()[0]["search"])
# int/float are not well managed by the SearchVector
for int_search_vector in self.INT_SEARCH_VECTORS:
@@ -1950,6 +1951,16 @@ class BaseHistorizedItem(
self.fix_associated()
return True
+ def no_post_process(self, history=False):
+ if not history:
+ self.skip_history_when_saving = True
+ self._cached_label_checked = True
+ self._post_saved_geo = True
+ self._external_id_checked = True
+ self._search_updated = True
+ self._no_move = True
+ self._no_down_model_update = True
+
class LightHistorizedItem(BaseHistorizedItem):
history_date = models.DateTimeField(default=datetime.datetime.now)
@@ -3440,15 +3451,6 @@ class MainItem(ShortMenuItem, SerializeItem, SheetItem):
if hasattr(item, "main_geodata"):
item.post_save_geo()
- def no_post_process(self):
- self.skip_history_when_saving = True
- self._cached_label_checked = True
- self._post_saved_geo = True
- self._external_id_checked = True
- self._search_updated = True
- self._no_move = True
- self._no_down_model_update = True
-
@classmethod
def app_label(cls):
return cls._meta.app_label
diff --git a/ishtar_common/wizards.py b/ishtar_common/wizards.py
index 8308deed3..1d5e95647 100644
--- a/ishtar_common/wizards.py
+++ b/ishtar_common/wizards.py
@@ -776,6 +776,8 @@ class Wizard(IshtarWizard):
c_item = m.related.model(**other_objs[dependant_item])
setattr(obj, dependant_item, c_item)
obj.save()
+ # the test framework does not properly reset this setting
+ obj._no_down_model_update = False
obj.save()
else:
adds = {}
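
A minimal usage sketch of the no_post_process() helper that this diff moves from MainItem onto BaseHistorizedItem, assuming a concrete historized model such as Find and a simple field update; the import path, primary key and field name below are illustrative assumptions, not taken from this diff:

    # Illustrative sketch only: import path, pk and field are assumptions.
    from archaeological_finds.models_finds import Find

    find = Find.objects.get(pk=1)            # hypothetical primary key
    find.comment = "updated by a script"     # hypothetical field update

    # Default call: also sets skip_history_when_saving, so no new revision
    # is recorded, and cached label / geo / external id / search vector
    # updates are skipped; post_save handlers return early because
    # _no_down_model_update is set.
    find.no_post_process()
    find.save()

    # With history=True (as the importer now does), a revision is still
    # recorded but the other post-save treatments stay disabled.
    find.no_post_process(history=True)
    find.save()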