summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--archaeological_context_records/models.py2
-rw-r--r--archaeological_finds/models_finds.py3
-rw-r--r--archaeological_operations/models.py2
-rw-r--r--archaeological_warehouse/models.py10
-rw-r--r--ishtar_common/data_importer.py19
-rw-r--r--ishtar_common/models_common.py22
-rw-r--r--ishtar_common/wizards.py2
7 files changed, 46 insertions, 14 deletions
diff --git a/archaeological_context_records/models.py b/archaeological_context_records/models.py
index 4c642a464..861558c09 100644
--- a/archaeological_context_records/models.py
+++ b/archaeological_context_records/models.py
@@ -1405,6 +1405,8 @@ def context_record_post_save(sender, **kwargs):
profile = get_current_profile()
if profile.parent_relations_engine == "T":
ContextRecordTree._update_self_relation(instance.pk) # on creation: manage self relation
+ if getattr(instance, "_no_down_model_update", None):
+ return
BaseFind = apps.get_model("archaeological_finds", "BaseFind")
Find = apps.get_model("archaeological_finds", "Find")
for bf in instance.base_finds.all():
diff --git a/archaeological_finds/models_finds.py b/archaeological_finds/models_finds.py
index d6280d53e..40381a6bb 100644
--- a/archaeological_finds/models_finds.py
+++ b/archaeological_finds/models_finds.py
@@ -3174,6 +3174,7 @@ class Find(
_("No container have been set - the localisation cannot " "be set.")
)
+ container.no_post_process(history=True)
localisation, error = container.set_localisation(
place, value, static=static, return_errors=True
)
@@ -3540,7 +3541,7 @@ class Find(
if base_find.update_external_id():
modified = True
if modified:
- base_find.skip_history_when_saving = True
+ base_find.no_post_process()
base_find._cached_label_checked = False
base_find.save()
# if not base_find.material_index:
diff --git a/archaeological_operations/models.py b/archaeological_operations/models.py
index 3717ba9b9..cf0db7d19 100644
--- a/archaeological_operations/models.py
+++ b/archaeological_operations/models.py
@@ -2583,6 +2583,8 @@ def operation_post_save(sender, **kwargs):
operation.fnap_financing = fnap_percent
operation.save()
cached_label_changed(sender, **kwargs)
+ if getattr(operation, "_no_down_model_update", None):
+ return
if operation.associated_file:
operation.associated_file.update_short_menu_class()
# manage parcel association
diff --git a/archaeological_warehouse/models.py b/archaeological_warehouse/models.py
index 9700e689a..9a253e735 100644
--- a/archaeological_warehouse/models.py
+++ b/archaeological_warehouse/models.py
@@ -1693,8 +1693,12 @@ class Container(
return None, error_msg
return
current_localisation = Container.objects.create(**dct)
+ current_localisation_id = current_localisation.id
+    # do not use save(): a direct queryset update() is more query-friendly
+ self.__class__.objects.filter(pk=self.id).update(
+ parent_id=current_localisation_id)
+ # update the cached value of the parent - safer for post-treatments
self.parent = current_localisation
- self.save()
if return_errors:
return current_localisation, None
return current_localisation
@@ -1966,12 +1970,12 @@ class Container(
for find in self.finds.all():
updated = find.update_current_full_location(full_location)
if updated:
- find.skip_history_when_saving = True
+ find.no_post_process()
find.save()
for find in self.finds_ref.all():
updated = find.update_ref_full_location(full_location)
if updated:
- find.skip_history_when_saving = True
+ find.no_post_process()
find.save()
def pre_save(self):
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 7717c0e03..c239b20b4 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -945,6 +945,8 @@ class Importer(object):
item = cls.objects.get(pk=pk)
except cls.DoesNotExist:
continue
+ if cls != self.OBJECT_CLS:
+ cls._no_down_model_update = True
item._timestamp = self.timestamp
item._queue = "low_priority"
item.save()
@@ -1186,6 +1188,8 @@ class Importer(object):
def _create_item(self, cls, dct, idx_line):
obj = cls(**dct)
obj._no_post_save = True # delayed at the end of the import
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj._queue = "low_priority"
obj.save()
self._add_to_post_save(cls, obj.pk, idx_line)
@@ -1326,6 +1330,9 @@ class Importer(object):
else:
self.number_updated += 1
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
+
if not created and "defaults" in data:
for k in data["defaults"]:
setattr(obj, k, data["defaults"][k])
@@ -1372,6 +1379,8 @@ class Importer(object):
setattr(item, k, geodata[k])
item._timestamp = self.timestamp
item._queue = "low_priority"
+ if hasattr(item, "no_post_process"):
+ item.no_post_process(history=True)
item.save()
else:
item = GeoVectorData.objects.create(**geodata)
@@ -1390,6 +1399,8 @@ class Importer(object):
obj._no_move = True
obj.skip_history_when_saving = True
obj._queue = "low_priority"
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj.save()
n = datetime.datetime.now()
@@ -1463,6 +1474,8 @@ class Importer(object):
t_obj._no_post_save = True
t_obj._timestamp = self.timestamp
t_obj._queue = "low_priority"
+ if hasattr(t_obj, "no_post_process"):
+ t_obj.no_post_process(history=True)
t_obj.save()
self._add_to_post_save(t_obj.__class__, t_obj.pk, idx_line)
if self.import_instance and hasattr(t_obj, "imports") and created:
@@ -1805,6 +1818,8 @@ class Importer(object):
if changed:
v._timestamp = self.timestamp
v._queue = "low_priority"
+ if hasattr(v, "no_post_process"):
+ v.no_post_process(history=True)
v.save()
for att, objs in m2m_m2ms:
if type(objs) not in (list, tuple):
@@ -2154,6 +2169,8 @@ class Importer(object):
setattr(obj, k, updated_dct[k])
obj._timestamp = self.timestamp
obj._queue = "low_priority"
+ if hasattr(obj, "no_post_process"):
+ obj.no_post_process(history=True)
obj.save()
if (
not self.simulate
@@ -2224,6 +2241,8 @@ class Importer(object):
try:
v._timestamp = self.timestamp
v._queue = "low_priority"
+ if hasattr(v, "no_post_process"):
+ v.no_post_process(history=True)
v.save()
except DatabaseError as import_error:
msg = str(import_error)
diff --git a/ishtar_common/models_common.py b/ishtar_common/models_common.py
index c623a0604..79aa66953 100644
--- a/ishtar_common/models_common.py
+++ b/ishtar_common/models_common.py
@@ -1051,7 +1051,8 @@ class FullSearch(models.Model):
m2m_search_vector.key, config=m2m_search_vector.language
)
).values("search")
- search_vectors.append(q.all()[0]["search"])
+ if q.count():
+ search_vectors.append(q.all()[0]["search"])
# int/float are not well managed by the SearchVector
for int_search_vector in self.INT_SEARCH_VECTORS:
@@ -1875,6 +1876,16 @@ class BaseHistorizedItem(
self.fix_associated()
return True
+ def no_post_process(self, history=False):
+ if not history:
+ self.skip_history_when_saving = True
+ self._cached_label_checked = True
+ self._post_saved_geo = True
+ self._external_id_checked = True
+ self._search_updated = True
+ self._no_move = True
+ self._no_down_model_update = True
+
class LightHistorizedItem(BaseHistorizedItem):
history_date = models.DateTimeField(default=datetime.datetime.now)
@@ -3284,15 +3295,6 @@ class MainItem(ShortMenuItem, SerializeItem, SheetItem):
if hasattr(item, "main_geodata"):
item.post_save_geo()
- def no_post_process(self):
- self.skip_history_when_saving = True
- self._cached_label_checked = True
- self._post_saved_geo = True
- self._external_id_checked = True
- self._search_updated = True
- self._no_move = True
- self._no_down_model_update = True
-
@classmethod
def app_label(cls):
return cls._meta.app_label
diff --git a/ishtar_common/wizards.py b/ishtar_common/wizards.py
index 43e84fbcb..4e75b98de 100644
--- a/ishtar_common/wizards.py
+++ b/ishtar_common/wizards.py
@@ -767,6 +767,8 @@ class Wizard(IshtarWizard):
c_item = m.related.model(**other_objs[dependant_item])
setattr(obj, dependant_item, c_item)
obj.save()
+    # the test framework does not reinitialize this setting properly
+ obj._no_down_model_update = False
obj.save()
else:
adds = {}