path: root/archaeological_finds/models_finds.py
author    Étienne Loks <etienne.loks@iggdrasil.net>  2021-03-19 11:17:58 +0100
committer Étienne Loks <etienne.loks@iggdrasil.net>  2021-03-19 11:17:58 +0100
commit    baf29e1bb4b18fb9830956c53f3475db2f828e3f (patch)
tree      31b6b0a838b83efce2437d956a4f8d1a54f0aae6  /archaeological_finds/models_finds.py
parent    9d662a73d94264e2129195018d230481e1e2272f (diff)
download  Ishtar-baf29e1bb4b18fb9830956c53f3475db2f828e3f.tar.bz2
          Ishtar-baf29e1bb4b18fb9830956c53f3475db2f828e3f.zip
Format - black: finds
Diffstat (limited to 'archaeological_finds/models_finds.py')
-rw-r--r--  archaeological_finds/models_finds.py  2490
1 file changed, 1394 insertions, 1096 deletions
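
The diff below is the output of running the black code formatter over models_finds.py: string quotes are normalized to double quotes, long call signatures and collection literals are exploded onto one argument per line, and implicit string concatenations are rewrapped. The commit was presumably produced with the black command-line tool (black archaeological_finds/models_finds.py); a minimal, hypothetical sketch of reproducing the same result through black's Python API is:

    # Sketch only, not part of the commit: reformat the file with black's API.
    # Assumes black is installed; black.Mode() uses the defaults (88-character
    # lines, double-quote normalization), which match the changes in the diff.
    import black

    path = "archaeological_finds/models_finds.py"
    with open(path, encoding="utf-8") as fh:
        source = fh.read()

    formatted = black.format_str(source, mode=black.Mode())

    with open(path, "w", encoding="utf-8") as fh:
        fh.write(formatted)
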
diff --git a/archaeological_finds/models_finds.py b/archaeological_finds/models_finds.py
index 152e572f9..cf521c2b6 100644
--- a/archaeological_finds/models_finds.py
+++ b/archaeological_finds/models_finds.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2017 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
@@ -28,44 +28,66 @@ from django.contrib.postgres.indexes import GinIndex
from django.core.urlresolvers import reverse
from django.db import connection
from django.db.models import Max, Q, F
-from django.db.models.signals import m2m_changed, post_save, post_delete, \
- pre_delete
+from django.db.models.signals import m2m_changed, post_save, post_delete, pre_delete
from django.core.exceptions import ObjectDoesNotExist
from ishtar_common.utils import ugettext_lazy as _, pgettext_lazy
from ishtar_common.data_importer import post_importer_action, ImporterError
-from ishtar_common.utils import cached_label_changed, post_save_geo, \
- m2m_historization_changed
+from ishtar_common.utils import (
+ cached_label_changed,
+ post_save_geo,
+ m2m_historization_changed,
+)
from ishtar_common.alternative_configs import ALTERNATE_CONFIGS
from ishtar_common.model_managers import ExternalIdManager, UUIDModelManager
-from ishtar_common.models import Document, GeneralType, \
- HierarchicalType, BaseHistorizedItem, LightHistorizedItem, \
- OwnPerms, Person, Basket, post_save_cache, \
- ValueGetter, get_current_profile, IshtarSiteProfile, \
- GeoItem, BulkUpdatedItem, QuickAction, \
- MainItem, document_attached_changed, HistoryModel, DynamicRequest, \
- SearchAltName, CompleteIdentifierItem, SearchVectorConfig, DocumentItem
+from ishtar_common.models import (
+ Document,
+ GeneralType,
+ HierarchicalType,
+ BaseHistorizedItem,
+ LightHistorizedItem,
+ OwnPerms,
+ Person,
+ Basket,
+ post_save_cache,
+ ValueGetter,
+ get_current_profile,
+ IshtarSiteProfile,
+ GeoItem,
+ BulkUpdatedItem,
+ QuickAction,
+ MainItem,
+ document_attached_changed,
+ HistoryModel,
+ DynamicRequest,
+ SearchAltName,
+ CompleteIdentifierItem,
+ SearchVectorConfig,
+ DocumentItem,
+)
from ishtar_common.models_common import HistoricalRecords
from ishtar_common.utils import PRIVATE_FIELDS
-from archaeological_operations.models import AdministrativeAct, Operation, \
- CulturalAttributionType
+from archaeological_operations.models import (
+ AdministrativeAct,
+ Operation,
+ CulturalAttributionType,
+)
from archaeological_context_records.models import ContextRecord, Dating
from archaeological_warehouse.models import Warehouse
class MaterialType(HierarchicalType):
code = models.CharField(_("Code"), max_length=100, blank=True, null=True)
- recommendation = models.TextField(
- _("Recommendation"), blank=True, default="")
+ recommendation = models.TextField(_("Recommendation"), blank=True, default="")
class Meta:
verbose_name = _("Material type")
verbose_name_plural = _("Material types")
- ordering = ('label',)
+ ordering = ("label",)
post_save.connect(post_save_cache, sender=MaterialType)
@@ -78,7 +100,7 @@ class MaterialTypeQualityType(GeneralType):
class Meta:
verbose_name = _("Material type quality type")
verbose_name_plural = _("Material type quality types")
- ordering = ('order',)
+ ordering = ("order",)
post_save.connect(post_save_cache, sender=MaterialTypeQualityType)
@@ -91,7 +113,10 @@ class ConservatoryState(HierarchicalType):
class Meta:
verbose_name = _("Conservatory state type")
verbose_name_plural = _("Conservatory state types")
- ordering = ('order', 'label',)
+ ordering = (
+ "order",
+ "label",
+ )
post_save.connect(post_save_cache, sender=ConservatoryState)
@@ -103,43 +128,76 @@ class TreatmentType(HierarchicalType):
virtual = models.BooleanField(_("Virtual"))
destructive = models.BooleanField(_("Destructive"), default=False)
create_new_find = models.BooleanField(
- _("Create a new find"), default=False,
- help_text=_("If True when this treatment is applied a new version "
- "of the object will be created."))
+ _("Create a new find"),
+ default=False,
+ help_text=_(
+ "If True when this treatment is applied a new version "
+ "of the object will be created."
+ ),
+ )
upstream_is_many = models.BooleanField(
- _("Upstream is many"), default=False,
+ _("Upstream is many"),
+ default=False,
help_text=_(
- "Check this if for this treatment from many finds you'll get "
- "one."))
+ "Check this if for this treatment from many finds you'll get " "one."
+ ),
+ )
downstream_is_many = models.BooleanField(
- _("Downstream is many"), default=False,
+ _("Downstream is many"),
+ default=False,
help_text=_(
- "Check this if for this treatment from one find you'll get "
- "many."))
+ "Check this if for this treatment from one find you'll get " "many."
+ ),
+ )
change_reference_location = models.BooleanField(
- _("Change reference location"), default=False,
- help_text=_("The treatment change the reference location."))
+ _("Change reference location"),
+ default=False,
+ help_text=_("The treatment change the reference location."),
+ )
change_current_location = models.BooleanField(
- _("Change current location"), default=False,
- help_text=_("The treatment change the current location."))
+ _("Change current location"),
+ default=False,
+ help_text=_("The treatment change the current location."),
+ )
restore_reference_location = models.BooleanField(
- _("Restore the reference location"), default=False,
- help_text=_("The treatment change restore reference location to the "
- "current location."))
+ _("Restore the reference location"),
+ default=False,
+ help_text=_(
+ "The treatment change restore reference location to the "
+ "current location."
+ ),
+ )
class Meta:
verbose_name = _("Treatment type")
verbose_name_plural = _("Treatment types")
- ordering = ('order', 'label',)
+ ordering = (
+ "order",
+ "label",
+ )
@classmethod
- def get_types(cls, dct=None, instances=False, exclude=None,
- empty_first=True, default=None, initial=None, force=False,
- full_hierarchy=False):
+ def get_types(
+ cls,
+ dct=None,
+ instances=False,
+ exclude=None,
+ empty_first=True,
+ default=None,
+ initial=None,
+ force=False,
+ full_hierarchy=False,
+ ):
types = super(TreatmentType, cls).get_types(
- dct=dct, instances=instances, exclude=exclude,
- empty_first=empty_first, default=default, initial=initial,
- force=force, full_hierarchy=full_hierarchy)
+ dct=dct,
+ instances=instances,
+ exclude=exclude,
+ empty_first=empty_first,
+ default=default,
+ initial=initial,
+ force=force,
+ full_hierarchy=full_hierarchy,
+ )
if dct and not exclude:
rank = 0
if instances:
@@ -167,7 +225,7 @@ class IntegrityType(GeneralType):
class Meta:
verbose_name = _("Integrity / interest type")
verbose_name_plural = _("Integrity / interest types")
- ordering = ('label',)
+ ordering = ("label",)
post_save.connect(post_save_cache, sender=IntegrityType)
@@ -178,7 +236,7 @@ class RemarkabilityType(GeneralType):
class Meta:
verbose_name = _("Remarkability type")
verbose_name_plural = _("Remarkability types")
- ordering = ('label',)
+ ordering = ("label",)
post_save.connect(post_save_cache, sender=RemarkabilityType)
@@ -187,10 +245,11 @@ post_delete.connect(post_save_cache, sender=RemarkabilityType)
class BatchType(GeneralType):
order = models.IntegerField(_("Order"), default=10)
+
class Meta:
verbose_name = _("Batch type")
verbose_name_plural = _("Batch types")
- ordering = ('order',)
+ ordering = ("order",)
post_save.connect(post_save_cache, sender=BatchType)
@@ -201,7 +260,10 @@ class ObjectType(HierarchicalType):
class Meta:
verbose_name = _("Object type")
verbose_name_plural = _("Object types")
- ordering = ('parent__label', 'label',)
+ ordering = (
+ "parent__label",
+ "label",
+ )
post_save.connect(post_save_cache, sender=ObjectType)
@@ -214,7 +276,7 @@ class ObjectTypeQualityType(GeneralType):
class Meta:
verbose_name = _("Object type quality type")
verbose_name_plural = _("Object type quality types")
- ordering = ('order',)
+ ordering = ("order",)
post_save.connect(post_save_cache, sender=ObjectTypeQualityType)
@@ -225,7 +287,10 @@ class AlterationType(HierarchicalType):
class Meta:
verbose_name = _("Alteration type")
verbose_name_plural = _("Alteration types")
- ordering = ('parent__label', 'label',)
+ ordering = (
+ "parent__label",
+ "label",
+ )
post_save.connect(post_save_cache, sender=AlterationType)
@@ -236,7 +301,10 @@ class AlterationCauseType(HierarchicalType):
class Meta:
verbose_name = _("Alteration cause type")
verbose_name_plural = _("Alteration cause types")
- ordering = ('parent__label', 'label',)
+ ordering = (
+ "parent__label",
+ "label",
+ )
post_save.connect(post_save_cache, sender=AlterationCauseType)
@@ -247,7 +315,7 @@ class TreatmentEmergencyType(GeneralType):
class Meta:
verbose_name = _("Treatment emergency type")
verbose_name_plural = _("Treatment emergency types")
- ordering = ('label',)
+ ordering = ("label",)
post_save.connect(post_save_cache, sender=TreatmentEmergencyType)
@@ -258,7 +326,10 @@ class CommunicabilityType(HierarchicalType):
class Meta:
verbose_name = _("Communicability type")
verbose_name_plural = _("Communicability types")
- ordering = ('parent__label', 'label',)
+ ordering = (
+ "parent__label",
+ "label",
+ )
post_save.connect(post_save_cache, sender=CommunicabilityType)
@@ -271,7 +342,7 @@ class CheckedType(GeneralType):
class Meta:
verbose_name = _("Checked type")
verbose_name_plural = _("Checked types")
- ordering = ('order',)
+ ordering = ("order",)
post_save.connect(post_save_cache, sender=CheckedType)
@@ -298,51 +369,71 @@ class BFBulkView(object):
"""
-class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
- CompleteIdentifierItem, OwnPerms, ValueGetter):
- EXTERNAL_ID_KEY = 'base_find_external_id'
- EXTERNAL_ID_DEPENDENCIES = ['find']
- SLUG = 'basefind'
+class BaseFind(
+ BulkUpdatedItem,
+ BaseHistorizedItem,
+ GeoItem,
+ CompleteIdentifierItem,
+ OwnPerms,
+ ValueGetter,
+):
+ EXTERNAL_ID_KEY = "base_find_external_id"
+ EXTERNAL_ID_DEPENDENCIES = ["find"]
+ SLUG = "basefind"
uuid = models.UUIDField(default=uuid.uuid4)
label = models.TextField(_("Free ID"))
external_id = models.TextField(_("External ID"), blank=True, default="")
auto_external_id = models.BooleanField(
- _("External ID is set automatically"), default=False)
- excavation_id = models.TextField(
- _("Excavation ID"), blank=True, default="")
- description = models.TextField(
- _("Description"), blank=True, default="")
+ _("External ID is set automatically"), default=False
+ )
+ excavation_id = models.TextField(_("Excavation ID"), blank=True, default="")
+ description = models.TextField(_("Description"), blank=True, default="")
comment = models.TextField(_("Comment"), blank=True, default="")
special_interest = models.CharField(
- _("Special interest"), blank=True, default="", max_length=120)
+ _("Special interest"), blank=True, default="", max_length=120
+ )
context_record = models.ForeignKey(
- ContextRecord, related_name='base_finds',
- verbose_name=_("Context Record"))
- discovery_date = models.DateField(_("Discovery date (exact or TPQ)"),
- blank=True, null=True)
- discovery_date_taq = models.DateField(_("Discovery date (TAQ)"),
- blank=True, null=True)
- batch = models.ForeignKey(BatchType, verbose_name=_("Batch/object"),
- on_delete=models.SET_NULL,
- blank=True, null=True)
+ ContextRecord, related_name="base_finds", verbose_name=_("Context Record")
+ )
+ discovery_date = models.DateField(
+ _("Discovery date (exact or TPQ)"), blank=True, null=True
+ )
+ discovery_date_taq = models.DateField(
+ _("Discovery date (TAQ)"), blank=True, null=True
+ )
+ batch = models.ForeignKey(
+ BatchType,
+ verbose_name=_("Batch/object"),
+ on_delete=models.SET_NULL,
+ blank=True,
+ null=True,
+ )
index = models.IntegerField("Index", default=0)
material_index = models.IntegerField(_("Material index"), default=0)
topographic_localisation = models.CharField(
- _("Point of topographic reference"), blank=True, null=True,
- max_length=120)
+ _("Point of topographic reference"), blank=True, null=True, max_length=120
+ )
# gis
line = models.LineStringField(_("Line"), blank=True, null=True)
cache_short_id = models.TextField(
- _("Short ID"), blank=True, default="", db_index=True,
- help_text=_("Cached value - do not edit"))
+ _("Short ID"),
+ blank=True,
+ default="",
+ db_index=True,
+ help_text=_("Cached value - do not edit"),
+ )
cache_complete_id = models.TextField(
- _("Complete ID"), blank=True, default="", db_index=True,
- help_text=_("Cached value - do not edit"))
+ _("Complete ID"),
+ blank=True,
+ default="",
+ db_index=True,
+ help_text=_("Cached value - do not edit"),
+ )
history = HistoricalRecords()
- RELATED_POST_PROCESS = ['find']
- CACHED_LABELS = ['cache_short_id', 'cache_complete_id']
+ RELATED_POST_PROCESS = ["find"]
+ CACHED_LABELS = ["cache_short_id", "cache_complete_id"]
CACHED_COMPLETE_ID = "cache_complete_id"
- PARENT_SEARCH_VECTORS = ['context_record']
+ PARENT_SEARCH_VECTORS = ["context_record"]
BASE_SEARCH_VECTORS = [
SearchVectorConfig("label"),
SearchVectorConfig("description", "local"),
@@ -364,14 +455,14 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
("delete_own_basefind", "Can delete own Base find"),
)
indexes = [
- GinIndex(fields=['data']),
+ GinIndex(fields=["data"]),
]
def __str__(self):
return self.label
def natural_key(self):
- return (self.uuid, )
+ return (self.uuid,)
def regenerate_external_id(self):
self.skip_history_when_saving = True
@@ -386,24 +477,27 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
def public_representation(self):
dct = super(BaseFind, self).public_representation()
- dct.update({
- "context-record": self.context_record.public_representation(),
- "description": self.description,
- "comment": self.comment,
- "discovery-date": self.discovery_date and
- self.discovery_date.strftime("%Y/%m/%d"),
- "discovery-date-taq": self.discovery_date_taq and
- self.discovery_date_taq.strftime("%Y/%m/%d"),
- "discovery-date-tpq": self.discovery_date_taq and
- self.discovery_date and
- self.discovery_date.strftime("%Y/%m/%d"),
- })
+ dct.update(
+ {
+ "context-record": self.context_record.public_representation(),
+ "description": self.description,
+ "comment": self.comment,
+ "discovery-date": self.discovery_date
+ and self.discovery_date.strftime("%Y/%m/%d"),
+ "discovery-date-taq": self.discovery_date_taq
+ and self.discovery_date_taq.strftime("%Y/%m/%d"),
+ "discovery-date-tpq": self.discovery_date_taq
+ and self.discovery_date
+ and self.discovery_date.strftime("%Y/%m/%d"),
+ }
+ )
return dct
- def get_values(self, prefix='', no_values=False, filtr=None, **kwargs):
+ def get_values(self, prefix="", no_values=False, filtr=None, **kwargs):
no_find = kwargs.get("no_find", False)
values = super(BaseFind, self).get_values(
- prefix=prefix, no_values=no_values, filtr=filtr, **kwargs)
+ prefix=prefix, no_values=no_values, filtr=filtr, **kwargs
+ )
if not filtr or prefix + "complete_id" in filtr:
values[prefix + "complete_id"] = self.complete_id()
if no_find:
@@ -412,7 +506,7 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
kwargs["no_base_finds"] = True
values[prefix + "finds"] = [
find.get_values(no_values=True, filtr=None, **kwargs)
- for find in self.find.order_by('pk').all()
+ for find in self.find.order_by("pk").all()
]
return values
@@ -425,7 +519,7 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
"""
Get the last find which is not related to many base_find
"""
- for find in self.find.order_by('-pk'):
+ for find in self.find.order_by("-pk"):
if find.base_finds.count() == 1:
return find
return
@@ -452,49 +546,48 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
"""
Generate index based on operation or context record (based on
the configuration)
-
+
:return: True if index has been changed.
"""
profile = get_current_profile()
- if profile.find_index == 'O':
+ if profile.find_index == "O":
operation = self.context_record.operation
- q = BaseFind.objects \
- .filter(context_record__operation=operation)
- elif profile.find_index == 'CR':
+ q = BaseFind.objects.filter(context_record__operation=operation)
+ elif profile.find_index == "CR":
cr = self.context_record
- q = BaseFind.objects \
- .filter(context_record=cr)
+ q = BaseFind.objects.filter(context_record=cr)
else:
return False
if self.pk:
q = q.exclude(pk=self.pk)
if q.count():
- self.index = q.aggregate(Max('index'))['index__max'] + 1
+ self.index = q.aggregate(Max("index"))["index__max"] + 1
else:
self.index = 1
return True
def _ope_code(self):
if not self.context_record.operation:
- return ''
+ return ""
profile = get_current_profile()
ope = self.context_record.operation
c_id = []
if ope.code_patriarche:
- c_id.append(profile.operation_prefix +
- ope.code_patriarche)
+ c_id.append(profile.operation_prefix + ope.code_patriarche)
elif ope.year and ope.operation_code:
c_id.append(
- profile.default_operation_prefix +
- str(ope.year or '') + "-" +
- str(ope.operation_code or ''))
+ profile.default_operation_prefix
+ + str(ope.year or "")
+ + "-"
+ + str(ope.operation_code or "")
+ )
else:
- c_id.append('')
+ c_id.append("")
return settings.JOINT.join(c_id)
def complete_id(self):
profile = get_current_profile()
- if profile.has_overload('basefind_complete_id'):
+ if profile.has_overload("basefind_complete_id"):
return ALTERNATE_CONFIGS[profile.config].basefind_complete_id(self)
# OPE|MAT.CODE|UE|FIND_index
c_id = [self._ope_code()]
@@ -504,12 +597,15 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
for mat in find.material_types.all():
if mat.code:
materials.add(mat.code)
- c_id.append('-'.join(sorted(list(materials))))
+ c_id.append("-".join(sorted(list(materials))))
c_id.append(self.context_record.label)
- c_id.append(('{:0' + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + 'd}'
- ).format(self.index))
+ c_id.append(
+ ("{:0" + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + "d}").format(
+ self.index
+ )
+ )
return settings.JOINT.join(c_id)
def _generate_cache_complete_id(self):
@@ -517,12 +613,15 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
def short_id(self):
profile = get_current_profile()
- if profile.has_overload('basefind_short_id'):
+ if profile.has_overload("basefind_short_id"):
return ALTERNATE_CONFIGS[profile.config].basefind_short_id(self)
# OPE|FIND_index
c_id = [self._ope_code()]
- c_id.append(('{:0' + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + 'd}'
- ).format(self.index))
+ c_id.append(
+ ("{:0" + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + "d}").format(
+ self.index
+ )
+ )
return settings.JOINT.join(c_id)
def _generate_cache_short_id(self):
@@ -533,24 +632,32 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
def material_type_label(self):
find = self.get_last_find()
- finds = [find and find.material_type.code or '']
+ finds = [find and find.material_type.code or ""]
ope = self.context_record.operation
- finds += [ope.code_patriarche or
- (str(ope.year) + "-" + ope.operation_code)]
+ finds += [ope.code_patriarche or (str(ope.year) + "-" + ope.operation_code)]
finds += [self.context_record.label, str(self.material_index)]
return settings.JOINT.join(finds)
def _real_label(self):
- if not self.context_record.parcel \
- or not self.context_record.operation \
- or not self.context_record.operation.code_patriarche:
+ if (
+ not self.context_record.parcel
+ or not self.context_record.operation
+ or not self.context_record.operation.code_patriarche
+ ):
return
find = self.get_last_find()
lbl = find.label or self.label
return settings.JOINT.join(
- [it for it in (
- self.context_record.operation.code_patriarche,
- self.context_record.label, lbl) if it])
+ [
+ it
+ for it in (
+ self.context_record.operation.code_patriarche,
+ self.context_record.label,
+ lbl,
+ )
+ if it
+ ]
+ )
def _temp_label(self):
if not self.context_record.parcel:
@@ -558,19 +665,33 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
find = self.get_last_find()
lbl = find.label or self.label
return settings.JOINT.join(
- [str(it) for it in (
- self.context_record.parcel.year, self.index,
- self.context_record.label, lbl) if it])
+ [
+ str(it)
+ for it in (
+ self.context_record.parcel.year,
+ self.index,
+ self.context_record.label,
+ lbl,
+ )
+ if it
+ ]
+ )
@property
def name(self):
return self.label
@classmethod
- def cached_label_bulk_update(cls, operation_id=None, parcel_id=None,
- context_record_id=None, transaction_id=None):
+ def cached_label_bulk_update(
+ cls,
+ operation_id=None,
+ parcel_id=None,
+ context_record_id=None,
+ transaction_id=None,
+ ):
transaction_id, is_recursion = cls.bulk_recursion(
- transaction_id, [operation_id, parcel_id, context_record_id])
+ transaction_id, [operation_id, parcel_id, context_record_id]
+ )
if is_recursion:
return
@@ -580,23 +701,23 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
ON acr.operation_id = %s AND acr.id = mybf.context_record_id
"""
args = [int(operation_id)]
- kwargs = {'operation_id': operation_id}
+ kwargs = {"operation_id": operation_id}
elif parcel_id:
filters = """
INNER JOIN archaeological_context_records_contextrecord acr
ON acr.parcel_id = %s AND acr.id = mybf.context_record_id
"""
args = [int(parcel_id)]
- kwargs = {'parcel_id': parcel_id}
+ kwargs = {"parcel_id": parcel_id}
elif context_record_id:
filters = """
WHERE mybf.context_record_id = %s
"""
args = [int(context_record_id)]
- kwargs = {'context_record_id': context_record_id}
+ kwargs = {"context_record_id": context_record_id}
else:
return
- kwargs['transaction_id'] = transaction_id
+ kwargs["transaction_id"] = transaction_id
profile = get_current_profile()
@@ -672,10 +793,13 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, GeoItem,
SELECT mybf.id FROM archaeological_finds_basefind mybf
{filters}
);
- """.format(main_ope_prefix=profile.operation_prefix,
- ope_prefix=profile.default_operation_prefix,
- join=settings.JOINT, filters=filters,
- zeros=settings.ISHTAR_FINDS_INDEX_ZERO_LEN * "0")
+ """.format(
+ main_ope_prefix=profile.operation_prefix,
+ ope_prefix=profile.default_operation_prefix,
+ join=settings.JOINT,
+ filters=filters,
+ zeros=settings.ISHTAR_FINDS_INDEX_ZERO_LEN * "0",
+ )
with connection.cursor() as c:
c.execute(sql, args)
Find.cached_label_bulk_update(**kwargs)
@@ -689,37 +813,43 @@ def post_save_basefind(sender, **kwargs):
post_save.connect(post_save_basefind, sender=BaseFind)
-WEIGHT_UNIT = (('g', _("g")),
- ('kg', _("kg")),)
+WEIGHT_UNIT = (
+ ("g", _("g")),
+ ("kg", _("kg")),
+)
class FindBasket(Basket, MainItem, ValueGetter):
- SHOW_URL = 'show-findbasket'
+ SHOW_URL = "show-findbasket"
SLUG = "findbasket"
- items = models.ManyToManyField('Find', blank=True, related_name='basket')
+ items = models.ManyToManyField("Find", blank=True, related_name="basket")
QUICK_ACTIONS = [
QuickAction(
- url="findbasket-qa-duplicate", icon_class="fa fa-clone",
- text=_("Duplicate"), target="one",
- rights=['view_find', 'view_own_find']),
+ url="findbasket-qa-duplicate",
+ icon_class="fa fa-clone",
+ text=_("Duplicate"),
+ target="one",
+ rights=["view_find", "view_own_find"],
+ ),
]
class Meta:
verbose_name = _("Basket")
- ordering = ('label', )
+ ordering = ("label",)
permissions = (
("view_find", "Can view all Finds"),
("view_own_find", "Can view own Find"),
)
- def get_values(self, prefix='', no_values=False, filtr=None, **kwargs):
+ def get_values(self, prefix="", no_values=False, filtr=None, **kwargs):
base_exclude = kwargs["exclude"][:] if "exclude" in kwargs else []
base_exclude.append(prefix + "items")
kw = kwargs.copy()
kw["exclude"] = base_exclude
values = super(FindBasket, self).get_values(
- prefix=prefix, no_values=no_values, filtr=filtr, **kw)
+ prefix=prefix, no_values=no_values, filtr=filtr, **kw
+ )
if not filtr or prefix + "items" in filtr:
values[prefix + "items"] = [
item.get_values(no_values=True, filtr=filtr, **kwargs)
@@ -739,30 +869,53 @@ class FindBasket(Basket, MainItem, ValueGetter):
ishtaruser = request.user.ishtaruser
actions = []
if self.user == ishtaruser or ishtaruser.pk in [
- user.pk for user in self.shared_write_with.all()]:
+ user.pk for user in self.shared_write_with.all()
+ ]:
actions = [
- (reverse("select_itemsinbasket", args=[self.pk]),
- _("Manage basket"),
- "fa fa-shopping-basket", "", "", False),
+ (
+ reverse("select_itemsinbasket", args=[self.pk]),
+ _("Manage basket"),
+ "fa fa-shopping-basket",
+ "",
+ "",
+ False,
+ ),
]
- can_edit_find = self.can_do(request, 'change_find')
+ can_edit_find = self.can_do(request, "change_find")
if can_edit_find:
actions += [
- (reverse('findbasket-add-treatment', args=[self.pk]),
- _("Add treatment"), "fa fa-flask", "", "", False),
+ (
+ reverse("findbasket-add-treatment", args=[self.pk]),
+ _("Add treatment"),
+ "fa fa-flask",
+ "",
+ "",
+ False,
+ ),
]
if self.can_do(request, "add_treatmentfile"):
actions += [
- (reverse('findbasket-add-treatmentfile', args=[self.pk]),
- _("Add treatment file"), "fa fa-file-text-o", "", "", False),
+ (
+ reverse("findbasket-add-treatmentfile", args=[self.pk]),
+ _("Add treatment file"),
+ "fa fa-file-text-o",
+ "",
+ "",
+ False,
+ ),
]
if can_edit_find:
duplicate = self.get_quick_action_by_url("findbasket-qa-duplicate")
actions += [
- (reverse(duplicate.url, args=[self.pk]),
- duplicate.text, duplicate.icon_class,
- "", "", True),
+ (
+ reverse(duplicate.url, args=[self.pk]),
+ duplicate.text,
+ duplicate.icon_class,
+ "",
+ "",
+ True,
+ ),
]
return actions
@@ -813,129 +966,153 @@ def query_loan(is_true=True):
:return: (filter, exclude, extra)
"""
if is_true:
- return Q(container_ref__isnull=False, container__isnull=False), \
- Q(container_ref=F('container')), None
+ return (
+ Q(container_ref__isnull=False, container__isnull=False),
+ Q(container_ref=F("container")),
+ None,
+ )
else:
- return Q(container_ref__isnull=False, container__isnull=False,
- container_ref=F('container')), None, None
+ return (
+ Q(
+ container_ref__isnull=False,
+ container__isnull=False,
+ container_ref=F("container"),
+ ),
+ None,
+ None,
+ )
-class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
- CompleteIdentifierItem, OwnPerms, MainItem):
- SLUG = 'find'
+class Find(
+ BulkUpdatedItem,
+ ValueGetter,
+ DocumentItem,
+ BaseHistorizedItem,
+ CompleteIdentifierItem,
+ OwnPerms,
+ MainItem,
+):
+ SLUG = "find"
APP = "archaeological-finds"
MODEL = "find"
- SHOW_URL = 'show-find'
- DELETE_URL = 'delete-find'
- EXTERNAL_ID_KEY = 'find_external_id'
- TABLE_COLS = ['external_id', 'label',
- 'base_finds__context_record__town__name',
- 'base_finds__context_record__operation__common_name',
- 'base_finds__context_record__label',
- 'cached_materials', 'cached_object_types',
- 'cached_periods',
- 'container__cached_label']
- if settings.COUNTRY == 'fr':
- TABLE_COLS.insert(
- 3, 'base_finds__context_record__operation__code_patriarche')
+ SHOW_URL = "show-find"
+ DELETE_URL = "delete-find"
+ EXTERNAL_ID_KEY = "find_external_id"
+ TABLE_COLS = [
+ "external_id",
+ "label",
+ "base_finds__context_record__town__name",
+ "base_finds__context_record__operation__common_name",
+ "base_finds__context_record__label",
+ "cached_materials",
+ "cached_object_types",
+ "cached_periods",
+ "container__cached_label",
+ ]
+ if settings.COUNTRY == "fr":
+ TABLE_COLS.insert(3, "base_finds__context_record__operation__code_patriarche")
TABLE_COLS_FOR_OPE = [
- 'base_finds__cache_short_id',
- 'base_finds__cache_complete_id',
- 'previous_id', 'label',
- 'cached_materials',
- 'cached_periods',
- 'find_number',
- 'cached_object_types',
- 'container__cached_label',
- 'container_ref__cached_label',
- 'description',
- 'base_finds__context_record__town__name',
- 'base_finds__context_record__parcel', ]
+ "base_finds__cache_short_id",
+ "base_finds__cache_complete_id",
+ "previous_id",
+ "label",
+ "cached_materials",
+ "cached_periods",
+ "find_number",
+ "cached_object_types",
+ "container__cached_label",
+ "container_ref__cached_label",
+ "description",
+ "base_finds__context_record__town__name",
+ "base_finds__context_record__parcel",
+ ]
TABLE_COLS_FOR_CR = [
- 'base_finds__cache_short_id',
- 'base_finds__cache_complete_id',
- 'previous_id', 'label',
- 'base_finds__context_record__label',
- 'cached_materials',
- 'cached_periods',
- 'find_number',
- 'cached_object_types',
- 'container__cached_label',
- 'container_ref__cached_label',
- 'description',
- 'base_finds__context_record__town__name',
- 'base_finds__context_record__parcel', ]
+ "base_finds__cache_short_id",
+ "base_finds__cache_complete_id",
+ "previous_id",
+ "label",
+ "base_finds__context_record__label",
+ "cached_materials",
+ "cached_periods",
+ "find_number",
+ "cached_object_types",
+ "container__cached_label",
+ "container_ref__cached_label",
+ "description",
+ "base_finds__context_record__town__name",
+ "base_finds__context_record__parcel",
+ ]
NEW_QUERY_ENGINE = True
COL_LABELS = {
- 'base_finds__context_record__label': _("Context record"),
- 'base_finds__cache_short_id': _("Base find - Short ID"),
- 'base_finds__cache_complete_id': _("Base find - Complete ID"),
- 'base_finds__context_record__operation__code_patriarche': _(
- "Operation (code)"
- ),
- 'base_finds__context_record__town__name': _("Town"),
- 'base_finds__context_record__operation__common_name': _(
- "Operation (name)"
- ),
- 'base_finds__context_record__archaeological_site__name':
- IshtarSiteProfile.get_default_site_label,
- 'base_finds__context_record__parcel': _("Parcel"),
- 'base_finds__batch': _("Batch"),
- 'base_finds__comment': _("Base find - Comment"),
- 'base_finds__description': _("Base find - Description"),
- 'base_finds__topographic_localisation': _("Base find - "
- "Topographic localisation"),
- 'base_finds__special_interest': _("Base find - Special interest"),
- 'base_finds__discovery_date': _(
- "Base find - Discovery date (exact or TPQ)"),
- 'base_finds__discovery_date_taq': _(
- "Base find - Discovery date (TAQ)"),
- 'container__cached_label': _("Current container"),
- 'container_ref__cached_label': _("Reference container"),
- 'datings__period__label': _("Periods"),
- 'cached_periods': _("Periods"),
- 'material_types__label': _("Material types"),
- 'cached_materials': _("Material types"),
- 'object_types__label': _("Object types"),
- 'cached_object_types': _("Object types"),
+ "base_finds__context_record__label": _("Context record"),
+ "base_finds__cache_short_id": _("Base find - Short ID"),
+ "base_finds__cache_complete_id": _("Base find - Complete ID"),
+ "base_finds__context_record__operation__code_patriarche": _("Operation (code)"),
+ "base_finds__context_record__town__name": _("Town"),
+ "base_finds__context_record__operation__common_name": _("Operation (name)"),
+ "base_finds__context_record__archaeological_site__name": IshtarSiteProfile.get_default_site_label,
+ "base_finds__context_record__parcel": _("Parcel"),
+ "base_finds__batch": _("Batch"),
+ "base_finds__comment": _("Base find - Comment"),
+ "base_finds__description": _("Base find - Description"),
+ "base_finds__topographic_localisation": _(
+ "Base find - " "Topographic localisation"
+ ),
+ "base_finds__special_interest": _("Base find - Special interest"),
+ "base_finds__discovery_date": _("Base find - Discovery date (exact or TPQ)"),
+ "base_finds__discovery_date_taq": _("Base find - Discovery date (TAQ)"),
+ "container__cached_label": _("Current container"),
+ "container_ref__cached_label": _("Reference container"),
+ "datings__period__label": _("Periods"),
+ "cached_periods": _("Periods"),
+ "material_types__label": _("Material types"),
+ "cached_materials": _("Material types"),
+ "object_types__label": _("Object types"),
+ "cached_object_types": _("Object types"),
}
EXTRA_FULL_FIELDS = [
- 'datings',
- 'base_finds__cache_short_id', 'base_finds__cache_complete_id',
- 'base_finds__comment', 'base_finds__description',
- 'base_finds__topographic_localisation',
- 'base_finds__special_interest',
- 'base_finds__discovery_date',
- 'base_finds__discovery_date_taq',
+ "datings",
+ "base_finds__cache_short_id",
+ "base_finds__cache_complete_id",
+ "base_finds__comment",
+ "base_finds__description",
+ "base_finds__topographic_localisation",
+ "base_finds__special_interest",
+ "base_finds__discovery_date",
+ "base_finds__discovery_date_taq",
]
- ATTRS_EQUIV = {'get_first_base_find': 'base_finds'}
+ ATTRS_EQUIV = {"get_first_base_find": "base_finds"}
# statistics
- STATISTIC_MODALITIES_OPTIONS = OrderedDict([
- ("base_finds__context_record__operation__operation_type__label",
- _("Operation type")),
- ('base_finds__context_record__operation__year', _("Year")),
- ("base_finds__context_record__operation__towns__areas__label",
- _("Area")),
- ("base_finds__context_record__operation__towns__areas__parent__label",
- _("Extended area")),
- ("datings__period__label", _("Period")),
- ("material_types__label", _("Material type")),
- ("object_types__label", _("Object type")),
- ("preservation_to_considers__label", _("Recommended treatments")),
- ("conservatory_state__label", _("Conservatory state")),
- ("integrities__label", _("Integrity / interest")),
- ("remarkabilities__label", _("Remarkability")),
- ("communicabilities__label", _("Communicability")),
- ("checked_type__label", _("Check")),
- ("alterations__label", _("Alteration")),
- ("alteration_causes__label", _("Alteration cause")),
- ("treatment_emergency__label", _("Treatment emergency")),
- ("documents__source_type__label", _("Associated document type")),
- ])
- STATISTIC_MODALITIES = [
- key for key, lbl in STATISTIC_MODALITIES_OPTIONS.items()]
+ STATISTIC_MODALITIES_OPTIONS = OrderedDict(
+ [
+ (
+ "base_finds__context_record__operation__operation_type__label",
+ _("Operation type"),
+ ),
+ ("base_finds__context_record__operation__year", _("Year")),
+ ("base_finds__context_record__operation__towns__areas__label", _("Area")),
+ (
+ "base_finds__context_record__operation__towns__areas__parent__label",
+ _("Extended area"),
+ ),
+ ("datings__period__label", _("Period")),
+ ("material_types__label", _("Material type")),
+ ("object_types__label", _("Object type")),
+ ("preservation_to_considers__label", _("Recommended treatments")),
+ ("conservatory_state__label", _("Conservatory state")),
+ ("integrities__label", _("Integrity / interest")),
+ ("remarkabilities__label", _("Remarkability")),
+ ("communicabilities__label", _("Communicability")),
+ ("checked_type__label", _("Check")),
+ ("alterations__label", _("Alteration")),
+ ("alteration_causes__label", _("Alteration cause")),
+ ("treatment_emergency__label", _("Treatment emergency")),
+ ("documents__source_type__label", _("Associated document type")),
+ ]
+ )
+ STATISTIC_MODALITIES = [key for key, lbl in STATISTIC_MODALITIES_OPTIONS.items()]
STATISTIC_SUM_VARIABLE = OrderedDict(
(
("pk", (_("Number"), 1)),
@@ -946,65 +1123,54 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
)
# search parameters
- REVERSED_BOOL_FIELDS = ['documents__image__isnull',
- 'documents__associated_url__isnull',
- 'documents__associated_file__isnull']
- BOOL_FIELDS = ['is_complete']
+ REVERSED_BOOL_FIELDS = [
+ "documents__image__isnull",
+ "documents__associated_url__isnull",
+ "documents__associated_file__isnull",
+ ]
+ BOOL_FIELDS = ["is_complete"]
RELATION_TYPES_PREFIX = {
- 'ope_relation_types':
- 'base_finds__context_record__operation__',
- 'cr_relation_types':
- 'base_finds__context_record__',
+ "ope_relation_types": "base_finds__context_record__operation__",
+ "cr_relation_types": "base_finds__context_record__",
}
DATED_FIELDS = [
- 'last_modified__gte',
- 'treatments__file__end_date__lte',
- 'treatments__end_date__lte',
- 'base_finds__discovery_date__lte',
- 'base_finds__discovery_date__gte',
- 'base_finds__discovery_date_tpq__lte',
- 'base_finds__discovery_date_tpq__gte',
- 'base_finds__discovery_date_taq__lte',
- 'base_finds__discovery_date_taq__gte',
- 'check_date__lte',
- 'check_date__gte',
- 'appraisal_date__lte',
- 'appraisal_date__gte',
+ "last_modified__gte",
+ "treatments__file__end_date__lte",
+ "treatments__end_date__lte",
+ "base_finds__discovery_date__lte",
+ "base_finds__discovery_date__gte",
+ "base_finds__discovery_date_tpq__lte",
+ "base_finds__discovery_date_tpq__gte",
+ "base_finds__discovery_date_taq__lte",
+ "base_finds__discovery_date_taq__gte",
+ "check_date__lte",
+ "check_date__gte",
+ "appraisal_date__lte",
+ "appraisal_date__gte",
]
- BASE_REQUEST = {'downstream_treatment__isnull': True}
+ BASE_REQUEST = {"downstream_treatment__isnull": True}
EXTRA_REQUEST_KEYS = {
- 'all_base_finds__context_record':
- 'base_finds__context_record__context_record_tree_parent__cr_parent_id',
- 'base_finds__context_record':
- 'base_finds__context_record__pk',
- 'base_finds__context_record__archaeological_site':
- 'base_finds__context_record__archaeological_site__pk',
- 'archaeological_sites_context_record':
- 'base_finds__context_record__archaeological_site__pk',
- 'base_finds__context_record__operation__year':
- 'base_finds__context_record__operation__year__contains',
- 'base_finds__context_record__operation':
- 'base_finds__context_record__operation__pk',
- 'base_finds__context_record__operation__operation_type':
- 'base_finds__context_record__operation__operation_type__pk',
- 'archaeological_sites':
- 'base_finds__context_record__operation__archaeological_sites__pk',
- 'base_finds__context_record__operation__code_patriarche':
- 'base_finds__context_record__operation__code_patriarche',
- 'base_finds__context_record__town__areas':
- 'base_finds__context_record__town__areas__pk',
- 'base_finds__context_record__archaeological_site__name':
- 'base_finds__context_record__archaeological_site__name',
- 'datings__period': 'datings__period__pk',
- 'description': 'description__icontains',
- 'base_finds__batch': 'base_finds__batch',
- 'basket_id': 'basket__pk',
- 'denomination': 'denomination',
- 'cached_label': 'cached_label__icontains',
- 'documents__image__isnull': 'documents__image__isnull',
- 'container__location': 'container__location__pk',
- 'container_ref__location': 'container_ref__location__pk',
+ "all_base_finds__context_record": "base_finds__context_record__context_record_tree_parent__cr_parent_id",
+ "base_finds__context_record": "base_finds__context_record__pk",
+ "base_finds__context_record__archaeological_site": "base_finds__context_record__archaeological_site__pk",
+ "archaeological_sites_context_record": "base_finds__context_record__archaeological_site__pk",
+ "base_finds__context_record__operation__year": "base_finds__context_record__operation__year__contains",
+ "base_finds__context_record__operation": "base_finds__context_record__operation__pk",
+ "base_finds__context_record__operation__operation_type": "base_finds__context_record__operation__operation_type__pk",
+ "archaeological_sites": "base_finds__context_record__operation__archaeological_sites__pk",
+ "base_finds__context_record__operation__code_patriarche": "base_finds__context_record__operation__code_patriarche",
+ "base_finds__context_record__town__areas": "base_finds__context_record__town__areas__pk",
+ "base_finds__context_record__archaeological_site__name": "base_finds__context_record__archaeological_site__name",
+ "datings__period": "datings__period__pk",
+ "description": "description__icontains",
+ "base_finds__batch": "base_finds__batch",
+ "basket_id": "basket__pk",
+ "denomination": "denomination",
+ "cached_label": "cached_label__icontains",
+ "documents__image__isnull": "documents__image__isnull",
+ "container__location": "container__location__pk",
+ "container_ref__location": "container_ref__location__pk",
}
for table in (TABLE_COLS, TABLE_COLS_FOR_OPE):
for key in table:
@@ -1013,421 +1179,373 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
# alternative names of fields for searches
ALT_NAMES = {
- 'base_finds__cache_short_id':
- SearchAltName(pgettext_lazy("key for text search", "short-id"),
- 'base_finds__cache_short_id__iexact'),
- 'base_finds__cache_complete_id':
- SearchAltName(pgettext_lazy("key for text search", "complete-id"),
- 'base_finds__cache_complete_id__iexact'),
- 'label':
- SearchAltName(pgettext_lazy("key for text search", "free-id"),
- 'label__iexact'),
- 'denomination':
- SearchAltName(pgettext_lazy("key for text search", "denomination"),
- 'denomination__iexact'),
- 'base_finds__context_record__town':
- SearchAltName(
- pgettext_lazy("key for text search", "town"),
- 'base_finds__context_record__town__cached_label__iexact'),
- 'base_finds__context_record__operation__year':
- SearchAltName(pgettext_lazy("key for text search", "year"),
- 'base_finds__context_record__operation__year'),
- 'base_finds__context_record__operation__operation_code':
- SearchAltName(
- pgettext_lazy("key for text search", "operation-code"),
- 'base_finds__context_record__operation__operation_code'),
- 'base_finds__context_record__operation__code_patriarche':
- SearchAltName(
- pgettext_lazy("key for text search", "code-patriarche"),
- 'base_finds__context_record__operation__code_patriarche__iexact'
- ),
- 'base_finds__context_record__operation__operation_type':
- SearchAltName(
- pgettext_lazy("key for text search", "operation-type"),
- 'base_finds__context_record__operation__operation_type'
- '__label__iexact'),
- 'base_finds__context_record__town__areas':
- SearchAltName(
- pgettext_lazy("key for text search", "area"),
- 'base_finds__context_record__town__areas__label__iexact'),
- 'archaeological_sites':
- SearchAltName(
- pgettext_lazy("key for text search", "site"),
- 'base_finds__context_record__operation__archaeological_sites__'
- 'cached_label__icontains'),
- 'archaeological_sites_name':
- SearchAltName(
- pgettext_lazy("key for text search", "site-name"),
- 'base_finds__context_record__operation__archaeological_sites__'
- 'name__iexact'),
- 'archaeological_sites_context_record':
- SearchAltName(
- pgettext_lazy("key for text search", "context-record-site"),
- 'base_finds__context_record__archaeological_site__'
- 'cached_label__icontains'),
- 'archaeological_sites_context_record_name':
- SearchAltName(
- pgettext_lazy("key for text search",
- "context-record-site-name"),
- 'base_finds__context_record__archaeological_site__'
- 'name__iexact'),
- 'base_finds__context_record':
- SearchAltName(
- pgettext_lazy("key for text search", "context-record"),
- 'base_finds__context_record__cached_label__icontains'),
- 'ope_relation_types':
- SearchAltName(
- pgettext_lazy("key for text search", "operation-relation-type"),
- 'ope_relation_types'),
- 'cr_relation_types':
- SearchAltName(
- pgettext_lazy("key for text search",
- "context-record-relation-type"),
- 'cr_relation_types'),
- 'material_types':
- SearchAltName(
- pgettext_lazy("key for text search", "material"),
- 'material_types__label__iexact'),
- 'object_types':
- SearchAltName(
- pgettext_lazy("key for text search", "object-type"),
- 'object_types__label__iexact'),
- 'preservation_to_considers':
- SearchAltName(
- pgettext_lazy("key for text search", "recommended-treatments"),
- 'preservation_to_considers__label__iexact'),
- 'conservatory_state':
- SearchAltName(
- pgettext_lazy("key for text search", "conservatory"),
- 'conservatory_state__label__iexact'),
- 'integrities':
- SearchAltName(
- pgettext_lazy("key for text search", "integrity"),
- 'integrities__label__iexact'),
- 'remarkabilities':
- SearchAltName(
- pgettext_lazy("key for text search", "remarkability"),
- 'remarkabilities__label__iexact'),
- 'description':
- SearchAltName(
- pgettext_lazy("key for text search", "description"),
- 'description__iexact'),
- 'base_finds__batch':
- SearchAltName(
- pgettext_lazy("key for text search", "batch"),
- 'base_finds__batch__label__iexact'),
- 'checked_type':
- SearchAltName(
- pgettext_lazy("key for text search", "checked"),
- 'checked_type__label__iexact'),
- 'container_ref':
- SearchAltName(
- pgettext_lazy("key for text search", "container"),
- 'container_ref__cached_label__iexact'),
- 'container_ref__location':
- SearchAltName(
- pgettext_lazy("key for text search", "location"),
- 'container_ref__location__name__iexact'),
- 'container__location':
- SearchAltName(
- pgettext_lazy("key for text search", "current-location"),
- 'container__location__name__iexact'),
- 'container':
- SearchAltName(
- pgettext_lazy("key for text search", "current-container"),
- 'container__cached_label__iexact'),
- 'basket':
- SearchAltName(
- pgettext_lazy("key for text search", "basket"),
- 'basket__label__iexact'),
- 'base_finds__context_record__operation__cached_label':
- SearchAltName(
- pgettext_lazy("key for text search", "operation"),
- 'base_finds__context_record__operation__cached_label__icontains'
- ),
- 'history_modifier':
- SearchAltName(
- pgettext_lazy("key for text search", "last-modified-by"),
- 'history_modifier__ishtaruser__person__cached_label__icontains'
- ),
- 'history_creator':
- SearchAltName(
- pgettext_lazy("key for text search", "created-by"),
- 'history_creator__ishtaruser__person__cached_label__iexact'
- ),
- 'loan':
- SearchAltName(
- pgettext_lazy("key for text search", "loan"), query_loan),
- 'treatments_file_end_date':
- SearchAltName(
- pgettext_lazy("key for text search",
- "treatment-file-end-date-before"),
- 'treatments__file__end_date__lte'),
- 'treatments_end_date':
- SearchAltName(
- pgettext_lazy("key for text search",
- "treatment-end-date-before"),
- 'treatments__end_date__lte'),
- 'previous_id':
- SearchAltName(
- pgettext_lazy("key for text search", "previous-id"),
- 'previous_id__iexact'),
+ "base_finds__cache_short_id": SearchAltName(
+ pgettext_lazy("key for text search", "short-id"),
+ "base_finds__cache_short_id__iexact",
+ ),
+ "base_finds__cache_complete_id": SearchAltName(
+ pgettext_lazy("key for text search", "complete-id"),
+ "base_finds__cache_complete_id__iexact",
+ ),
+ "label": SearchAltName(
+ pgettext_lazy("key for text search", "free-id"), "label__iexact"
+ ),
+ "denomination": SearchAltName(
+ pgettext_lazy("key for text search", "denomination"), "denomination__iexact"
+ ),
+ "base_finds__context_record__town": SearchAltName(
+ pgettext_lazy("key for text search", "town"),
+ "base_finds__context_record__town__cached_label__iexact",
+ ),
+ "base_finds__context_record__operation__year": SearchAltName(
+ pgettext_lazy("key for text search", "year"),
+ "base_finds__context_record__operation__year",
+ ),
+ "base_finds__context_record__operation__operation_code": SearchAltName(
+ pgettext_lazy("key for text search", "operation-code"),
+ "base_finds__context_record__operation__operation_code",
+ ),
+ "base_finds__context_record__operation__code_patriarche": SearchAltName(
+ pgettext_lazy("key for text search", "code-patriarche"),
+ "base_finds__context_record__operation__code_patriarche__iexact",
+ ),
+ "base_finds__context_record__operation__operation_type": SearchAltName(
+ pgettext_lazy("key for text search", "operation-type"),
+ "base_finds__context_record__operation__operation_type" "__label__iexact",
+ ),
+ "base_finds__context_record__town__areas": SearchAltName(
+ pgettext_lazy("key for text search", "area"),
+ "base_finds__context_record__town__areas__label__iexact",
+ ),
+ "archaeological_sites": SearchAltName(
+ pgettext_lazy("key for text search", "site"),
+ "base_finds__context_record__operation__archaeological_sites__"
+ "cached_label__icontains",
+ ),
+ "archaeological_sites_name": SearchAltName(
+ pgettext_lazy("key for text search", "site-name"),
+ "base_finds__context_record__operation__archaeological_sites__"
+ "name__iexact",
+ ),
+ "archaeological_sites_context_record": SearchAltName(
+ pgettext_lazy("key for text search", "context-record-site"),
+ "base_finds__context_record__archaeological_site__"
+ "cached_label__icontains",
+ ),
+ "archaeological_sites_context_record_name": SearchAltName(
+ pgettext_lazy("key for text search", "context-record-site-name"),
+ "base_finds__context_record__archaeological_site__" "name__iexact",
+ ),
+ "base_finds__context_record": SearchAltName(
+ pgettext_lazy("key for text search", "context-record"),
+ "base_finds__context_record__cached_label__icontains",
+ ),
+ "ope_relation_types": SearchAltName(
+ pgettext_lazy("key for text search", "operation-relation-type"),
+ "ope_relation_types",
+ ),
+ "cr_relation_types": SearchAltName(
+ pgettext_lazy("key for text search", "context-record-relation-type"),
+ "cr_relation_types",
+ ),
+ "material_types": SearchAltName(
+ pgettext_lazy("key for text search", "material"),
+ "material_types__label__iexact",
+ ),
+ "object_types": SearchAltName(
+ pgettext_lazy("key for text search", "object-type"),
+ "object_types__label__iexact",
+ ),
+ "preservation_to_considers": SearchAltName(
+ pgettext_lazy("key for text search", "recommended-treatments"),
+ "preservation_to_considers__label__iexact",
+ ),
+ "conservatory_state": SearchAltName(
+ pgettext_lazy("key for text search", "conservatory"),
+ "conservatory_state__label__iexact",
+ ),
+ "integrities": SearchAltName(
+ pgettext_lazy("key for text search", "integrity"),
+ "integrities__label__iexact",
+ ),
+ "remarkabilities": SearchAltName(
+ pgettext_lazy("key for text search", "remarkability"),
+ "remarkabilities__label__iexact",
+ ),
+ "description": SearchAltName(
+ pgettext_lazy("key for text search", "description"), "description__iexact"
+ ),
+ "base_finds__batch": SearchAltName(
+ pgettext_lazy("key for text search", "batch"),
+ "base_finds__batch__label__iexact",
+ ),
+ "checked_type": SearchAltName(
+ pgettext_lazy("key for text search", "checked"),
+ "checked_type__label__iexact",
+ ),
+ "container_ref": SearchAltName(
+ pgettext_lazy("key for text search", "container"),
+ "container_ref__cached_label__iexact",
+ ),
+ "container_ref__location": SearchAltName(
+ pgettext_lazy("key for text search", "location"),
+ "container_ref__location__name__iexact",
+ ),
+ "container__location": SearchAltName(
+ pgettext_lazy("key for text search", "current-location"),
+ "container__location__name__iexact",
+ ),
+ "container": SearchAltName(
+ pgettext_lazy("key for text search", "current-container"),
+ "container__cached_label__iexact",
+ ),
+ "basket": SearchAltName(
+ pgettext_lazy("key for text search", "basket"), "basket__label__iexact"
+ ),
+ "base_finds__context_record__operation__cached_label": SearchAltName(
+ pgettext_lazy("key for text search", "operation"),
+ "base_finds__context_record__operation__cached_label__icontains",
+ ),
+ "history_modifier": SearchAltName(
+ pgettext_lazy("key for text search", "last-modified-by"),
+ "history_modifier__ishtaruser__person__cached_label__icontains",
+ ),
+ "history_creator": SearchAltName(
+ pgettext_lazy("key for text search", "created-by"),
+ "history_creator__ishtaruser__person__cached_label__iexact",
+ ),
+ "loan": SearchAltName(pgettext_lazy("key for text search", "loan"), query_loan),
+ "treatments_file_end_date": SearchAltName(
+ pgettext_lazy("key for text search", "treatment-file-end-date-before"),
+ "treatments__file__end_date__lte",
+ ),
+ "treatments_end_date": SearchAltName(
+ pgettext_lazy("key for text search", "treatment-end-date-before"),
+ "treatments__end_date__lte",
+ ),
+ "previous_id": SearchAltName(
+ pgettext_lazy("key for text search", "previous-id"), "previous_id__iexact"
+ ),
#'collection':
# SearchAltName(
# pgettext_lazy("key for text search", "collection"),
# 'collection__name__iexact'),
- 'seal_number':
- SearchAltName(
- pgettext_lazy("key for text search", "seal-number"),
- 'seal_number__iexact'),
- 'base_finds__excavation_id':
- SearchAltName(
- pgettext_lazy("key for text search", "excavation-id"),
- 'base_finds__excavation_id__iexact'),
- 'museum_id':
- SearchAltName(
- pgettext_lazy("key for text search", "museum-id"),
- 'museum_id__iexact'),
- 'laboratory_id':
- SearchAltName(
- pgettext_lazy("key for text search", "laboratory-id"),
- 'laboratory_id__iexact'),
- 'mark':
- SearchAltName(
- pgettext_lazy("key for text search", "mark"),
- 'mark__iexact'),
- 'base_finds__discovery_date__before':
- SearchAltName(
- pgettext_lazy("key for text search", "discovery-date-before"),
- 'base_finds__discovery_date__lte'),
- 'base_finds__discovery_date__after':
- SearchAltName(
- pgettext_lazy("key for text search", "discovery-date-after"),
- 'base_finds__discovery_date__gte'),
- 'base_finds__discovery_date_tpq__before':
- SearchAltName(
- pgettext_lazy("key for text search",
- "discovery-date-tpq-before"),
- 'base_finds__discovery_date_tpq__lte'),
- 'base_finds__discovery_date_tpq__after':
- SearchAltName(
- pgettext_lazy("key for text search",
- "discovery-date-tpq-after"),
- 'base_finds__discovery_date_tpq__gte'),
- 'base_finds__discovery_date_taq__before':
- SearchAltName(
- pgettext_lazy("key for text search",
- "discovery-date-taq-before"),
- 'base_finds__discovery_date_taq__lte'),
- 'base_finds__discovery_date_taq__after':
- SearchAltName(
- pgettext_lazy("key for text search",
- "discovery-date-taq-after"),
- 'base_finds__discovery_date_taq__gte'),
- 'is_complete':
- SearchAltName(
- pgettext_lazy("key for text search", "is-complete"),
- 'is_complete'),
- 'material_type_quality':
- SearchAltName(
- pgettext_lazy("key for text search", "material-type-quality"),
- 'material_type_quality__label__iexact'),
- 'object_type_quality':
- SearchAltName(
- pgettext_lazy("key for text search", "object-type-quality"),
- 'object_type_quality__label__iexact'),
- 'find_number':
- SearchAltName(
- pgettext_lazy("key for text search", "find-number"),
- 'find_number'),
- 'min_number_of_individuals':
- SearchAltName(
- pgettext_lazy("key for text search",
- "min-number-of-individuals"),
- 'min_number_of_individuals'),
- 'decoration':
- SearchAltName(
- pgettext_lazy("key for text search", "decoration"),
- 'decoration__iexact'),
- 'inscription':
- SearchAltName(
- pgettext_lazy("key for text search", "inscription"),
- 'inscription__iexact'),
- 'manufacturing_place':
- SearchAltName(
- pgettext_lazy("key for text search", "manufacturing-place"),
- 'manufacturing_place__iexact'),
- 'communicabilities':
- SearchAltName(
- pgettext_lazy("key for text search", "communicabilities"),
- 'communicabilities__label__iexact'),
- 'comment':
- SearchAltName(
- pgettext_lazy("key for text search", "comment"),
- 'comment__iexact'),
- 'material_comment':
- SearchAltName(
- pgettext_lazy("key for text search", "material-comment"),
- 'material_comment__iexact'),
- 'dating_comment':
- SearchAltName(
- pgettext_lazy("key for text search", "dating-comment"),
- 'dating_comment__iexact'),
- 'conservatory_comment':
- SearchAltName(
- pgettext_lazy("key for text search", "conservatory-comment"),
- 'conservatory_comment__iexact'),
- 'length__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "length-lower"),
- 'length__lte'),
- 'width__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "width-lower"),
- 'width__lte'),
- 'height__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "height-lower"),
- 'height__lte'),
- 'thickness__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "thickness-lower"),
- 'thickness__lte'),
- 'diameter__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "diameter-lower"),
- 'diameter__lte'),
- 'circumference__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "circumference-lower"),
- 'circumference__lte'),
- 'volume__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "volume-lower"),
- 'volume__lte'),
- 'weight__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "weight-lower"),
- 'weight__lte'),
- 'clutter_long_side__lower':
- SearchAltName(
- pgettext_lazy("key for text search",
- "clutter-long-side-lower"),
- 'clutter_long_side__lte'),
- 'clutter_short_side__lower':
- SearchAltName(
- pgettext_lazy("key for text search",
- "clutter-short-side-lower"),
- 'clutter_short_side__lte'),
- 'clutter_height__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "clutter-height-lower"),
- 'clutter_height__lte'),
- 'length__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "length-higher"),
- 'length__gte'),
- 'width__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "width-higher"),
- 'width__gte'),
- 'height__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "height-higher"),
- 'height__gte'),
- 'thickness__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "thickness-higher"),
- 'thickness__gte'),
- 'diameter__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "diameter-higher"),
- 'diameter__gte'),
- 'circumference__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "circumference-higher"),
- 'circumference__gte'),
- 'volume__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "volume-higher"),
- 'volume__gte'),
- 'weight__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "weight-higher"),
- 'weight__gte'),
- 'clutter_long_side__higher':
- SearchAltName(
- pgettext_lazy("key for text search",
- "clutter-long-side-higher"),
- 'clutter_long_side__gte'),
- 'clutter_short_side__higher':
- SearchAltName(
- pgettext_lazy("key for text search",
- "clutter-short-side-higher"),
- 'clutter_short_side__gte'),
- 'clutter_height__higher':
- SearchAltName(
- pgettext_lazy("key for text search", "clutter-height-higher"),
- 'clutter_height__gte'),
-
- 'dimensions_comment':
- SearchAltName(
- pgettext_lazy("key for text search", "dimensions-comment"),
- 'dimensions_comment__icontains'),
- 'base_finds__topographic_localisation':
- SearchAltName(
- pgettext_lazy("key for text search",
- "topographic-localisation"),
- 'base_finds__topographic_localisation__iexact'),
- 'check_date__before':
- SearchAltName(
- pgettext_lazy("key for text search", "check-date-before"),
- 'check_date__lte'),
- 'check_date__after':
- SearchAltName(
- pgettext_lazy("key for text search", "check-date-after"),
- 'check_date__gte'),
- 'alterations':
- SearchAltName(
- pgettext_lazy("key for text search", "alterations"),
- 'alterations__label__iexact'),
- 'alteration_causes':
- SearchAltName(
- pgettext_lazy("key for text search", "alteration-causes"),
- 'alteration_causes__label__iexact'),
- 'treatment_emergency':
- SearchAltName(
- pgettext_lazy("key for text search", "treatment-emergency"),
- 'treatment_emergency__label__iexact'),
- 'estimated_value__higher':
- SearchAltName(
- pgettext_lazy("key for text search",
- "estimated-value-higher"),
- 'estimated_value__gte'),
- 'estimated_value__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "estimated-value-lower"),
- 'estimated_value__lte'),
- 'insurance_value__higher':
- SearchAltName(
- pgettext_lazy("key for text search",
- "insurance-value-higher"),
- 'insurance_value__gte'),
- 'insurance_value__lower':
- SearchAltName(
- pgettext_lazy("key for text search", "insurance-value-lower"),
- 'insurance_value__lte'),
- 'appraisal_date__before':
- SearchAltName(
- pgettext_lazy("key for text search", "appraisal-date-before"),
- 'appraisal_date__lte'),
- 'appraisal_date__after':
- SearchAltName(
- pgettext_lazy("key for text search", "appraisal-date-after"),
- 'appraisal_date__gte'),
- 'cultural_attributions': SearchAltName(
+ "seal_number": SearchAltName(
+ pgettext_lazy("key for text search", "seal-number"), "seal_number__iexact"
+ ),
+ "base_finds__excavation_id": SearchAltName(
+ pgettext_lazy("key for text search", "excavation-id"),
+ "base_finds__excavation_id__iexact",
+ ),
+ "museum_id": SearchAltName(
+ pgettext_lazy("key for text search", "museum-id"), "museum_id__iexact"
+ ),
+ "laboratory_id": SearchAltName(
+ pgettext_lazy("key for text search", "laboratory-id"),
+ "laboratory_id__iexact",
+ ),
+ "mark": SearchAltName(
+ pgettext_lazy("key for text search", "mark"), "mark__iexact"
+ ),
+ "base_finds__discovery_date__before": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-before"),
+ "base_finds__discovery_date__lte",
+ ),
+ "base_finds__discovery_date__after": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-after"),
+ "base_finds__discovery_date__gte",
+ ),
+ "base_finds__discovery_date_tpq__before": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-tpq-before"),
+ "base_finds__discovery_date_tpq__lte",
+ ),
+ "base_finds__discovery_date_tpq__after": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-tpq-after"),
+ "base_finds__discovery_date_tpq__gte",
+ ),
+ "base_finds__discovery_date_taq__before": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-taq-before"),
+ "base_finds__discovery_date_taq__lte",
+ ),
+ "base_finds__discovery_date_taq__after": SearchAltName(
+ pgettext_lazy("key for text search", "discovery-date-taq-after"),
+ "base_finds__discovery_date_taq__gte",
+ ),
+ "is_complete": SearchAltName(
+ pgettext_lazy("key for text search", "is-complete"), "is_complete"
+ ),
+ "material_type_quality": SearchAltName(
+ pgettext_lazy("key for text search", "material-type-quality"),
+ "material_type_quality__label__iexact",
+ ),
+ "object_type_quality": SearchAltName(
+ pgettext_lazy("key for text search", "object-type-quality"),
+ "object_type_quality__label__iexact",
+ ),
+ "find_number": SearchAltName(
+ pgettext_lazy("key for text search", "find-number"), "find_number"
+ ),
+ "min_number_of_individuals": SearchAltName(
+ pgettext_lazy("key for text search", "min-number-of-individuals"),
+ "min_number_of_individuals",
+ ),
+ "decoration": SearchAltName(
+ pgettext_lazy("key for text search", "decoration"), "decoration__iexact"
+ ),
+ "inscription": SearchAltName(
+ pgettext_lazy("key for text search", "inscription"), "inscription__iexact"
+ ),
+ "manufacturing_place": SearchAltName(
+ pgettext_lazy("key for text search", "manufacturing-place"),
+ "manufacturing_place__iexact",
+ ),
+ "communicabilities": SearchAltName(
+ pgettext_lazy("key for text search", "communicabilities"),
+ "communicabilities__label__iexact",
+ ),
+ "comment": SearchAltName(
+ pgettext_lazy("key for text search", "comment"), "comment__iexact"
+ ),
+ "material_comment": SearchAltName(
+ pgettext_lazy("key for text search", "material-comment"),
+ "material_comment__iexact",
+ ),
+ "dating_comment": SearchAltName(
+ pgettext_lazy("key for text search", "dating-comment"),
+ "dating_comment__iexact",
+ ),
+ "conservatory_comment": SearchAltName(
+ pgettext_lazy("key for text search", "conservatory-comment"),
+ "conservatory_comment__iexact",
+ ),
+ "length__lower": SearchAltName(
+ pgettext_lazy("key for text search", "length-lower"), "length__lte"
+ ),
+ "width__lower": SearchAltName(
+ pgettext_lazy("key for text search", "width-lower"), "width__lte"
+ ),
+ "height__lower": SearchAltName(
+ pgettext_lazy("key for text search", "height-lower"), "height__lte"
+ ),
+ "thickness__lower": SearchAltName(
+ pgettext_lazy("key for text search", "thickness-lower"), "thickness__lte"
+ ),
+ "diameter__lower": SearchAltName(
+ pgettext_lazy("key for text search", "diameter-lower"), "diameter__lte"
+ ),
+ "circumference__lower": SearchAltName(
+ pgettext_lazy("key for text search", "circumference-lower"),
+ "circumference__lte",
+ ),
+ "volume__lower": SearchAltName(
+ pgettext_lazy("key for text search", "volume-lower"), "volume__lte"
+ ),
+ "weight__lower": SearchAltName(
+ pgettext_lazy("key for text search", "weight-lower"), "weight__lte"
+ ),
+ "clutter_long_side__lower": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-long-side-lower"),
+ "clutter_long_side__lte",
+ ),
+ "clutter_short_side__lower": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-short-side-lower"),
+ "clutter_short_side__lte",
+ ),
+ "clutter_height__lower": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-height-lower"),
+ "clutter_height__lte",
+ ),
+ "length__higher": SearchAltName(
+ pgettext_lazy("key for text search", "length-higher"), "length__gte"
+ ),
+ "width__higher": SearchAltName(
+ pgettext_lazy("key for text search", "width-higher"), "width__gte"
+ ),
+ "height__higher": SearchAltName(
+ pgettext_lazy("key for text search", "height-higher"), "height__gte"
+ ),
+ "thickness__higher": SearchAltName(
+ pgettext_lazy("key for text search", "thickness-higher"), "thickness__gte"
+ ),
+ "diameter__higher": SearchAltName(
+ pgettext_lazy("key for text search", "diameter-higher"), "diameter__gte"
+ ),
+ "circumference__higher": SearchAltName(
+ pgettext_lazy("key for text search", "circumference-higher"),
+ "circumference__gte",
+ ),
+ "volume__higher": SearchAltName(
+ pgettext_lazy("key for text search", "volume-higher"), "volume__gte"
+ ),
+ "weight__higher": SearchAltName(
+ pgettext_lazy("key for text search", "weight-higher"), "weight__gte"
+ ),
+ "clutter_long_side__higher": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-long-side-higher"),
+ "clutter_long_side__gte",
+ ),
+ "clutter_short_side__higher": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-short-side-higher"),
+ "clutter_short_side__gte",
+ ),
+ "clutter_height__higher": SearchAltName(
+ pgettext_lazy("key for text search", "clutter-height-higher"),
+ "clutter_height__gte",
+ ),
+ "dimensions_comment": SearchAltName(
+ pgettext_lazy("key for text search", "dimensions-comment"),
+ "dimensions_comment__icontains",
+ ),
+ "base_finds__topographic_localisation": SearchAltName(
+ pgettext_lazy("key for text search", "topographic-localisation"),
+ "base_finds__topographic_localisation__iexact",
+ ),
+ "check_date__before": SearchAltName(
+ pgettext_lazy("key for text search", "check-date-before"), "check_date__lte"
+ ),
+ "check_date__after": SearchAltName(
+ pgettext_lazy("key for text search", "check-date-after"), "check_date__gte"
+ ),
+ "alterations": SearchAltName(
+ pgettext_lazy("key for text search", "alterations"),
+ "alterations__label__iexact",
+ ),
+ "alteration_causes": SearchAltName(
+ pgettext_lazy("key for text search", "alteration-causes"),
+ "alteration_causes__label__iexact",
+ ),
+ "treatment_emergency": SearchAltName(
+ pgettext_lazy("key for text search", "treatment-emergency"),
+ "treatment_emergency__label__iexact",
+ ),
+ "estimated_value__higher": SearchAltName(
+ pgettext_lazy("key for text search", "estimated-value-higher"),
+ "estimated_value__gte",
+ ),
+ "estimated_value__lower": SearchAltName(
+ pgettext_lazy("key for text search", "estimated-value-lower"),
+ "estimated_value__lte",
+ ),
+ "insurance_value__higher": SearchAltName(
+ pgettext_lazy("key for text search", "insurance-value-higher"),
+ "insurance_value__gte",
+ ),
+ "insurance_value__lower": SearchAltName(
+ pgettext_lazy("key for text search", "insurance-value-lower"),
+ "insurance_value__lte",
+ ),
+ "appraisal_date__before": SearchAltName(
+ pgettext_lazy("key for text search", "appraisal-date-before"),
+ "appraisal_date__lte",
+ ),
+ "appraisal_date__after": SearchAltName(
+ pgettext_lazy("key for text search", "appraisal-date-after"),
+ "appraisal_date__gte",
+ ),
+ "cultural_attributions": SearchAltName(
pgettext_lazy("key for text search", "cultural-attribution"),
- 'cultural_attributions__label__iexact'),
+ "cultural_attributions__label__iexact",
+ ),
}
ALT_NAMES.update(BaseHistorizedItem.ALT_NAMES)
ALT_NAMES.update(DocumentItem.ALT_NAMES)
@@ -1457,7 +1575,7 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
}
"""
- PARENT_SEARCH_VECTORS = ['base_finds']
+ PARENT_SEARCH_VECTORS = ["base_finds"]
BASE_SEARCH_VECTORS = [
SearchVectorConfig("cached_label"),
SearchVectorConfig("label"),
@@ -1483,240 +1601,333 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
]
QA_EDIT = QuickAction(
- url="find-qa-bulk-update", icon_class="fa fa-pencil",
- text=_("Bulk update"), target="many",
- rights=['change_find', 'change_own_find'])
+ url="find-qa-bulk-update",
+ icon_class="fa fa-pencil",
+ text=_("Bulk update"),
+ target="many",
+ rights=["change_find", "change_own_find"],
+ )
QA_LOCK = QuickAction(
- url="find-qa-lock", icon_class="fa fa-lock",
- text=_("Lock/Unlock"), target="many",
- rights=['change_find', 'change_own_find']
+ url="find-qa-lock",
+ icon_class="fa fa-lock",
+ text=_("Lock/Unlock"),
+ target="many",
+ rights=["change_find", "change_own_find"],
)
QUICK_ACTIONS = [
QA_EDIT,
QuickAction(
- url="find-qa-duplicate", icon_class="fa fa-clone",
- text=_("Duplicate"), target="one",
- rights=['change_find', 'change_own_find']),
+ url="find-qa-duplicate",
+ icon_class="fa fa-clone",
+ text=_("Duplicate"),
+ target="one",
+ rights=["change_find", "change_own_find"],
+ ),
QuickAction(
- url="find-qa-basket", icon_class="fa fa-shopping-basket",
- text=_("Basket"), target="many",
- rights=['change_find', 'change_own_find']),
+ url="find-qa-basket",
+ icon_class="fa fa-shopping-basket",
+ text=_("Basket"),
+ target="many",
+ rights=["change_find", "change_own_find"],
+ ),
QuickAction(
- url="find-qa-packaging", icon_class="fa fa-gift",
- text=_("Packaging"), target="many",
- rights=['change_find', 'change_own_find'],
- module='warehouse'
+ url="find-qa-packaging",
+ icon_class="fa fa-gift",
+ text=_("Packaging"),
+ target="many",
+ rights=["change_find", "change_own_find"],
+ module="warehouse",
),
- QA_LOCK
+ QA_LOCK,
]
UP_MODEL_QUERY = {
- "operation": (pgettext_lazy("key for text search", "operation"),
- 'cached_label'),
+ "operation": (
+ pgettext_lazy("key for text search", "operation"),
+ "cached_label",
+ ),
"contextrecord": (
pgettext_lazy("key for text search", "context-record"),
- 'cached_label'),
- "warehouse": (
- pgettext_lazy("key for text search", "location"),
- 'name'),
+ "cached_label",
+ ),
+ "warehouse": (pgettext_lazy("key for text search", "location"), "name"),
"site": (
pgettext_lazy("key for text search", "context-record-site"),
- 'cached_label'),
+ "cached_label",
+ ),
}
RELATIVE_SESSION_NAMES = [
- ('contextrecord', 'base_finds__context_record__pk'),
- ('operation', 'base_finds__context_record__operation__pk'),
- ('file', 'base_finds__context_record__operation__associated_file__pk'),
- ('warehouse', 'container__location__pk'),
- ('site', 'base_finds__context_record__archaeological_site__pk')
+ ("contextrecord", "base_finds__context_record__pk"),
+ ("operation", "base_finds__context_record__operation__pk"),
+ ("file", "base_finds__context_record__operation__associated_file__pk"),
+ ("warehouse", "container__location__pk"),
+ ("site", "base_finds__context_record__archaeological_site__pk"),
]
HISTORICAL_M2M = [
- 'material_types', 'datings', 'object_types', 'integrities',
- 'remarkabilities', 'communicabilities', 'preservation_to_considers',
- 'alterations', 'alteration_causes', "cultural_attributions"
+ "material_types",
+ "datings",
+ "object_types",
+ "integrities",
+ "remarkabilities",
+ "communicabilities",
+ "preservation_to_considers",
+ "alterations",
+ "alteration_causes",
+ "cultural_attributions",
]
GET_VALUES_EXTRA_TYPES = ValueGetter.GET_VALUES_EXCLUDE_FIELDS + [
- 'material_types', 'object_types', 'integrities',
- 'remarkabilities', 'communicabilities', 'preservation_to_considers',
- 'alterations', 'alteration_causes'
+ "material_types",
+ "object_types",
+ "integrities",
+ "remarkabilities",
+ "communicabilities",
+ "preservation_to_considers",
+ "alterations",
+ "alteration_causes",
+ ]
+ CACHED_LABELS = [
+ "cached_label",
+ "cached_periods",
+ "cached_object_types",
+ "cached_materials",
]
- CACHED_LABELS = ['cached_label', 'cached_periods',
- 'cached_object_types', 'cached_materials']
objects = UUIDModelManager()
# fields
uuid = models.UUIDField(default=uuid.uuid4)
- base_finds = models.ManyToManyField(BaseFind, verbose_name=_("Base find"),
- related_name='find')
+ base_finds = models.ManyToManyField(
+ BaseFind, verbose_name=_("Base find"), related_name="find"
+ )
external_id = models.TextField(_("External ID"), blank=True, default="")
auto_external_id = models.BooleanField(
- _("External ID is set automatically"), default=False)
+ _("External ID is set automatically"), default=False
+ )
# judiciary operation
seal_number = models.TextField(_("Seal number"), blank=True, default="")
order = models.IntegerField(_("Order"), default=1)
label = models.TextField(_("Free ID"))
- denomination = models.TextField(
- _("Denomination"), blank=True, default="")
- museum_id = models.TextField(
- _("Museum ID"), blank=True, default="")
- laboratory_id = models.TextField(
- _("Laboratory ID"), blank=True, default="")
- description = models.TextField(
- _("Description"), blank=True, default="")
- decoration = models.TextField(
- _("Decoration"), blank=True, default="")
- inscription = models.TextField(
- _("Inscription"), blank=True, default="")
+ denomination = models.TextField(_("Denomination"), blank=True, default="")
+ museum_id = models.TextField(_("Museum ID"), blank=True, default="")
+ laboratory_id = models.TextField(_("Laboratory ID"), blank=True, default="")
+ description = models.TextField(_("Description"), blank=True, default="")
+ decoration = models.TextField(_("Decoration"), blank=True, default="")
+ inscription = models.TextField(_("Inscription"), blank=True, default="")
manufacturing_place = models.TextField(
- _("Manufacturing place"), blank=True, default="")
+ _("Manufacturing place"), blank=True, default=""
+ )
material_types = models.ManyToManyField(
- MaterialType, verbose_name=_("Material types"), related_name='finds',
- blank=True
+ MaterialType, verbose_name=_("Material types"), related_name="finds", blank=True
)
material_type_quality = models.ForeignKey(
MaterialTypeQualityType,
- verbose_name=_("Material type quality"), related_name='finds',
+ verbose_name=_("Material type quality"),
+ related_name="finds",
on_delete=models.SET_NULL,
- blank=True, null=True
+ blank=True,
+ null=True,
)
material_comment = models.TextField(
- _("Comment on the material"), blank=True, default="")
+ _("Comment on the material"), blank=True, default=""
+ )
volume = models.FloatField(_("Volume (l)"), blank=True, null=True)
weight = models.FloatField(_("Weight"), blank=True, null=True)
- weight_unit = models.CharField(_("Weight unit"), max_length=4,
- blank=True, null=True, choices=WEIGHT_UNIT)
+ weight_unit = models.CharField(
+ _("Weight unit"), max_length=4, blank=True, null=True, choices=WEIGHT_UNIT
+ )
find_number = models.IntegerField(_("Find number"), blank=True, null=True)
upstream_treatment = models.ForeignKey(
- "Treatment", blank=True, null=True,
- related_name='downstream', on_delete=models.SET_NULL,
- verbose_name=_("Upstream treatment"))
+ "Treatment",
+ blank=True,
+ null=True,
+ related_name="downstream",
+ on_delete=models.SET_NULL,
+ verbose_name=_("Upstream treatment"),
+ )
downstream_treatment = models.ForeignKey(
- "Treatment", blank=True, null=True, related_name='upstream',
- verbose_name=_("Downstream treatment"), on_delete=models.SET_NULL)
- datings = models.ManyToManyField(Dating, verbose_name=_("Dating"),
- related_name='find')
+ "Treatment",
+ blank=True,
+ null=True,
+ related_name="upstream",
+ verbose_name=_("Downstream treatment"),
+ on_delete=models.SET_NULL,
+ )
+ datings = models.ManyToManyField(
+ Dating, verbose_name=_("Dating"), related_name="find"
+ )
cultural_attributions = models.ManyToManyField(
- CulturalAttributionType, verbose_name=_("Cultural attribution"),
- blank=True)
+ CulturalAttributionType, verbose_name=_("Cultural attribution"), blank=True
+ )
container = models.ForeignKey(
- "archaeological_warehouse.Container", verbose_name=_("Container"),
- blank=True, null=True, related_name='finds', on_delete=models.SET_NULL)
+ "archaeological_warehouse.Container",
+ verbose_name=_("Container"),
+ blank=True,
+ null=True,
+ related_name="finds",
+ on_delete=models.SET_NULL,
+ )
container_ref = models.ForeignKey(
"archaeological_warehouse.Container",
verbose_name=_("Reference container"),
- blank=True, null=True,
- related_name='finds_ref', on_delete=models.SET_NULL)
- is_complete = models.NullBooleanField(_("Is complete?"), blank=True,
- null=True)
+ blank=True,
+ null=True,
+ related_name="finds_ref",
+ on_delete=models.SET_NULL,
+ )
+ is_complete = models.NullBooleanField(_("Is complete?"), blank=True, null=True)
object_types = models.ManyToManyField(
- ObjectType, verbose_name=_("Object types"), related_name='find',
- blank=True
+ ObjectType, verbose_name=_("Object types"), related_name="find", blank=True
)
object_type_quality = models.ForeignKey(
ObjectTypeQualityType,
- verbose_name=_("Object type quality"), related_name='finds',
- on_delete=models.SET_NULL, blank=True, null=True
+ verbose_name=_("Object type quality"),
+ related_name="finds",
+ on_delete=models.SET_NULL,
+ blank=True,
+ null=True,
)
integrities = models.ManyToManyField(
- IntegrityType, verbose_name=_("Integrity / interest"),
- related_name='find', blank=True)
+ IntegrityType,
+ verbose_name=_("Integrity / interest"),
+ related_name="find",
+ blank=True,
+ )
remarkabilities = models.ManyToManyField(
- RemarkabilityType, verbose_name=_("Remarkability"),
- related_name='find', blank=True)
+ RemarkabilityType,
+ verbose_name=_("Remarkability"),
+ related_name="find",
+ blank=True,
+ )
communicabilities = models.ManyToManyField(
- CommunicabilityType, verbose_name=_("Communicability"),
- related_name='find', blank=True)
+ CommunicabilityType,
+ verbose_name=_("Communicability"),
+ related_name="find",
+ blank=True,
+ )
min_number_of_individuals = models.IntegerField(
- _("Minimum number of individuals (MNI)"), blank=True, null=True)
+ _("Minimum number of individuals (MNI)"), blank=True, null=True
+ )
length = models.FloatField(_("Length (cm)"), blank=True, null=True)
width = models.FloatField(_("Width (cm)"), blank=True, null=True)
height = models.FloatField(_("Height (cm)"), blank=True, null=True)
diameter = models.FloatField(_("Diameter (cm)"), blank=True, null=True)
- circumference = models.FloatField(_("Circumference (cm)"), blank=True,
- null=True)
+ circumference = models.FloatField(_("Circumference (cm)"), blank=True, null=True)
thickness = models.FloatField(_("Thickness (cm)"), blank=True, null=True)
clutter_long_side = models.FloatField(
- _("Clutter - long side (cm)"), blank=True, null=True)
+ _("Clutter - long side (cm)"), blank=True, null=True
+ )
clutter_short_side = models.FloatField(
- _("Clutter - short side (cm)"), blank=True, null=True)
+ _("Clutter - short side (cm)"), blank=True, null=True
+ )
clutter_height = models.FloatField(
- _("Clutter - height (cm)"), blank=True, null=True)
+ _("Clutter - height (cm)"), blank=True, null=True
+ )
dimensions_comment = models.TextField(
- _("Dimensions comment"), blank=True, default="")
+ _("Dimensions comment"), blank=True, default=""
+ )
mark = models.TextField(_("Mark"), blank=True, default="")
comment = models.TextField(_("Comment"), blank=True, default="")
- dating_comment = models.TextField(_("Comment on dating"), blank=True,
- default="")
+ dating_comment = models.TextField(_("Comment on dating"), blank=True, default="")
previous_id = models.TextField(_("Previous ID"), blank=True, default="")
index = models.IntegerField("Index", default=0)
- checked_type = models.ForeignKey(CheckedType, verbose_name=_("Check"),
- on_delete=models.SET_NULL,
- blank=True, null=True)
- check_date = models.DateField(_("Check date"),
- default=datetime.date.today)
- estimated_value = models.FloatField(_("Estimated value"), blank=True,
- null=True)
+ checked_type = models.ForeignKey(
+ CheckedType,
+ verbose_name=_("Check"),
+ on_delete=models.SET_NULL,
+ blank=True,
+ null=True,
+ )
+ check_date = models.DateField(_("Check date"), default=datetime.date.today)
+ estimated_value = models.FloatField(_("Estimated value"), blank=True, null=True)
collection = models.ForeignKey(
- "archaeological_warehouse.Warehouse", verbose_name=_("Collection"),
- blank=True, null=True, related_name='finds', on_delete=models.SET_NULL,
+ "archaeological_warehouse.Warehouse",
+ verbose_name=_("Collection"),
+ blank=True,
+ null=True,
+ related_name="finds",
+ on_delete=models.SET_NULL,
help_text=_("Do not use - need evolutions"),
)
# preservation module
conservatory_state = models.ForeignKey(
- ConservatoryState, verbose_name=_("Conservatory state"), blank=True,
- null=True, on_delete=models.SET_NULL)
- conservatory_comment = models.TextField(_("Conservatory comment"),
- blank=True, default="")
+ ConservatoryState,
+ verbose_name=_("Conservatory state"),
+ blank=True,
+ null=True,
+ on_delete=models.SET_NULL,
+ )
+ conservatory_comment = models.TextField(
+ _("Conservatory comment"), blank=True, default=""
+ )
preservation_to_considers = models.ManyToManyField(
TreatmentType,
verbose_name=_("Recommended treatments"),
- related_name='finds_recommended', blank=True)
+ related_name="finds_recommended",
+ blank=True,
+ )
alterations = models.ManyToManyField(
- AlterationType, verbose_name=_("Alteration"), blank=True,
- related_name='finds'
+ AlterationType, verbose_name=_("Alteration"), blank=True, related_name="finds"
)
alteration_causes = models.ManyToManyField(
- AlterationCauseType, verbose_name=_("Alteration cause"), blank=True,
- related_name='finds'
+ AlterationCauseType,
+ verbose_name=_("Alteration cause"),
+ blank=True,
+ related_name="finds",
)
treatment_emergency = models.ForeignKey(
- TreatmentEmergencyType, verbose_name=_("Treatment emergency"),
+ TreatmentEmergencyType,
+ verbose_name=_("Treatment emergency"),
on_delete=models.SET_NULL,
- blank=True, null=True
+ blank=True,
+ null=True,
)
- insurance_value = models.FloatField(_("Insurance value"), blank=True,
- null=True)
- appraisal_date = models.DateField(_("Appraisal date"), blank=True,
- null=True)
+ insurance_value = models.FloatField(_("Insurance value"), blank=True, null=True)
+ appraisal_date = models.DateField(_("Appraisal date"), blank=True, null=True)
public_description = models.TextField(
- _("Public description"), blank=True, default="")
+ _("Public description"), blank=True, default=""
+ )
documents = models.ManyToManyField(
- Document, related_name='finds', verbose_name=_("Documents"),
- blank=True)
+ Document, related_name="finds", verbose_name=_("Documents"), blank=True
+ )
main_image = models.ForeignKey(
- Document, related_name='main_image_finds',
+ Document,
+ related_name="main_image_finds",
on_delete=models.SET_NULL,
- verbose_name=_("Main image"), blank=True, null=True)
+ verbose_name=_("Main image"),
+ blank=True,
+ null=True,
+ )
treatments = models.ManyToManyField(
- "Treatment", verbose_name=_("Treatments"),
- related_name='finds', blank=True,
- help_text=_("Related treatments when no new find is created"))
+ "Treatment",
+ verbose_name=_("Treatments"),
+ related_name="finds",
+ blank=True,
+ help_text=_("Related treatments when no new find is created"),
+ )
cached_label = models.TextField(
- _("Cached name"), blank=True, default="", db_index=True,
- help_text=_("Generated automatically - do not edit")
+ _("Cached name"),
+ blank=True,
+ default="",
+ db_index=True,
+ help_text=_("Generated automatically - do not edit"),
)
cached_periods = models.TextField(
- _("Cached periods label"), blank=True, default="",
- help_text=_("Generated automatically - do not edit")
+ _("Cached periods label"),
+ blank=True,
+ default="",
+ help_text=_("Generated automatically - do not edit"),
)
cached_object_types = models.TextField(
- _("Cached object types label"), blank=True, default="",
- help_text=_("Generated automatically - do not edit")
+ _("Cached object types label"),
+ blank=True,
+ default="",
+ help_text=_("Generated automatically - do not edit"),
)
cached_materials = models.TextField(
- _("Cached material types label"), blank=True, default="",
- help_text=_("Generated automatically - do not edit")
+ _("Cached material types label"),
+ blank=True,
+ default="",
+ help_text=_("Generated automatically - do not edit"),
)
history = HistoricalRecords(bases=[HistoryModel])
BASKET_MODEL = FindBasket
@@ -1731,13 +1942,13 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
("change_own_find", "Can change own Find"),
("delete_own_find", "Can delete own Find"),
)
- ordering = ('cached_label',)
+ ordering = ("cached_label",)
indexes = [
- GinIndex(fields=['data']),
+ GinIndex(fields=["data"]),
]
def natural_key(self):
- return (self.uuid, )
+ return (self.uuid,)
@property
def short_class_name(self):
@@ -1757,10 +1968,14 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@property
def excavation_ids(self):
return " - ".join(
- [base_find['excavation_id']
- for base_find in self.base_finds.values(
- 'excavation_id').order_by('pk').all()
- if base_find['excavation_id']])
+ [
+ base_find["excavation_id"]
+ for base_find in self.base_finds.values("excavation_id")
+ .order_by("pk")
+ .all()
+ if base_find["excavation_id"]
+ ]
+ )
@classmethod
def hierarchic_fields(cls):
@@ -1768,8 +1983,7 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@property
def materials(self):
- return " ; ".join([str(material)
- for material in self.material_types.all()])
+ return " ; ".join([str(material) for material in self.material_types.all()])
def get_first_material_type(self):
model = self.__class__.material_types.through
@@ -1782,35 +1996,38 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@property
def show_url(self):
- return reverse('show-find', args=[self.pk, ''])
+ return reverse("show-find", args=[self.pk, ""])
def public_representation(self):
dct = super(Find, self).public_representation()
- dct.update({
- "denomination": self.denomination,
- "free-id": self.label,
- "description": self.description,
- "public-description": self.public_description,
- "materials": [str(mt) for mt in self.material_types.all()],
- "material-comment": self.material_comment,
- "object-types": [str(ot) for ot in self.object_types.all()],
- "find-number": self.find_number,
- "decoration": self.decoration,
- "inscription": self.inscription,
- "manufacturing-place":self.manufacturing_place,
- "comment": self.comment,
- "length": self.length,
- "width": self.width,
- "height": self.height,
- "thickness": self.thickness,
- "diameter": self.diameter,
- "circumference": self.circumference,
- "volume": self.volume,
- "weight": self.weight,
- "datings": [str(dating) for dating in self.datings.all()],
- "base-finds": [bf.public_representation()
- for bf in self.base_finds.all()]
- })
+ dct.update(
+ {
+ "denomination": self.denomination,
+ "free-id": self.label,
+ "description": self.description,
+ "public-description": self.public_description,
+ "materials": [str(mt) for mt in self.material_types.all()],
+ "material-comment": self.material_comment,
+ "object-types": [str(ot) for ot in self.object_types.all()],
+ "find-number": self.find_number,
+ "decoration": self.decoration,
+ "inscription": self.inscription,
+ "manufacturing-place": self.manufacturing_place,
+ "comment": self.comment,
+ "length": self.length,
+ "width": self.width,
+ "height": self.height,
+ "thickness": self.thickness,
+ "diameter": self.diameter,
+ "circumference": self.circumference,
+ "volume": self.volume,
+ "weight": self.weight,
+ "datings": [str(dating) for dating in self.datings.all()],
+ "base-finds": [
+ bf.public_representation() for bf in self.base_finds.all()
+ ],
+ }
+ )
# images
return dct
@@ -1828,20 +2045,21 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@property
def full_label(self):
lbl = " - ".join(
- getattr(self, attr) for attr in (
- 'label', 'denomination', 'administrative_index')
- if getattr(self, attr))
+ getattr(self, attr)
+ for attr in ("label", "denomination", "administrative_index")
+ if getattr(self, attr)
+ )
base = " - ".join(
base_find.complete_id() for base_find in self.base_finds.all()
)
if base:
- lbl += ' ({})'.format(base)
+ lbl += " ({})".format(base)
return lbl
def get_first_base_find(self):
if not self.base_finds.count():
return
- return self.base_finds.order_by('-pk').all()[0]
+ return self.base_finds.order_by("-pk").all()[0]
DOC_VALUES = [
("base_finds", _("List of associated base finds")),
@@ -1854,8 +2072,9 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
Return pipe separated material type code inside a container
"""
materials = set()
- for material in self.material_types.exclude(
- code__isnull=True).values_list("code", flat=True):
+ for material in self.material_types.exclude(code__isnull=True).values_list(
+ "code", flat=True
+ ):
materials.add(material)
return "|".join(sorted(materials))
@@ -1864,22 +2083,23 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
Return comma separated string of material types inside a container
"""
materials = set()
- for material in self.material_types.exclude(
- label__isnull=True).values_list("label", flat=True):
+ for material in self.material_types.exclude(label__isnull=True).values_list(
+ "label", flat=True
+ ):
materials.add(material)
return ", ".join(sorted(materials))
- def get_values(self, prefix='', no_values=False, filtr=None, **kwargs):
+ def get_values(self, prefix="", no_values=False, filtr=None, **kwargs):
no_base_finds = False
if "no_base_finds" in kwargs:
no_base_finds = kwargs["no_base_finds"]
values = super(Find, self).get_values(
- prefix=prefix, no_values=no_values, filtr=filtr, **kwargs)
- if not filtr or prefix + 'material_types_label' in filtr:
- values[prefix + 'material_types_label'] = self.get_material_types()
- if not filtr or prefix + 'material_types_code' in filtr:
- values[prefix + 'material_types_code'] = \
- self.get_material_types_code()
+ prefix=prefix, no_values=no_values, filtr=filtr, **kwargs
+ )
+ if not filtr or prefix + "material_types_label" in filtr:
+ values[prefix + "material_types_label"] = self.get_material_types()
+ if not filtr or prefix + "material_types_code" in filtr:
+ values[prefix + "material_types_code"] = self.get_material_types_code()
if no_base_finds:
return values
# by default attach first basefind data
@@ -1887,21 +2107,18 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
bf = self.get_first_base_find()
if not bf:
return values
- v = bf.get_values(
- prefix=prefix,
- no_values=True, filtr=filtr, **kwargs)
+ v = bf.get_values(prefix=prefix, no_values=True, filtr=filtr, **kwargs)
v.update(values)
values = v
kwargs["no_find"] = True
values[prefix + "base_finds"] = [
base_find.get_values(no_values=True, filtr=filtr, **kwargs)
- for base_find in self.base_finds.distinct().order_by('-pk').all()
+ for base_find in self.base_finds.distinct().order_by("-pk").all()
]
return values
- def get_values_for_datings(self, prefix=''):
- return [dating.get_values(prefix=prefix)
- for dating in self.datings.all()]
+ def get_values_for_datings(self, prefix=""):
+ return [dating.get_values(prefix=prefix) for dating in self.datings.all()]
@property
def reference(self):
@@ -1921,24 +2138,52 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
actions = super(Find, self).get_extra_actions(request)
is_locked = hasattr(self, "is_locked") and self.is_locked(request.user)
- can_edit_find = self.can_do(request, 'change_find')
+ can_edit_find = self.can_do(request, "change_find")
if can_edit_find and not is_locked:
actions += [
- (reverse("find-qa-duplicate", args=[self.pk]),
- _("Duplicate"), "fa fa-clone", "", "", True),
- (reverse("find-qa-basket", args=[self.pk]),
- _("Add to basket"),
- "fa fa-shopping-basket", "", "", True),
- (reverse('find-add-treatment', args=[self.pk]),
- _("Simple treatment"), "fa fa-flask", "", "", False),
- (reverse('find-add-divide-treatment', args=[self.pk]),
- _("Divide treatment"), "fa fa-scissors", "", "", False),
+ (
+ reverse("find-qa-duplicate", args=[self.pk]),
+ _("Duplicate"),
+ "fa fa-clone",
+ "",
+ "",
+ True,
+ ),
+ (
+ reverse("find-qa-basket", args=[self.pk]),
+ _("Add to basket"),
+ "fa fa-shopping-basket",
+ "",
+ "",
+ True,
+ ),
+ (
+ reverse("find-add-treatment", args=[self.pk]),
+ _("Simple treatment"),
+ "fa fa-flask",
+ "",
+ "",
+ False,
+ ),
+ (
+ reverse("find-add-divide-treatment", args=[self.pk]),
+ _("Divide treatment"),
+ "fa fa-scissors",
+ "",
+ "",
+ False,
+ ),
]
if get_current_profile().warehouse:
actions.append(
- (reverse("find-qa-packaging", args=[self.pk]),
- _("Packaging"),
- "fa fa-gift", "", "", True)
+ (
+ reverse("find-qa-packaging", args=[self.pk]),
+ _("Packaging"),
+ "fa fa-gift",
+ "",
+ "",
+ True,
+ )
)
return actions
@@ -1949,22 +2194,20 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
if not bf:
return "detached/{}".format(self.SLUG)
ope = bf.context_record.operation
- find_idx = '{:0' + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + 'd}'
+ find_idx = "{:0" + str(settings.ISHTAR_FINDS_INDEX_ZERO_LEN) + "d}"
return ("{}/{}/" + find_idx).format(
- ope._get_base_image_path(), self.SLUG, self.index)
+ ope._get_base_image_path(), self.SLUG, self.index
+ )
@property
def administrative_index(self):
profile = get_current_profile()
- if profile.has_overload('find_administrative_index'):
- return ALTERNATE_CONFIGS[profile.config].find_administrative_index(
- self)
+ if profile.has_overload("find_administrative_index"):
+ return ALTERNATE_CONFIGS[profile.config].find_administrative_index(self)
bf = self.get_first_base_find()
if not bf or not bf.context_record or not bf.context_record.operation:
return ""
- return "{}-{}".format(
- bf.context_record.operation.get_reference(),
- self.index)
+ return "{}-{}".format(bf.context_record.operation.get_reference(), self.index)
@property
def operation(self):
@@ -1977,52 +2220,52 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
return " - ".join(
[bf.context_record.cached_label for bf in self.base_finds.all()]
)
+
context_records_lbl.short_description = _("Context record")
- context_records_lbl.admin_order_field = \
- "base_finds__context_record__cached_label"
+ context_records_lbl.admin_order_field = "base_finds__context_record__cached_label"
def operations_lbl(self):
return " - ".join(
- [bf.context_record.operation.cached_label
- for bf in self.base_finds.all()]
+ [bf.context_record.operation.cached_label for bf in self.base_finds.all()]
)
+
operations_lbl.short_description = _("Operation")
- operations_lbl.admin_order_field = \
+ operations_lbl.admin_order_field = (
"base_finds__context_record__operation__cached_label"
+ )
- def _get_treatments(self, model, rel='upstream', limit=None, count=False):
+ def _get_treatments(self, model, rel="upstream", limit=None, count=False):
treatments, findtreats = [], []
- q = model.objects.filter(
- find_id=self.pk).order_by(
- '-treatment__year', '-treatment__index', '-treatment__start_date',
- '-treatment__end_date')
+ q = model.objects.filter(find_id=self.pk).order_by(
+ "-treatment__year",
+ "-treatment__index",
+ "-treatment__start_date",
+ "-treatment__end_date",
+ )
if count:
return q.count()
for findtreat in q.distinct().all():
if findtreat.pk in findtreats:
continue
findtreats.append(findtreat.pk)
- q = getattr(findtreat.treatment, rel).distinct().order_by(
- 'label')
+ q = getattr(findtreat.treatment, rel).distinct().order_by("label")
if limit:
q = q[:limit]
treatments.append((q.all(), findtreat.treatment))
return treatments
def upstream_treatments(self, limit=None):
- from archaeological_finds.models_treatments import \
- FindUpstreamTreatments
- return self._get_treatments(FindUpstreamTreatments, 'upstream',
- limit=limit)
+ from archaeological_finds.models_treatments import FindUpstreamTreatments
+
+ return self._get_treatments(FindUpstreamTreatments, "upstream", limit=limit)
def limited_upstream_treatments(self):
return self.upstream_treatments(15)
def downstream_treatments(self, limit=None):
- from archaeological_finds.models_treatments import \
- FindDownstreamTreatments
- return self._get_treatments(FindDownstreamTreatments, 'downstream',
- limit=limit)
+ from archaeological_finds.models_treatments import FindDownstreamTreatments
+
+ return self._get_treatments(FindDownstreamTreatments, "downstream", limit=limit)
def limited_downstream_treatments(self):
return self.downstream_treatments(15)
@@ -2031,26 +2274,24 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
return self.upstream_treatments() + self.downstream_treatments()
def non_modif_treatments(self, limit=None):
- from archaeological_finds.models_treatments import \
- FindNonModifTreatments
- return self._get_treatments(FindNonModifTreatments, 'finds',
- limit=limit)
+ from archaeological_finds.models_treatments import FindNonModifTreatments
+
+ return self._get_treatments(FindNonModifTreatments, "finds", limit=limit)
def non_modif_treatments_count(self):
- from archaeological_finds.models_treatments import \
- FindNonModifTreatments
- return self._get_treatments(FindNonModifTreatments, 'finds',
- count=True)
+ from archaeological_finds.models_treatments import FindNonModifTreatments
+
+ return self._get_treatments(FindNonModifTreatments, "finds", count=True)
def limited_non_modif_treatments(self):
return self.non_modif_treatments(15)
def associated_treatment_files(self):
- from archaeological_finds.models_treatments import \
- TreatmentFile
+ from archaeological_finds.models_treatments import TreatmentFile
+
return TreatmentFile.objects.filter(
- associated_basket__items__pk=self.pk).order_by(
- 'reception_date', 'creation_date', 'end_date')
+ associated_basket__items__pk=self.pk
+ ).order_by("reception_date", "creation_date", "end_date")
def associated_treatment_files_count(self):
return self.associated_treatment_files().count()
@@ -2074,13 +2315,13 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
return bf.context_record.operation.get_town_label()
@classmethod
- def get_periods(cls, slice='year', fltr=None):
+ def get_periods(cls, slice="year", fltr=None):
if not fltr:
fltr = {}
q = cls.objects
if fltr:
q = q.filter(**fltr)
- if slice == 'year':
+ if slice == "year":
years = set()
finds = q.filter(downstream_treatment__isnull=True)
for find in finds:
@@ -2102,7 +2343,8 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
q = q.filter(**fltr)
return q.filter(
downstream_treatment__isnull=True,
- base_finds__context_record__operation__start_date__year=year)
+ base_finds__context_record__operation__start_date__year=year,
+ )
@classmethod
def get_operations(cls):
@@ -2121,7 +2363,8 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
def get_by_operation(cls, operation_id):
return cls.objects.filter(
downstream_treatment__isnull=True,
- base_finds__context_record__operation__pk=operation_id)
+ base_finds__context_record__operation__pk=operation_id,
+ )
@classmethod
def get_total_number(cls, fltr=None):
@@ -2130,8 +2373,9 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
q = q.filter(**fltr)
return q.filter(downstream_treatment__isnull=True).count()
- def duplicate(self, user, copy_datings=True, duplicate_for_treatment=True,
- data=None):
+ def duplicate(
+ self, user, copy_datings=True, duplicate_for_treatment=True, data=None
+ ):
model = self.__class__
new = model.objects.get(pk=self.pk)
@@ -2150,16 +2394,20 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
setattr(new, k, data[k])
# remove associated treatments
if not duplicate_for_treatment and (
- new.upstream_treatment or new.downstream_treatment):
+ new.upstream_treatment or new.downstream_treatment
+ ):
new.upstream_treatment, new.downstream_treatment = None, None
new.uuid = uuid.uuid4()
new.save()
# m2m fields
- m2m = [field.name for field in model._meta.many_to_many
- if field.name not in PRIVATE_FIELDS]
+ m2m = [
+ field.name
+ for field in model._meta.many_to_many
+ if field.name not in PRIVATE_FIELDS
+ ]
for field in m2m:
- if field == 'datings' and copy_datings:
+ if field == "datings" and copy_datings:
for dating in self.datings.all():
is_present = False
for current_dating in new.datings.all():
@@ -2179,8 +2427,11 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
bf = self.get_first_base_find()
new.base_finds.clear()
if bf:
- new.base_finds.add(bf.duplicate(
- user=user, data={"label": new.label, "external_id": ''}))
+ new.base_finds.add(
+ bf.duplicate(
+ user=user, data={"label": new.label, "external_id": ""}
+ )
+ )
# remove documents for this kind of duplicate (data entry)
new.documents.clear()
# remove associated treatments
@@ -2189,45 +2440,53 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@classmethod
def get_query_owns(cls, ishtaruser):
- q = cls._construct_query_own(
- 'container__location__',
- Warehouse._get_query_owns_dicts(ishtaruser)
- ) | cls._construct_query_own(
- 'container__responsible__',
- Warehouse._get_query_owns_dicts(ishtaruser)
- ) | cls._construct_query_own(
- 'base_finds__context_record__operation__',
- Operation._get_query_owns_dicts(ishtaruser)
- ) | cls._construct_query_own(
- 'basket__',
- [{"shared_with": ishtaruser, "shared_write_with": ishtaruser}]
- ) | cls._construct_query_own('', [
- {'history_creator': ishtaruser.user_ptr},
- {'base_finds__context_record__operation__end_date__isnull': True}
- ])
+ q = (
+ cls._construct_query_own(
+ "container__location__", Warehouse._get_query_owns_dicts(ishtaruser)
+ )
+ | cls._construct_query_own(
+ "container__responsible__", Warehouse._get_query_owns_dicts(ishtaruser)
+ )
+ | cls._construct_query_own(
+ "base_finds__context_record__operation__",
+ Operation._get_query_owns_dicts(ishtaruser),
+ )
+ | cls._construct_query_own(
+ "basket__",
+ [{"shared_with": ishtaruser, "shared_write_with": ishtaruser}],
+ )
+ | cls._construct_query_own(
+ "",
+ [
+ {"history_creator": ishtaruser.user_ptr},
+ {"base_finds__context_record__operation__end_date__isnull": True},
+ ],
+ )
+ )
return q
@classmethod
- def get_owns(cls, user, menu_filtr=None, limit=None,
- values=None, get_short_menu_class=None):
+ def get_owns(
+ cls, user, menu_filtr=None, limit=None, values=None, get_short_menu_class=None
+ ):
replace_query = None
- if menu_filtr and 'contextrecord' in menu_filtr:
- replace_query = Q(
- base_finds__context_record=menu_filtr['contextrecord']
- )
+ if menu_filtr and "contextrecord" in menu_filtr:
+ replace_query = Q(base_finds__context_record=menu_filtr["contextrecord"])
owns = super(Find, cls).get_owns(
- user, replace_query=replace_query, limit=limit, values=values,
- get_short_menu_class=get_short_menu_class)
+ user,
+ replace_query=replace_query,
+ limit=limit,
+ values=values,
+ get_short_menu_class=get_short_menu_class,
+ )
return cls._return_get_owns(owns, values, get_short_menu_class)
def _generate_cached_label(self):
self.cached_label_bulk_update(find_id=self.pk)
- return Find.objects.filter(pk=self.pk).values(
- 'cached_label')[0]['cached_label']
+ return Find.objects.filter(pk=self.pk).values("cached_label")[0]["cached_label"]
def _generate_cached_periods(self):
- return " & ".join([dating.period.label
- for dating in self.datings.all()])
+ return " & ".join([dating.period.label for dating in self.datings.all()])
def _generate_cached_object_types(self):
return " & ".join([str(obj) for obj in self.object_types.all()])
@@ -2237,11 +2496,16 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
@classmethod
def cached_label_bulk_update(
- cls, operation_id=None, parcel_id=None, context_record_id=None,
- find_id=None, transaction_id=None):
+ cls,
+ operation_id=None,
+ parcel_id=None,
+ context_record_id=None,
+ find_id=None,
+ transaction_id=None,
+ ):
transaction_id, is_recursion = cls.bulk_recursion(
- transaction_id, [operation_id, parcel_id, context_record_id,
- find_id])
+ transaction_id, [operation_id, parcel_id, context_record_id, find_id]
+ )
if is_recursion:
return
@@ -2287,7 +2551,9 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
if profile.find_use_index:
index = """|| '-' ||
to_char(find_cached_bulk_update.index, 'fm{zeros}')
- """.format(zeros=settings.ISHTAR_FINDS_INDEX_ZERO_LEN * "0")
+ """.format(
+ zeros=settings.ISHTAR_FINDS_INDEX_ZERO_LEN * "0"
+ )
sql = """
UPDATE "archaeological_finds_find" AS f
@@ -2322,10 +2588,13 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
SELECT myf.id FROM archaeological_finds_find myf
{filters}
);
- """.format(main_ope_prefix=profile.operation_prefix,
- ope_prefix=profile.default_operation_prefix,
- join=settings.JOINT, filters=filters,
- index=index)
+ """.format(
+ main_ope_prefix=profile.operation_prefix,
+ ope_prefix=profile.default_operation_prefix,
+ join=settings.JOINT,
+ filters=filters,
+ index=index,
+ )
with connection.cursor() as c:
c.execute(sql, args)
@@ -2421,8 +2690,7 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
def localisation_9(self):
return self.get_localisation(8)
- def set_localisation(self, place, context, value, is_ref=False,
- static=False):
+ def set_localisation(self, place, context, value, is_ref=False, static=False):
"""
Get localisation reference in the warehouse
@@ -2443,240 +2711,269 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
return
if is_ref:
raise ImporterError(
-                _("No reference container has been set - the "
-                  "localisation cannot be set."))
+                _(
+                    "No reference container has been set - the "
+ "localisation cannot be set."
+ )
+ )
else:
raise ImporterError(
-                _("No container has been set - the localisation cannot "
-                  "be set."))
+                _("No container has been set - the localisation cannot " "be set.")
+ )
localisation, error = container.set_localisation(
- place, value, static=static, return_errors=True)
+ place, value, static=static, return_errors=True
+ )
if error:
raise ImporterError(error)
@post_importer_action
def set_reference_localisation_1(self, context, value):
return self.set_localisation(0, context, value, is_ref=True)
+
set_reference_localisation_1.post_save = True
@post_importer_action
def set_reference_localisation_2(self, context, value):
return self.set_localisation(1, context, value, is_ref=True)
+
set_reference_localisation_2.post_save = True
@post_importer_action
def set_reference_localisation_3(self, context, value):
return self.set_localisation(2, context, value, is_ref=True)
+
set_reference_localisation_3.post_save = True
@post_importer_action
def set_reference_localisation_4(self, context, value):
return self.set_localisation(3, context, value, is_ref=True)
+
set_reference_localisation_4.post_save = True
@post_importer_action
def set_reference_localisation_5(self, context, value):
return self.set_localisation(4, context, value, is_ref=True)
+
set_reference_localisation_5.post_save = True
@post_importer_action
def set_reference_localisation_6(self, context, value):
return self.set_localisation(5, context, value, is_ref=True)
+
set_reference_localisation_6.post_save = True
@post_importer_action
def set_reference_localisation_7(self, context, value):
return self.set_localisation(6, context, value, is_ref=True)
+
set_reference_localisation_7.post_save = True
@post_importer_action
def set_reference_localisation_8(self, context, value):
return self.set_localisation(7, context, value, is_ref=True)
+
set_reference_localisation_8.post_save = True
@post_importer_action
def set_reference_localisation_9(self, context, value):
return self.set_localisation(8, context, value, is_ref=True)
+
set_reference_localisation_9.post_save = True
@post_importer_action
def set_reference_static_localisation_1(self, context, value):
- return self.set_localisation(0, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(0, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_1.post_save = True
@post_importer_action
def set_reference_static_localisation_2(self, context, value):
- return self.set_localisation(1, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(1, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_2.post_save = True
@post_importer_action
def set_reference_static_localisation_3(self, context, value):
- return self.set_localisation(2, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(2, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_3.post_save = True
@post_importer_action
def set_reference_static_localisation_4(self, context, value):
- return self.set_localisation(3, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(3, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_4.post_save = True
@post_importer_action
def set_reference_static_localisation_5(self, context, value):
- return self.set_localisation(4, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(4, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_5.post_save = True
@post_importer_action
def set_reference_static_localisation_6(self, context, value):
- return self.set_localisation(5, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(5, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_6.post_save = True
@post_importer_action
def set_reference_static_localisation_7(self, context, value):
- return self.set_localisation(6, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(6, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_7.post_save = True
@post_importer_action
def set_reference_static_localisation_8(self, context, value):
- return self.set_localisation(7, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(7, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_8.post_save = True
@post_importer_action
def set_reference_static_localisation_9(self, context, value):
- return self.set_localisation(8, context, value, is_ref=True,
- static=True)
+ return self.set_localisation(8, context, value, is_ref=True, static=True)
+
set_reference_static_localisation_9.post_save = True
@post_importer_action
def set_localisation_1(self, context, value):
return self.set_localisation(0, context, value)
+
set_localisation_1.post_save = True
@post_importer_action
def set_localisation_2(self, context, value):
return self.set_localisation(1, context, value)
+
set_localisation_2.post_save = True
@post_importer_action
def set_localisation_3(self, context, value):
return self.set_localisation(2, context, value)
+
set_localisation_3.post_save = True
@post_importer_action
def set_localisation_4(self, context, value):
return self.set_localisation(3, context, value)
+
set_localisation_4.post_save = True
@post_importer_action
def set_localisation_5(self, context, value):
return self.set_localisation(4, context, value)
+
set_localisation_5.post_save = True
@post_importer_action
def set_localisation_6(self, context, value):
return self.set_localisation(5, context, value)
+
set_localisation_6.post_save = True
@post_importer_action
def set_localisation_7(self, context, value):
return self.set_localisation(6, context, value)
+
set_localisation_7.post_save = True
@post_importer_action
def set_localisation_8(self, context, value):
return self.set_localisation(7, context, value)
+
set_localisation_8.post_save = True
@post_importer_action
def set_localisation_9(self, context, value):
return self.set_localisation(8, context, value)
+
set_localisation_9.post_save = True
@post_importer_action
def set_static_localisation_1(self, context, value):
return self.set_localisation(0, context, value, static=True)
+
set_static_localisation_1.post_save = True
@post_importer_action
def set_static_localisation_2(self, context, value):
return self.set_localisation(1, context, value, static=True)
+
set_static_localisation_2.post_save = True
@post_importer_action
def set_static_localisation_3(self, context, value):
return self.set_localisation(2, context, value, static=True)
+
set_static_localisation_3.post_save = True
@post_importer_action
def set_static_localisation_4(self, context, value):
return self.set_localisation(3, context, value, static=True)
+
set_static_localisation_4.post_save = True
@post_importer_action
def set_static_localisation_5(self, context, value):
return self.set_localisation(4, context, value, static=True)
+
set_static_localisation_5.post_save = True
@post_importer_action
def set_static_localisation_6(self, context, value):
return self.set_localisation(5, context, value, static=True)
+
set_static_localisation_6.post_save = True
@post_importer_action
def set_static_localisation_7(self, context, value):
return self.set_localisation(6, context, value, static=True)
+
set_static_localisation_7.post_save = True
@post_importer_action
def set_static_localisation_8(self, context, value):
return self.set_localisation(7, context, value, static=True)
+
set_static_localisation_8.post_save = True
@post_importer_action
def set_static_localisation_9(self, context, value):
return self.set_localisation(8, context, value, static=True)
+
set_static_localisation_9.post_save = True
def generate_index(self):
"""
Generate index based on operation or context record (based on
the configuration)
-
+
:return: True if index has been changed.
"""
bfs = self.base_finds
profile = get_current_profile()
- if profile.find_index == 'O':
- bfs = bfs.filter(
- context_record__operation__pk__isnull=False).order_by(
- '-context_record__operation__start_date')
+ if profile.find_index == "O":
+ bfs = bfs.filter(context_record__operation__pk__isnull=False).order_by(
+ "-context_record__operation__start_date"
+ )
if not bfs.count():
return False
operation = bfs.all()[0].context_record.operation
- q = Find.objects \
- .filter(base_finds__context_record__operation=operation)
- elif profile.find_index == 'CR':
- bfs = bfs.filter(
- context_record__pk__isnull=False).order_by(
- 'context_record__pk')
+ q = Find.objects.filter(base_finds__context_record__operation=operation)
+ elif profile.find_index == "CR":
+ bfs = bfs.filter(context_record__pk__isnull=False).order_by(
+ "context_record__pk"
+ )
if not bfs.count():
return False
cr = bfs.all()[0].context_record
- q = Find.objects \
- .filter(base_finds__context_record=cr)
+ q = Find.objects.filter(base_finds__context_record=cr)
else:
return False
if self.pk:
q = q.exclude(pk=self.pk)
if q.count():
- self.index = q.aggregate(Max('index'))['index__max'] + 1
+ self.index = q.aggregate(Max("index"))["index__max"] + 1
else:
self.index = 1
return True
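# generate_index() numbers a find either per operation ("O") or per context
# record ("CR"), depending on the site profile, by taking the highest existing
# index in that scope plus one (or 1 when none exists). A hedged usage sketch
# (find_instance is a hypothetical saved Find with at least one base find):
#
#     if find_instance.generate_index():
#         find_instance.skip_history_when_saving = True
#         find_instance.save()  # persist the freshly computed index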
@@ -2685,8 +2982,9 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
old_container = None
# fetch in db
if self.pk:
- old_container = self.__class__.objects.filter(
- pk=self.pk).values_list("container_id", flat=True)[0]
+ old_container = self.__class__.objects.filter(pk=self.pk).values_list(
+ "container_id", flat=True
+ )[0]
super(Find, self).save(*args, **kwargs)
self.skip_history_when_saving = True
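# save() snapshots the previous container_id before calling super().save(),
# presumably so container changes can be detected and propagated afterwards
# (the follow-up logic lies outside this hunk). A hedged equivalent of that
# lookup, written with .first() instead of indexing:
#
#     old_container = (
#         Find.objects.filter(pk=self.pk)
#         .values_list("container_id", flat=True)
#         .first()
#     )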
@@ -2715,7 +3013,8 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
self._cached_label_checked = False
self.save()
for base_find in self.base_finds.filter(
- context_record__operation__pk__isnull=False).all():
+ context_record__operation__pk__isnull=False
+ ).all():
modified = False
if self.label and not base_find.label:
base_find.label = self.label
@@ -2752,9 +3051,9 @@ class Find(BulkUpdatedItem, ValueGetter, DocumentItem, BaseHistorizedItem,
def pre_clean_find(sender, **kwargs):
- if not kwargs.get('instance'):
+ if not kwargs.get("instance"):
return
- instance = kwargs.get('instance')
+ instance = kwargs.get("instance")
for bf in instance.base_finds.all():
# no other find is associated
@@ -2782,7 +3081,7 @@ pre_delete.connect(pre_clean_find, sender=Find)
def base_find_find_changed(sender, **kwargs):
- obj = kwargs.get('instance', None)
+ obj = kwargs.get("instance", None)
if not obj:
return
obj.skip_history_when_saving = True
@@ -2792,8 +3091,7 @@ def base_find_find_changed(sender, **kwargs):
m2m_changed.connect(base_find_find_changed, sender=Find.base_finds.through)
-m2m_changed.connect(document_attached_changed,
- sender=Find.documents.through)
+m2m_changed.connect(document_attached_changed, sender=Find.documents.through)
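# base_find_find_changed() re-saves a Find without creating a new history entry
# whenever its base_finds relation changes, and document_attached_changed reacts
# to the documents relation. Illustrative triggers (instance names are
# assumptions):
#
#     find_instance.base_finds.add(base_find)   # fires base_find_find_changed
#     find_instance.documents.add(document)     # fires document_attached_changed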
class FindInsideContainer(models.Model):
@@ -2818,39 +3116,39 @@ class FindInsideContainer(models.Model):
DROP VIEW IF EXISTS find_inside_container;
"""
TABLE_COLS = ["find__" + t for t in Find.TABLE_COLS]
- COL_LABELS = {
- "find__" + k: Find.COL_LABELS[k] for k in Find.COL_LABELS.keys()
- }
+ COL_LABELS = {"find__" + k: Find.COL_LABELS[k] for k in Find.COL_LABELS.keys()}
EXTRA_REQUEST_KEYS = {
- "find__" + k:
- "find__" + Find.EXTRA_REQUEST_KEYS[k]
+ "find__" + k: "find__" + Find.EXTRA_REQUEST_KEYS[k]
for k in Find.EXTRA_REQUEST_KEYS.keys()
}
SLUG = "find_inside_container"
find = models.OneToOneField(
- Find, verbose_name=_("Find"), related_name="inside_container",
- primary_key=True)
- container = models.ForeignKey("archaeological_warehouse.Container",
- verbose_name=_("Container"),
- related_name="container_content")
+ Find, verbose_name=_("Find"), related_name="inside_container", primary_key=True
+ )
+ container = models.ForeignKey(
+ "archaeological_warehouse.Container",
+ verbose_name=_("Container"),
+ related_name="container_content",
+ )
class Meta:
managed = False
- db_table = 'find_inside_container'
+ db_table = "find_inside_container"
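# FindInsideContainer is an unmanaged model (managed = False) mapped onto the
# find_inside_container SQL view, so Django queries it but never creates or
# migrates the underlying table. A hedged query sketch (container_instance is
# hypothetical):
#
#     FindInsideContainer.objects.filter(
#         container=container_instance
#     ).select_related("find")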
for attr in Find.HISTORICAL_M2M:
- m2m_changed.connect(m2m_historization_changed,
- sender=getattr(Find, attr).through)
+ m2m_changed.connect(m2m_historization_changed, sender=getattr(Find, attr).through)
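# Every m2m listed in Find.HISTORICAL_M2M gets m2m_historization_changed attached
# to its through model, so additions and removals on those relations end up in the
# find's history. Illustrative call (the field name is an assumption):
#
#     find_instance.material_types.add(material_type)  # change is historized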
class Property(LightHistorizedItem):
find = models.ForeignKey(Find, verbose_name=_("Find"))
administrative_act = models.ForeignKey(
- AdministrativeAct, verbose_name=_("Administrative act"))
- person = models.ForeignKey(Person, verbose_name=_("Person"),
- related_name='properties')
+ AdministrativeAct, verbose_name=_("Administrative act")
+ )
+ person = models.ForeignKey(
+ Person, verbose_name=_("Person"), related_name="properties"
+ )
start_date = models.DateField(_("Start date"))
end_date = models.DateField(_("End date"))
@@ -2858,7 +3156,7 @@ class Property(LightHistorizedItem):
verbose_name = _("Property")
verbose_name_plural = _("Properties")
indexes = [
- GinIndex(fields=['data']),
+ GinIndex(fields=["data"]),
]
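# The GIN index on the JSON "data" field speeds up containment lookups on
# Property records. A hedged example (the "source" key is purely illustrative):
#
#     Property.objects.filter(data__contains={"source": "survey"})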
def __str__(self):