author    Étienne Loks <etienne.loks@iggdrasil.net>  2021-03-19 11:05:22 +0100
committer Étienne Loks <etienne.loks@iggdrasil.net>  2021-03-19 11:05:22 +0100
commit    e2d6c50f231f636fed362be37e7bf3319fc5d6b8 (patch)
tree      5d7fde3628825aebeeef3d85d2dfcf09a52116de /ishtar_common/serializers_utils.py
parent    e6af0225df8f539308bc3fd8c9dbc967bba5a807 (diff)
download  Ishtar-e2d6c50f231f636fed362be37e7bf3319fc5d6b8.tar.bz2
          Ishtar-e2d6c50f231f636fed362be37e7bf3319fc5d6b8.zip
Format - black: ishtar_common
Diffstat (limited to 'ishtar_common/serializers_utils.py')
-rw-r--r--  ishtar_common/serializers_utils.py | 97
1 file changed, 58 insertions(+), 39 deletions(-)
diff --git a/ishtar_common/serializers_utils.py b/ishtar_common/serializers_utils.py
index c03a55e35..99ebf9f4e 100644
--- a/ishtar_common/serializers_utils.py
+++ b/ishtar_common/serializers_utils.py
@@ -27,9 +27,8 @@ def get_model_from_filename(filename):
if module_name == "django":
if model_name in ("Group", "Permission"):
module = importlib.import_module("django.contrib.auth.models")
- elif model_name in ("ContentType", ):
- module = importlib.import_module(
- "django.contrib.contenttypes.models")
+ elif model_name in ("ContentType",):
+ module = importlib.import_module("django.contrib.contenttypes.models")
else:
return
else:
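This hunk is pure reformatting: `get_model_from_filename` maps a fixture filename back to a model class, special-casing Django's own models because they do not follow the project's `<app>.models` layout. A minimal standalone sketch of the same dispatch; the non-django fallback branch is assumed from the naming convention, since the diff cuts off before it:

import importlib

def resolve_model(module_name, model_name):
    # Django built-ins live outside the usual "<app>.models" layout,
    # so they are special-cased by name.
    if module_name == "django":
        if model_name in ("Group", "Permission"):
            module = importlib.import_module("django.contrib.auth.models")
        elif model_name in ("ContentType",):
            module = importlib.import_module("django.contrib.contenttypes.models")
        else:
            return None
    else:
        # Assumed fallback: project apps expose their models in "<app>.models".
        module = importlib.import_module(module_name + ".models")
    return getattr(module, model_name, None)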
@@ -44,16 +43,21 @@ def serialization_info(info=None):
"ishtar-version": get_version(),
"domain": site.domain,
"name": site.name,
- "date": datetime.datetime.now().isoformat()
+ "date": datetime.datetime.now().isoformat(),
}
if info:
base_info.update(info)
return base_info
-def archive_serialization(result, archive_dir=None, archive=False,
- return_empty_types=False, archive_name=None,
- info=None):
+def archive_serialization(
+ result,
+ archive_dir=None,
+ archive=False,
+ return_empty_types=False,
+ archive_name=None,
+ info=None,
+):
"""
Serialize all types models to JSON
Used for import and export scripts
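With the signature exploded to one parameter per line, a call site reads naturally as keyword arguments. A hedged usage sketch; the argument values and the name of the return value are illustrative, and `result` would be the OrderedDict built by `generic_get_results` further down in this file:

archive = archive_serialization(
    result,                                  # OrderedDict of serialized models
    archive=True,
    archive_name="ishtar-importers.zip",     # illustrative value
    info={"export-type": "importer-types"},  # merged into info.json metadata
)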
@@ -94,9 +98,7 @@ def archive_serialization(result, archive_dir=None, archive=False,
base_filename = "info.json"
filename = tmpdirname + os.sep + base_filename
with open(filename, "w") as json_file:
- json_file.write(
- json.dumps(serialization_info(info=info), indent=2)
- )
+ json_file.write(json.dumps(serialization_info(info=info), indent=2))
current_zip.write(filename, arcname=base_filename)
for dir_name, model_name in result:
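The `info.json` block follows a common pattern: dump the metadata to a file in a temporary directory, then store it in the zip under a stable arcname. A self-contained sketch of that pattern; the function name and surrounding code are illustrative, not Ishtar's API:

import json
import os
import tempfile
import zipfile

def write_info(zip_path, info):
    # Write the metadata to a temp directory first, then add it to the
    # archive under its bare name ("info.json"), not its temp path.
    with tempfile.TemporaryDirectory() as tmpdirname:
        base_filename = "info.json"
        filename = tmpdirname + os.sep + base_filename
        with open(filename, "w") as json_file:
            json_file.write(json.dumps(info, indent=2))
        with zipfile.ZipFile(zip_path, "a") as current_zip:
            current_zip.write(filename, arcname=base_filename)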
@@ -112,27 +114,30 @@ def archive_serialization(result, archive_dir=None, archive=False,
GENERIC_QUERYSET_FILTER = {
- "Regexp": {"ImporterType": 'columns__importer_type__pk__in'},
- "ImporterModel": {"ImporterType": ['importer_type_associated__pk__in',
- 'importer_type_created__pk__in']},
- "ValueFormater": {"ImporterType": 'columns__importer_type__pk__in'},
- "ImporterColumn": {"ImporterType": 'importer_type__pk__in'},
- "ImporterDefault": {"ImporterType": 'importer_type__pk__in'},
- "ImportTarget": {"ImporterType": 'column__importer_type__pk__in'},
- "FormaterType": {"ImporterType": 'targets__column__importer_type__pk__in'},
- "ImporterDefaultValues": {
- "ImporterType": 'default_target__importer_type__pk__in'},
- "ImporterDuplicateField": {"ImporterType": 'column__importer_type__pk__in'},
+ "Regexp": {"ImporterType": "columns__importer_type__pk__in"},
+ "ImporterModel": {
+ "ImporterType": [
+ "importer_type_associated__pk__in",
+ "importer_type_created__pk__in",
+ ]
+ },
+ "ValueFormater": {"ImporterType": "columns__importer_type__pk__in"},
+ "ImporterColumn": {"ImporterType": "importer_type__pk__in"},
+ "ImporterDefault": {"ImporterType": "importer_type__pk__in"},
+ "ImportTarget": {"ImporterType": "column__importer_type__pk__in"},
+ "FormaterType": {"ImporterType": "targets__column__importer_type__pk__in"},
+ "ImporterDefaultValues": {"ImporterType": "default_target__importer_type__pk__in"},
+ "ImporterDuplicateField": {"ImporterType": "column__importer_type__pk__in"},
}
-def generic_get_results(model_list, dirname, no_geo=True,
- result_queryset=None, serialization_include=None):
+def generic_get_results(
+ model_list, dirname, no_geo=True, result_queryset=None, serialization_include=None
+):
result = OrderedDict()
for model in model_list:
base_model_name = model.__name__
- model_name = str(model.__module__).split(".")[0] + "__" + \
- base_model_name
+ model_name = str(model.__module__).split(".")[0] + "__" + base_model_name
base_q = model.objects
if result_queryset:
if result_queryset and base_model_name in result_queryset:
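GENERIC_QUERYSET_FILTER declares, per dependent model, the ORM lookup path(s) tying it back to a restricted ImporterType queryset, so a partial export drags its related rows along. Reading one entry as a concrete filter, as a hedged illustration; only the lookup string comes from the dict above, and the ImporterColumn import path is assumed:

# Export restricted to a few ImporterType rows: related ImporterColumn
# rows are selected through the declared "importer_type__pk__in" lookup.
from ishtar_common.models import ImporterColumn  # import path assumed

importer_type_pks = [1, 2, 3]  # illustrative
columns = ImporterColumn.objects.filter(importer_type__pk__in=importer_type_pks)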
@@ -144,8 +149,7 @@ def generic_get_results(model_list, dirname, no_geo=True,
terms = alt_filter[k]
if not isinstance(terms, (list, tuple)):
terms = [terms]
- ids = [r["pk"]
- for r in result_queryset[k].values("pk").all()]
+ ids = [r["pk"] for r in result_queryset[k].values("pk").all()]
q = None
for term in terms:
if not q:
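The loop reflowed above ORs one Q object per lookup path, so a model such as ImporterModel, which reaches ImporterType through two different relations, is kept when either relation matches. The pattern in isolation, with illustrative names:

from django.db.models import Q

def build_or_filter(terms, ids):
    # OR together one Q per lookup path, e.g. matching either
    # importer_type_associated__pk__in or importer_type_created__pk__in.
    q = None
    for term in terms:
        if q is None:
            q = Q(**{term: ids})
        else:
            q |= Q(**{term: ids})
    return q

# usage: queryset = model.objects.filter(build_or_filter(terms, ids))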
@@ -167,9 +171,11 @@ def generic_get_results(model_list, dirname, no_geo=True,
key = (dirname, model_name)
result[key] = serialize(
- "json", q.distinct().all(),
+ "json",
+ q.distinct().all(),
indent=2,
- use_natural_foreign_keys=True, use_natural_primary_keys=True,
+ use_natural_foreign_keys=True,
+ use_natural_primary_keys=True,
)
if recursion:
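`serialize` here is Django's `django.core.serializers.serialize`; enabling natural foreign and primary keys makes the fixtures portable, since rows are referenced by whatever their `natural_key()` returns rather than by auto-increment ids. The call shape, standalone (the queryset is illustrative):

from django.core.serializers import serialize

# With natural keys enabled, references are emitted as natural_key()
# values instead of raw integer pks, so the JSON loads cleanly into a
# database whose id sequences differ from the source database.
payload = serialize(
    "json",
    queryset.distinct().all(),  # illustrative queryset
    indent=2,
    use_natural_foreign_keys=True,
    use_natural_primary_keys=True,
)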
@@ -178,24 +184,32 @@ def generic_get_results(model_list, dirname, no_geo=True,
if not recursion.endswith("_id"):
recursion_in += "_id"
recursion_in += "__in"
- q = base_q.filter(**{recursion_in: serialized}
- ).exclude(id__in=serialized)
+ q = base_q.filter(**{recursion_in: serialized}).exclude(id__in=serialized)
while q.count():
v = serialize(
- "json", q.all(), indent=2, use_natural_foreign_keys=True,
- use_natural_primary_keys=True)
+ "json",
+ q.all(),
+ indent=2,
+ use_natural_foreign_keys=True,
+ use_natural_primary_keys=True,
+ )
new_result = json.loads(result[key])
new_result += json.loads(v)
result[key] = json.dumps(new_result, indent=2)
serialized += [item["id"] for item in q.values("id").all()]
- q = base_q.filter(**{recursion_in: serialized}
- ).exclude(id__in=serialized)
+ q = base_q.filter(**{recursion_in: serialized}).exclude(
+ id__in=serialized
+ )
# managed circular
q = base_q.exclude(id__in=serialized)
if q.count():
v = serialize(
- "json", q.all(), indent=2, use_natural_foreign_keys=True,
- use_natural_primary_keys=True)
+ "json",
+ q.all(),
+ indent=2,
+ use_natural_foreign_keys=True,
+ use_natural_primary_keys=True,
+ )
result_to_add = json.loads(v)
result_cleaned = deepcopy(result_to_add)
for res in result_cleaned: # first add with no recursion
@@ -205,8 +219,13 @@ def generic_get_results(model_list, dirname, no_geo=True,
new_result += result_to_add
result[key] = json.dumps(new_result, indent=2)
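The recursion block serializes self-referencing rows in waves: rows whose recursive parent is already serialized are appended, the set of serialized ids grows, and the loop repeats until the filter returns nothing; truly circular leftovers are then added with the recursive field blanked on the first pass. A condensed sketch of the wave loop, assuming a model with a nullable self foreign key named `parent`:

def serialize_in_waves(model):
    # Seed with rows that have no parent, then append rows whose parent
    # is already in the serialized set, wave after wave, so the fixtures
    # can be loaded back in order without hitting a dangling FK.
    serialized = [r["id"] for r in model.objects.filter(parent__isnull=True).values("id")]
    q = model.objects.filter(parent_id__in=serialized).exclude(id__in=serialized)
    while q.count():
        serialized += [item["id"] for item in q.values("id").all()]
        q = model.objects.filter(parent_id__in=serialized).exclude(id__in=serialized)
    return serialized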
- excluded_fields = ["history_modifier", "history_creator", "imports",
- "locked", "lock_user"]
+ excluded_fields = [
+ "history_modifier",
+ "history_creator",
+ "imports",
+ "locked",
+ "lock_user",
+ ]
if hasattr(model, "SERIALIZATION_EXCLUDE"):
excluded_fields += list(model.SERIALIZATION_EXCLUDE)
if no_geo:
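`excluded_fields` lists volatile bookkeeping fields (history pointers, import links, lock state) that should not travel with the fixtures; models can extend the list via `SERIALIZATION_EXCLUDE`, and geometry fields are presumably added when `no_geo` is set. The hunk cuts off before the stripping itself; a hedged sketch of how such fields are commonly removed from Django's serialized JSON:

import json

def strip_fields(payload, excluded_fields):
    # Django's JSON serializer emits a list of
    # {"model": ..., "pk": ..., "fields": {...}} records; removing a
    # field is just dropping its key from each "fields" dict.
    records = json.loads(payload)
    for record in records:
        for field in excluded_fields:
            record["fields"].pop(field, None)
    return json.dumps(records, indent=2)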