author     Étienne Loks <etienne.loks@iggdrasil.net>  2019-09-08 01:12:33 +0200
committer  Étienne Loks <etienne.loks@iggdrasil.net>  2019-09-08 01:13:44 +0200
commit     647546deffb5152cebbaed3b0bf5bed309d59fc5 (patch)
tree       1f31c31260464801cc42222289ceb3eaa8a6f6f9 /ishtar_common
parent     1638af713f4418f5dc5f0ccb97b691b668df0dc7 (diff)
download   Ishtar-647546deffb5152cebbaed3b0bf5bed309d59fc5.tar.bz2
           Ishtar-647546deffb5152cebbaed3b0bf5bed309d59fc5.zip
Serializer: concat media.zip in main archive - fix restore order (importer before conf)
Diffstat (limited to 'ishtar_common')
-rw-r--r--  ishtar_common/serializers.py  59
-rw-r--r--  ishtar_common/tests.py         4
2 files changed, 50 insertions, 13 deletions
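
The first hunk below replaces the plain append of media.zip with a merge step: when the main archive already contains a media.zip, its entries are folded into the freshly generated media archive before the main archive is rewritten. A standalone sketch of that step, mirroring the patched logic; merge_media_into_archive is a hypothetical name, in the patch the code lives inline in document_serialization:

    import os
    import tempfile
    from zipfile import ZipFile

    def merge_media_into_archive(full_archive, media_archive):
        # If the main archive has no media.zip yet, just append the new one.
        with ZipFile(full_archive, 'r') as current_zip:
            has_media = "media.zip" in current_zip.namelist()

        if not has_media:
            with ZipFile(full_archive, 'a') as current_zip:
                current_zip.write(media_archive, arcname="media.zip")
            os.remove(media_archive)
            return full_archive

        with tempfile.TemporaryDirectory() as tmp_dir_name:
            # Unpack the main archive so its existing media.zip can be read.
            with ZipFile(full_archive, 'r') as current_zip:
                name_list = current_zip.namelist()
                current_zip.extractall(tmp_dir_name)

            # Fold the old media entries into the freshly generated media.zip.
            old_media_path = os.path.join(tmp_dir_name, "media.zip")
            with ZipFile(old_media_path, 'r') as old_zip, \
                    ZipFile(media_archive, 'a') as new_zip:
                for name in old_zip.namelist():
                    new_zip.writestr(name, old_zip.read(name))

            # Rewrite the main archive, replacing media.zip with the merged one.
            with ZipFile(full_archive + "_new", 'w') as new_zip:
                for name in name_list:
                    if name == "media.zip":
                        continue
                    new_zip.write(os.path.join(tmp_dir_name, name), arcname=name)
                new_zip.write(media_archive, arcname="media.zip")

        os.remove(media_archive)
        os.remove(full_archive)
        os.rename(full_archive + "_new", full_archive)
        return full_archive
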
diff --git a/ishtar_common/serializers.py b/ishtar_common/serializers.py
index 7e61df517..1b18ccab5 100644
--- a/ishtar_common/serializers.py
+++ b/ishtar_common/serializers.py
@@ -174,36 +174,73 @@ def document_serialization(archive=False, return_empty_types=False,
return_empty_types=return_empty_types, archive_name=archive_name)
if not media_archive:
return full_archive
- with ZipFile(full_archive, 'a') as current_zip:
- current_zip.write(media_archive, arcname="media.zip")
+
+ has_media = "media.zip" in ZipFile(full_archive, 'r').namelist()
+ if not has_media:
+ with ZipFile(full_archive, 'a') as current_zip:
+ current_zip.write(media_archive, arcname="media.zip")
+ os.remove(media_archive)
+ return full_archive
+
+ with tempfile.TemporaryDirectory() as tmp_dir_name:
+ # extract the current archive
+ current_zip = ZipFile(full_archive, 'r')
+ name_list = current_zip.namelist()
+ for name in name_list:
+ current_zip.extract(name, tmp_dir_name)
+ current_zip.close()
+
+ # extract the media and recreate a media.zip
+ old_media_archive = ZipFile(
+ os.path.join(tmp_dir_name, "media.zip"), "r")
+ with ZipFile(media_archive, "a") as new_zip:
+ for name in old_media_archive.namelist():
+ new_zip.writestr(
+ name, old_media_archive.open(name).read())
+
+ # rewrite the archive
+ with ZipFile(full_archive + "_new", "w") as new_zip:
+ for name in name_list:
+ if name == "media.zip":
+ continue
+ new_zip.write(os.path.join(tmp_dir_name, name), arcname=name)
+ new_zip.write(media_archive, arcname="media.zip")
+ os.remove(media_archive)
+ os.remove(full_archive)
+ os.rename(full_archive + "_new", full_archive)
return full_archive
def full_serialization(operation_queryset=None, site_queryset=None,
cr_queryset=None, find_queryset=None,
- warehouse_queryset=None):
- archive_name = type_serialization()
- conf_serialization(archive_name=archive_name)
- importer_serialization(archive_name=archive_name)
- geo_serialization(archive_name=archive_name)
- directory_serialization(archive_name=archive_name)
- document_serialization(archive_name=archive_name)
+ warehouse_queryset=None, archive=True):
+ archive_name = type_serialization(archive=archive)
+ conf_serialization(archive=archive, archive_name=archive_name)
+ importer_serialization(archive=archive, archive_name=archive_name)
+ geo_serialization(archive=archive, archive_name=archive_name)
+ directory_serialization(archive=archive, archive_name=archive_name)
+ document_serialization(archive=archive, archive_name=archive_name)
operation_serialization(
+ archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
cr_serialization(
+ archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
find_serialization(
+ archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
warehouse_serialization(
+ archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
+ return archive_name
def restore_serialized(archive_name, delete_existing=False):
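
full_serialization now forwards an archive flag to every per-domain serializer and returns the archive name, so a caller can chain serialization and restoration. A minimal usage sketch using only the entry points shown in this file; delete_existing is left at its default here:

    # Serialize every domain (types, configuration, importers, geo,
    # directories, documents, operations, context records, finds,
    # warehouses) into one archive, media.zip included, then restore it.
    archive_name = full_serialization()
    restore_serialized(archive_name)
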
@@ -218,15 +255,15 @@ def restore_serialized(archive_name, delete_existing=False):
DIRS = (
("types", [None]),
- ("common_configuration", CONF_MODEL_LIST),
("common_imports", IMPORT_MODEL_LIST),
+ ("common_configuration", CONF_MODEL_LIST),
("common_geo", GEO_MODEL_LIST),
("common_directory", DIRECTORY_MODEL_LIST),
("documents", [models.Document]),
("operations", OPERATION_MODEL_LIST),
("context_records", CR_MODEL_LIST),
- ("finds", FIND_MODEL_LIST),
("warehouse", WAREHOUSE_MODEL_LIST),
+ ("finds", FIND_MODEL_LIST),
)
namelist = zip_file.namelist()
for current_dir, model_list in DIRS:
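
The two swaps in DIRS are the "fix restore order" half of the commit message: importer fixtures are now restored before the configurations that reference them, and warehouses before the finds they store, so foreign keys resolve while loading. A rough illustration of the ordering constraint, assuming a plain Django deserialization loop; the actual loader inside restore_serialized is not shown in this hunk and restore_in_order is a hypothetical name:

    from django.core import serializers as django_serializers

    def restore_in_order(zip_file, dirs):
        # Walk the directories in declaration order so that rows referenced
        # by foreign keys (importer types used by the configuration,
        # warehouses holding finds) are saved before the fixtures that
        # point at them are deserialized.
        namelist = zip_file.namelist()
        for current_dir, model_list in dirs:
            for name in namelist:
                if not name.startswith(current_dir):
                    continue
                with zip_file.open(name) as json_file:
                    for obj in django_serializers.deserialize("json", json_file):
                        obj.save()
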
diff --git a/ishtar_common/tests.py b/ishtar_common/tests.py
index 9313a8432..b814cbe5c 100644
--- a/ishtar_common/tests.py
+++ b/ishtar_common/tests.py
@@ -55,7 +55,7 @@ from ishtar_common.serializers import type_serialization, \
restore_serialized, conf_serialization, CONF_MODEL_LIST, \
importer_serialization, IMPORT_MODEL_LIST, geo_serialization, \
GEO_MODEL_LIST, directory_serialization, DIRECTORY_MODEL_LIST, \
- document_serialization, get_type_models
+ document_serialization, get_type_models, full_serialization
from archaeological_operations.serializers import OPERATION_MODEL_LIST
from archaeological_context_records.serializers import CR_MODEL_LIST
from archaeological_finds.serializers import FIND_MODEL_LIST
@@ -1065,7 +1065,7 @@ class SerializationTest(GenericSerializationTest, TestCase):
GEO_MODEL_LIST + DIRECTORY_MODEL_LIST + OPERATION_MODEL_LIST + \
CR_MODEL_LIST + FIND_MODEL_LIST + WAREHOUSE_MODEL_LIST
current_number, zip_filename = self.generic_restore_test_genzip(
- model_list, document_serialization)
+ model_list, full_serialization)
self.generic_restore_test(zip_filename, current_number,
model_list)