-rw-r--r--   ishtar_common/models_imports.py                    107
-rw-r--r--   ishtar_common/templates/ishtar/import_table.html     2
-rw-r--r--   ishtar_common/tests.py                              127
3 files changed, 134 insertions(+), 102 deletions(-)
diff --git a/ishtar_common/models_imports.py b/ishtar_common/models_imports.py
index b03b42e1a..78398ea1a 100644
--- a/ishtar_common/models_imports.py
+++ b/ishtar_common/models_imports.py
@@ -19,7 +19,7 @@
import csv
import datetime
-import sys
+from pathlib import Path
import fiona
from fiona import crs as fiona_crs
@@ -1452,6 +1452,10 @@ class ImportGroup(BaseImport):
return False
@property
+ def has_error(self) -> bool:
+ return any(imprt.has_error for imprt in self.imports.all())
+
+ @property
def pre_import_form_is_valid(self) -> bool:
return not any(-1 for imp in self.imports.all() if not imp.pre_import_form_is_valid)
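Note: ImportGroup.has_error simply delegates to each sub-import. A minimal alternative sketch that lets the database answer the question instead of loading every row, assuming error_file is a regular Django FileField whose "no file" value is stored as an empty string, could look like this (purely illustrative, not part of the patch):

    # Illustrative alternative, not part of this commit: assumes error_file
    # is a FileField that stores an empty string when no file is attached.
    @property
    def has_error(self) -> bool:
        return self.imports.exclude(error_file="").exists()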
@@ -1545,7 +1549,9 @@ class ImportGroup(BaseImport):
def _unarchive(self):
if not self.archive_file:
return
- with tempfile.TemporaryDirectory() as tmp_dir_name:
+ today = datetime.date.today()
+ sub_imports_ids = [(Import, imp.pk) for imp in self.import_list()]
+ with tempfile.TemporaryDirectory(suffix="-ishtar") as tmp_dir_name:
# extract the current archive
current_zip = zipfile.ZipFile(self.archive_file.path, "r")
name_list = current_zip.namelist()
@@ -1560,22 +1566,71 @@ class ImportGroup(BaseImport):
files = json.loads(content.read())
except (IOError, json.JSONDecodeError):
return
- today = datetime.date.today()
for attr in files:
filename = files[attr]
full_filename = os.path.join(tmp_dir_name, filename)
- with open(full_filename, "rb") as raw_file:
- getattr(self, attr).save(
+ current_imports_ids = [(ImportGroup, self.pk)]
+
+ # identify imports to attach the file
+ if attr == "imported_file":
+ current_imports_ids += sub_imports_ids # imported_file is attached to each sub-import
+ elif attr.startswith("sub-"):
+ # identify the sub import to attach the file
+ # archive filename is "sub-{subimport_order}-{attribute}"
+ split_attr = attr.split("-")
+ if len(split_attr) < 3:
+ continue
+ try:
+ idx = int(split_attr[1])
+ except (ValueError, IndexError):
+ continue
+ if idx >= len(sub_imports_ids):
+ continue
+ current_imports_ids = [sub_imports_ids[idx]]
+ attr = "-".join(split_attr[2:])
+
+ for idx, current_import_id in enumerate(current_imports_ids):
+ model, current_import_id = current_import_id
+ current_import = model.objects.get(pk=current_import_id)
+ path = Path(full_filename)
+ c_filename = path.name
+ if idx:
+ c_filename = f"{idx:02d}-" + c_filename
+ """
+ # TODO: should be enough...
+ # but need to be set explicitly, why?
+ with path.open(mode="rb") as raw_file:
+ getattr(current_import, attr).save(c_filename, File(raw_file))
+ """
+
+ w_filename = os.path.join(
+ settings.MEDIA_ROOT,
"upload/imports/{}/{:02d}/{}".format(
- today.year, today.month, filename
- ),
- File(raw_file),
+ today.year, today.month, c_filename
+ )
)
-
+ with path.open(mode="rb") as raw_file:
+ with open(w_filename, "wb") as w_file:
+ w_file.write(raw_file.read())
+ with open(w_filename, mode="rb") as raw_file:
+ f = File(raw_file, name=c_filename)
+ f.path = w_filename
+ setattr(current_import, attr, f)
+ current_import.save()
+ getattr(current_import, attr).name = w_filename
+ current_import.save()
os.remove(self.archive_file.path)
+ self.refresh_from_db()
setattr(self, "archive_file", None)
- self.state = "FE" if self.error_file else "F"
+ self.state = "FE" if self.has_error else "F"
self.save()
+
+ for model, sub_import_id in sub_imports_ids:
+ sub_import = model.objects.get(pk=sub_import_id)
+ sub_import.state = "FE" if sub_import.has_error else "F"
+ sub_import.save()
+
return True
def _archive(self):
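For reference, the keys read from content.json above follow the sub-{order}-{attribute} naming convention that the group-import test below also exercises. A small standalone sketch of the dispatch rule (function name and structure are illustrative, not part of the module):

    def resolve_archive_key(attr, group, sub_imports):
        """Illustrative helper: map a content.json key to (target import, field name) pairs."""
        if attr == "imported_file":
            # the source file is re-attached to the group and to every sub-import
            return [(group, attr)] + [(sub, attr) for sub in sub_imports]
        if attr.startswith("sub-"):
            # "sub-{order}-{field}" targets a single sub-import
            _, order, *field_parts = attr.split("-")
            try:
                idx = int(order)
            except ValueError:
                return []
            if idx < len(sub_imports):
                return [(sub_imports[idx], "-".join(field_parts))]
            return []
        # any other key is a field of the group itself
        return [(group, attr)]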
@@ -1610,17 +1665,11 @@ class ImportGroup(BaseImport):
content.write(json.dumps(zip_content))
current_zip.write(content_name, arcname="content.json")
- today = datetime.date.today()
with open(
archive_name,
"rb",
) as raw_file:
- self.archive_file.save(
- "upload/imports/{}/{:02d}/{}".format(
- today.year, today.month, base_name
- ),
- File(raw_file),
- )
+ self.archive_file.save(base_name, File(raw_file))
IshtarSiteProfile = apps.get_model("ishtar_common", "IshtarSiteProfile")
profile = IshtarSiteProfile.get_current_profile()
if profile.delete_image_zip_on_archive:
@@ -1648,6 +1697,10 @@ class ImportGroup(BaseImport):
self.save()
self._archive_pending = False
+ def archive(self):
+ super().archive()
+ self.imports.update(state="AC")
+
def get_all_imported(self):
imported = []
for imp in self.imports.all():
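The archive() override above uses QuerySet.update(), which sets the state of all sub-imports in a single SQL statement but, as with any update(), does not call save() or fire Django signals on the individual objects. A hedged sketch of the per-instance variant, in case signal receivers or an overridden save() ever need to see the change:

    def archive(self):
        super().archive()
        # Alternative sketch: save each sub-import so signals and save() overrides run.
        for sub_import in self.imports.all():
            sub_import.state = "AC"
            sub_import.save(update_fields=["state"])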
@@ -1820,6 +1873,10 @@ class Import(BaseImport):
return bool(self.importer_type.columns.filter(col_number__lte=0).count())
@property
+ def has_error(self) -> bool:
+ return bool(self.error_file)
+
+ @property
def pre_import_form_is_valid(self) -> bool:
for column in self.importer_type.columns.filter(col_number__lte=0, required=True):
q = ImportColumnValue.objects.filter(column=column, import_item=self)
@@ -2377,17 +2434,11 @@ class Import(BaseImport):
files = json.loads(content.read())
except (IOError, json.JSONDecodeError):
return
- today = datetime.date.today()
for attr in files:
filename = files[attr]
full_filename = os.path.join(tmp_dir_name, filename)
with open(full_filename, "rb") as raw_file:
- getattr(self, attr).save(
- "upload/imports/{}/{:02d}/{}".format(
- today.year, today.month, filename
- ),
- File(raw_file),
- )
+ getattr(self, attr).save(filename, File(raw_file))
os.remove(self.archive_file.path)
setattr(self, "archive_file", None)
@@ -2419,17 +2470,11 @@ class Import(BaseImport):
content.write(json.dumps(zip_content))
current_zip.write(content_name, arcname="content.json")
- today = datetime.date.today()
with open(
archive_name,
"rb",
) as raw_file:
- self.archive_file.save(
- "upload/imports/{}/{:02d}/{}".format(
- today.year, today.month, base_name
- ),
- File(raw_file),
- )
+ self.archive_file.save(base_name, File(raw_file))
IshtarSiteProfile = apps.get_model("ishtar_common", "IshtarSiteProfile")
profile = IshtarSiteProfile.get_current_profile()
if profile.delete_image_zip_on_archive:
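Both _archive() implementations write a content.json manifest into the zip, and _unarchive() reads it back to know which archived file belongs to which model field. For a group with several sub-imports, the manifest might look roughly like this (file name values are illustrative; only the keys follow the convention used above and checked in the tests below):

    {
        "imported_file": "source.csv",
        "sub-0-result_file": "sub-0-result_file.txt",
        "sub-1-result_file": "sub-1-result_file.txt",
        "sub-1-match_file": "sub-1-match_file.txt",
        "sub-2-match_file": "sub-2-match_file.txt"
    }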
diff --git a/ishtar_common/templates/ishtar/import_table.html b/ishtar_common/templates/ishtar/import_table.html
index 0b2aa46d7..c04248088 100644
--- a/ishtar_common/templates/ishtar/import_table.html
+++ b/ishtar_common/templates/ishtar/import_table.html
@@ -52,7 +52,7 @@
</tr>
{% for import in object_list %}
<tr id="import-{{import.import_id}}"
- class='import-row{% if import.error_file or not import.pre_import_form_is_valid %}-error{% endif %}{% if import.pk in refreshed_pks %} bg-info{% endif %}'>
+ class='import-row{% if import.has_error or not import.pre_import_form_is_valid %}-error{% endif %}{% if import.pk in refreshed_pks %} bg-info{% endif %}'>
<td><ul class="simple">
<li><strong>{{import.name|default:"-"}}</strong></li>
<li><small><i class="fa fa-fw fa-calendar" aria-hidden="true"></i>&nbsp; {{import.creation_date|date:"DATE_FORMAT"}} {{import.creation_date|time:"H:i"}} - {% if import.end_date %}{{import.end_date|date:"DATE_FORMAT"}} {{import.end_date|time:"H:i"}}{% endif %}</small></li>
diff --git a/ishtar_common/tests.py b/ishtar_common/tests.py
index 19d09a205..f90def1d2 100644
--- a/ishtar_common/tests.py
+++ b/ishtar_common/tests.py
@@ -2684,55 +2684,52 @@ class ImportTest(BaseImportTest):
profile.delete_image_zip_on_archive = False
profile.save()
imprt.archive()
- group_import = models.ImportGroup.objects.get(pk=imprt.pk)
- self.assertEqual(group_import.state, "AC")
- for imprt in group_import.imports.all():
- self.assertEqual(imprt.state, "AC")
- self.assertFalse(imprt.error_file)
- self.assertFalse(imprt.result_file)
- self.assertFalse(imprt.match_file)
- self.assertTrue(imprt.imported_images)
- self.assertTrue(imprt.archive_file)
- self.assertTrue(zipfile.is_zipfile(imprt.archive_file))
- with tempfile.TemporaryDirectory() as tmpdir:
- current_zip = zipfile.ZipFile(imprt.archive_file.path, "r")
- name_list = current_zip.namelist()
- self.assertIn("content.json", name_list)
- current_zip.extract("content.json", tmpdir)
- content_name = os.path.join(tmpdir, "content.json")
- with open(content_name, "r") as content:
- files = json.loads(content.read())
- self.assertIn("imported_file", files.keys())
- self.assertIn(files["imported_file"], name_list)
- self.assertIn("error_file", files.keys())
- self.assertIn(files["error_file"], name_list)
- self.assertIn("result_file", files.keys())
- self.assertIn(files["result_file"], name_list)
- self.assertIn("match_file", files.keys())
- self.assertIn(files["match_file"], name_list)
- rev_dict = {v: k for k, v in files.items()}
- for name in name_list:
- current_zip.extract(name, tmpdir)
- if name.endswith(".txt"):
- with open(os.path.join(tmpdir, name), "r") as f:
- self.assertEqual(f.read(), "test" + rev_dict[name])
- elif name.endswith(".csv"): # imported file
- with open(os.path.join(tmpdir, name), "r") as f:
- self.assertEqual(f.read(), csv_content)
-
- group_import.unarchive("FE")
- group_import = models.ImportGroup.objects.get(pk=imprt.pk)
- for imprt in group_import.import_list():
- self.assertEqual(imprt.state, "FE")
- for k in ("error_file", "result_file", "match_file", "imported_images"):
- field = getattr(imprt, k)
- self.assertTrue(field, "{} is missing in unarchive".format(k))
- with open(field.path, "r") as f:
- self.assertEqual(f.read(), "test" + k)
- field = getattr(imprt, "imported_file")
+ imprt = models.Import.objects.get(pk=imprt.pk)
+ self.assertEqual(imprt.state, "AC")
+ self.assertFalse(imprt.error_file)
+ self.assertFalse(imprt.result_file)
+ self.assertFalse(imprt.match_file)
+ self.assertTrue(imprt.imported_images)
+ self.assertTrue(imprt.archive_file)
+ self.assertTrue(zipfile.is_zipfile(imprt.archive_file))
+ with tempfile.TemporaryDirectory() as tmpdir:
+ current_zip = zipfile.ZipFile(imprt.archive_file.path, "r")
+ name_list = current_zip.namelist()
+ self.assertIn("content.json", name_list)
+ current_zip.extract("content.json", tmpdir)
+ content_name = os.path.join(tmpdir, "content.json")
+ with open(content_name, "r") as content:
+ files = json.loads(content.read())
+ self.assertIn("imported_file", files.keys())
+ self.assertIn(files["imported_file"], name_list)
+ self.assertIn("error_file", files.keys())
+ self.assertIn(files["error_file"], name_list)
+ self.assertIn("result_file", files.keys())
+ self.assertIn(files["result_file"], name_list)
+ self.assertIn("match_file", files.keys())
+ self.assertIn(files["match_file"], name_list)
+ rev_dict = {v: k for k, v in files.items()}
+ for name in name_list:
+ current_zip.extract(name, tmpdir)
+ if name.endswith(".txt"):
+ with open(os.path.join(tmpdir, name), "r") as f:
+ self.assertEqual(f.read(), "test" + rev_dict[name])
+ elif name.endswith(".csv"): # imported file
+ with open(os.path.join(tmpdir, name), "r") as f:
+ self.assertEqual(f.read(), csv_content)
+
+ imprt.unarchive("FE")
+ imprt = models.Import.objects.get(pk=imprt.pk)
+ self.assertEqual(imprt.state, "FE")
+ for k in ("error_file", "result_file", "match_file"):
+ field = getattr(imprt, k)
self.assertTrue(field, "{} is missing in unarchive".format(k))
with open(field.path, "r") as f:
- self.assertEqual(f.read(), csv_content)
+ self.assertEqual(f.read(), "test" + k)
+ field = getattr(imprt, "imported_file")
+ self.assertTrue(field, "imported_file is missing in unarchive")
+ with open(field.path, "r") as f:
+ self.assertEqual(f.read(), csv_content)
def test_archive_group_import(self):
group_import = self.create_group_import()
@@ -2752,6 +2749,8 @@ class ImportTest(BaseImportTest):
self.assertTrue(group_import.imported_images)
self.assertTrue(group_import.archive_file)
self.assertTrue(zipfile.is_zipfile(group_import.archive_file))
+ SUB_IMPORT_MATCH_IDX = [1, 2]
+
with tempfile.TemporaryDirectory() as tmpdir:
current_zip = zipfile.ZipFile(group_import.archive_file.path, "r")
name_list = current_zip.namelist()
@@ -2765,7 +2764,7 @@ class ImportTest(BaseImportTest):
for idx in range(4):
self.assertIn(f"sub-{idx}-result_file", files.keys())
self.assertIn(files[f"sub-{idx}-result_file"], name_list)
- for idx in range(1, 3):
+ for idx in SUB_IMPORT_MATCH_IDX:
self.assertIn(f"sub-{idx}-match_file", files.keys())
self.assertIn(files[f"sub-{idx}-match_file"], name_list)
for name in name_list:
@@ -2776,32 +2775,20 @@ class ImportTest(BaseImportTest):
self.assertEqual(result, csv_content)
group_import.unarchive("F")
- group_import = models.Import.objects.get(pk=group_import.pk)
+ group_import = models.ImportGroup.objects.get(pk=group_import.pk)
self.assertEqual(group_import.state, "F")
- for k in ("error_file", "result_file", "match_file", "imported_images"):
- field = getattr(group_import, k)
- self.assertTrue(field, "{} is missing in unarchive".format(k))
- with open(field.path, "r") as f:
- self.assertEqual(f.read(), "test" + k)
field = getattr(group_import, "imported_file")
- self.assertTrue(field, "{} is missing in unarchive".format(k))
+ self.assertTrue(field, "imported_file is missing in unarchive")
with open(field.path, "r") as f:
self.assertEqual(f.read(), csv_content)
-
- profile = models.get_current_profile()
- profile.delete_image_zip_on_archive = True
- profile.save()
- group_import = models.Import.objects.get(pk=group_import.pk)
- image_filename = group_import.imported_images.path
- self.assertTrue(os.path.isfile(image_filename))
- group_import.archive()
- group_import = models.Import.objects.get(pk=group_import.pk)
- self.assertFalse(group_import.imported_images)
- self.assertFalse(os.path.isfile(image_filename))
- group_import.unarchive("F")
- group_import = models.Import.objects.get(pk=group_import.pk)
- self.assertEqual(group_import.state, "FE") # as an error file so state fixed
- self.assertFalse(group_import.imported_images)
+ for idx, sub_import in enumerate(group_import.import_list()):
+ for k in ("imported_file", "result_file"):
+ field = getattr(sub_import, k)
+ self.assertTrue(field, "{} is missing in unarchive".format(k))
+ if idx in SUB_IMPORT_MATCH_IDX:
+ k = "match_file"
+ field = getattr(sub_import, k)
+ self.assertTrue(field, "{} is missing in unarchive".format(k))
def test_delete_related(self):
town = models.Town.objects.create(name="my-test")