author     Étienne Loks <etienne.loks@iggdrasil.net>   2019-09-09 14:12:36 +0200
committer  Étienne Loks <etienne.loks@iggdrasil.net>   2019-09-09 14:12:36 +0200
commit     ad9727f91af7d1fdde26bb29118d99623d6c65cb (patch)
tree       894769f3c546130512a1b09247c36414c1709f3e /ishtar_common
parent     8a17c17d45a2c9b63c5a70c4a6e3e56cc3c942d4 (diff)
download   Ishtar-ad9727f91af7d1fdde26bb29118d99623d6c65cb.tar.bz2
           Ishtar-ad9727f91af7d1fdde26bb29118d99623d6c65cb.zip
Serializers: import action - manage partial export by type of data
Diffstat (limited to 'ishtar_common')
-rw-r--r--  ishtar_common/admin.py                                63
-rw-r--r--  ishtar_common/migrations/0102_auto_20190909_1300.py   39
-rw-r--r--  ishtar_common/migrations/0103_auto_20190909_1342.py   56
-rw-r--r--  ishtar_common/models.py                               30
-rw-r--r--  ishtar_common/serializers.py                         104
-rw-r--r--  ishtar_common/tasks.py                                59
6 files changed, 296 insertions, 55 deletions
diff --git a/ishtar_common/admin.py b/ishtar_common/admin.py
index 7da8265cd..25dffcf72 100644
--- a/ishtar_common/admin.py
+++ b/ishtar_common/admin.py
@@ -75,7 +75,7 @@ from archaeological_finds import forms as find_forms, \
     forms_treatments as treatment_forms
 from archaeological_warehouse import forms as warehouse_forms
 
-from ishtar_common.tasks import launch_export
+from ishtar_common.tasks import launch_export, launch_import
 
 csrf_protect_m = method_decorator(csrf_protect)
 
@@ -1447,6 +1447,67 @@ class ExportTaskAdmin(admin.ModelAdmin):
 admin_site.register(models.ExportTask, ExportTaskAdmin)
 
 
+"""
+class Media:
+    js = (
+        'js/myscript.js',  # project's static folder ( /static/js/myscript.js )
+    )
+
+$(document).ready(function(){
+    $('form').submit(function() {
+        var c = confirm("continue submitting ?");
+        return c;
+    });
+})
+
+
+"""
+
+
+def launch_import_action(modeladmin, request, queryset):
+    model = modeladmin.model
+    back_url = reverse(
+        'admin:%s_%s_changelist' % (
+            model._meta.app_label,
+            model._meta.model_name)
+    ) + '?' + urllib.parse.urlencode(request.GET)
+    if queryset.count() != 1:
+        messages.add_message(
+            request, messages.ERROR, str(_("Select only one task."))
+        )
+        return HttpResponseRedirect(back_url)
+
+    import_task = queryset.all()[0]
+
+    if import_task.state != "C":
+        messages.add_message(
+            request, messages.ERROR, str(
+                _("Import already imported/scheduled."))
+        )
+        return HttpResponseRedirect(back_url)
+
+    import_task.state = "S"
+    import_task.save()
+
+    if not settings.USE_BACKGROUND_TASK:
+        return launch_import(import_task)
+    return launch_import.delay(import_task)
+
+
+launch_import_action.short_description = _("Launch import")
+
+
+class ImportTaskAdmin(admin.ModelAdmin):
+    exclude = ('creation_date', 'launch_date', 'finished_date')
+    list_display = ['creation_date', "source", 'state', 'launch_date',
+                    'finished_date']
+    list_filter = ['state']
+    actions = [launch_import_action]
+
+
+admin_site.register(models.ImportTask, ImportTaskAdmin)
+
+
 class UserProfileAdmin(admin.ModelAdmin):
     list_display = ['person', 'profile_type', 'area_labels']
     list_filter = ['profile_type']
diff --git a/ishtar_common/migrations/0102_auto_20190909_1300.py b/ishtar_common/migrations/0102_auto_20190909_1300.py
new file mode 100644
index 000000000..6016cac7f
--- /dev/null
+++ b/ishtar_common/migrations/0102_auto_20190909_1300.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.18 on 2019-09-09 13:00
+from __future__ import unicode_literals
+
+from django.conf import settings
+import django.contrib.gis.db.models.fields
+import django.contrib.postgres.search
+import django.core.validators
+from django.db import migrations, models
+import django.db.models.deletion
+import re
+import virtualtime
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('ishtar_common', '0101_auto_20190908_1345'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ImportTask',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('creation_date', models.DateTimeField(default=virtualtime.virtual_datetime.now)),
+                ('launch_date', models.DateTimeField(blank=True, null=True)),
+                ('finished_date', models.DateTimeField(blank=True, null=True)),
+                ('state', models.CharField(choices=[('C', 'Created'), ('S', 'Scheduled'), ('P', 'In progress'), ('FE', 'Finished with errors'), ('F', 'Finished')], default='C', max_length=2, verbose_name='State')),
+                ('delete_before', models.BooleanField(default=False, help_text='Delete existing items before adding', verbose_name='Delete before adding')),
+                ('source', models.FileField(upload_to='imports', verbose_name='Source')),
+            ],
+            options={
+                'verbose_name': 'Import task',
+                'ordering': ['creation_date'],
+                'verbose_name_plural': 'Import tasks',
+            },
+        ),
+    ]
diff --git a/ishtar_common/migrations/0103_auto_20190909_1342.py b/ishtar_common/migrations/0103_auto_20190909_1342.py
new file mode 100644
index 000000000..ef03725e9
--- /dev/null
+++ b/ishtar_common/migrations/0103_auto_20190909_1342.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.18 on 2019-09-09 13:42
+from __future__ import unicode_literals
+
+from django.conf import settings
+import django.contrib.gis.db.models.fields
+import django.contrib.postgres.search
+import django.core.validators
+from django.db import migrations, models
+import django.db.models.deletion
+import re
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('ishtar_common', '0102_auto_20190909_1300'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_conf',
+            field=models.BooleanField(default=True, verbose_name='Export configuration'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_dir',
+            field=models.BooleanField(default=True, verbose_name='Export directory'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_docs',
+            field=models.BooleanField(default=True, verbose_name='Export documents'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_geo',
+            field=models.BooleanField(default=True, verbose_name='Export towns, areas...'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_importers',
+            field=models.BooleanField(default=True, verbose_name='Export importers'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_items',
+            field=models.BooleanField(default=True, verbose_name='Export main items'),
+        ),
+        migrations.AddField(
+            model_name='exporttask',
+            name='export_types',
+            field=models.BooleanField(default=True, verbose_name='Export types'),
+        ),
+    ]
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index b3cc807a8..bcec9931e 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -5704,6 +5704,13 @@ class ExportTask(models.Model):
                                    "export"))
     state = models.CharField(_("State"), max_length=2, choices=EXPORT_STATE,
                              default='C')
+    export_types = models.BooleanField(_("Export types"), default=True)
+    export_conf = models.BooleanField(_("Export configuration"), default=True)
+    export_importers = models.BooleanField(_("Export importers"), default=True)
+    export_geo = models.BooleanField(_("Export towns, areas..."), default=True)
+    export_dir = models.BooleanField(_("Export directory"), default=True)
+    export_docs = models.BooleanField(_("Export documents"), default=True)
+    export_items = models.BooleanField(_("Export main items"), default=True)
     creation_date = models.DateTimeField(default=datetime.datetime.now)
     launch_date = models.DateTimeField(null=True, blank=True)
     finished_date = models.DateTimeField(null=True, blank=True)
@@ -5736,3 +5743,26 @@ class ExportTask(models.Model):
             raise ValidationError(
                 _("To filter filter type and filter text must be filled.")
             )
+
+
+class ImportTask(models.Model):
+    creation_date = models.DateTimeField(default=datetime.datetime.now)
+    launch_date = models.DateTimeField(null=True, blank=True)
+    finished_date = models.DateTimeField(null=True, blank=True)
+    state = models.CharField(_("State"), max_length=2, choices=EXPORT_STATE,
+                             default='C')
+    delete_before = models.BooleanField(
+        _("Delete before adding"), default=False,
+        help_text=_("Delete existing items before adding"))
+    source = models.FileField(_("Source"), upload_to="imports")
+
+    class Meta:
+        verbose_name = _("Import task")
+        verbose_name_plural = _("Import tasks")
+        ordering = ['creation_date']
+
+    def __str__(self):
+        state = _("Unknown")
+        if self.state in SCRIPT_STATE_DCT:
+            state = str(SCRIPT_STATE_DCT[self.state])
+        return "Import - {} - {}".format(self.creation_date, state)
diff --git a/ishtar_common/serializers.py b/ishtar_common/serializers.py
index e59c1aa3c..66f771e3c 100644
--- a/ishtar_common/serializers.py
+++ b/ishtar_common/serializers.py
@@ -214,51 +214,65 @@ def document_serialization(archive=False, return_empty_types=False,
 def full_serialization(operation_queryset=None, site_queryset=None,
                        cr_queryset=None, find_queryset=None,
                        warehouse_queryset=None, archive=True, no_geo=True,
-                       info=None):
-    # print("type")
-    archive_name = type_serialization(archive=archive, info=info)
-    # print("conf")
-    conf_serialization(archive=archive, archive_name=archive_name)
-    # print("importer")
-    importer_serialization(archive=archive, archive_name=archive_name)
-    # print("geo")
-    geo_serialization(archive=archive, archive_name=archive_name, no_geo=no_geo)
-    # print("directory")
-    directory_serialization(archive=archive, archive_name=archive_name)
-    # print("document")
-    document_serialization(
-        archive=archive,
-        archive_name=archive_name, operation_queryset=operation_queryset,
-        site_queryset=site_queryset, cr_queryset=cr_queryset,
-        find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
-    # print("operation")
-    operation_serialization(
-        archive=archive,
-        archive_name=archive_name, operation_queryset=operation_queryset,
-        site_queryset=site_queryset, cr_queryset=cr_queryset,
-        find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
-        no_geo=no_geo)
-    # print("cr")
-    cr_serialization(
-        archive=archive,
-        archive_name=archive_name, operation_queryset=operation_queryset,
-        site_queryset=site_queryset, cr_queryset=cr_queryset,
-        find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
-        no_geo=no_geo)
-    # print("find")
-    find_serialization(
-        archive=archive,
-        archive_name=archive_name, operation_queryset=operation_queryset,
-        site_queryset=site_queryset, cr_queryset=cr_queryset,
-        find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
-        no_geo=no_geo)
-    # print("warehouse")
-    warehouse_serialization(
-        archive=archive,
-        archive_name=archive_name, operation_queryset=operation_queryset,
-        site_queryset=site_queryset, cr_queryset=cr_queryset,
-        find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
-        no_geo=no_geo)
+                       info=None, export_types=True, export_conf=True,
+                       export_importers=True, export_geo=True, export_dir=True,
+                       export_docs=True, export_items=True):
+    archive_name = None
+    if export_types:
+        # print("type")
+        archive_name = type_serialization(archive=archive, info=info)
+    if export_conf:
+        # print("conf")
+        archive_name = conf_serialization(archive=archive,
+                                          archive_name=archive_name)
+    if export_importers:
+        # print("importer")
+        archive_name = importer_serialization(archive=archive,
+                                              archive_name=archive_name)
+    if export_geo:
+        # print("geo")
+        archive_name = geo_serialization(
+            archive=archive, archive_name=archive_name, no_geo=no_geo)
+    if export_dir:
+        # print("directory")
+        archive_name = directory_serialization(archive=archive,
+                                               archive_name=archive_name)
+    if export_docs:
+        # print("document")
+        archive_name = document_serialization(
+            archive=archive, archive_name=archive_name,
+            operation_queryset=operation_queryset, site_queryset=site_queryset,
+            cr_queryset=cr_queryset, find_queryset=find_queryset,
+            warehouse_queryset=warehouse_queryset)
+    if export_items:
+        # print("operation")
+        archive_name = operation_serialization(
+            archive=archive,
+            archive_name=archive_name, operation_queryset=operation_queryset,
+            site_queryset=site_queryset, cr_queryset=cr_queryset,
+            find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+            no_geo=no_geo)
+        # print("cr")
+        cr_serialization(
+            archive=archive,
+            archive_name=archive_name, operation_queryset=operation_queryset,
+            site_queryset=site_queryset, cr_queryset=cr_queryset,
+            find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+            no_geo=no_geo)
+        # print("find")
+        find_serialization(
+            archive=archive,
+            archive_name=archive_name, operation_queryset=operation_queryset,
+            site_queryset=site_queryset, cr_queryset=cr_queryset,
+            find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+            no_geo=no_geo)
+        # print("warehouse")
+        warehouse_serialization(
+            archive=archive,
+            archive_name=archive_name, operation_queryset=operation_queryset,
+            site_queryset=site_queryset, cr_queryset=cr_queryset,
+            find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+            no_geo=no_geo)
 
     return archive_name
diff --git a/ishtar_common/tasks.py b/ishtar_common/tasks.py
index 631996684..2a968eb0a 100644
--- a/ishtar_common/tasks.py
+++ b/ishtar_common/tasks.py
@@ -28,7 +28,7 @@ from django.db.models import Q
 from ishtar_common.models import Town, Department
 from ishtar_common.utils import task
 
-from ishtar_common.serializers import full_serialization
+from ishtar_common.serializers import full_serialization, restore_serialized
 
 from archaeological_operations.models import Operation, ArchaeologicalSite
 from archaeological_context_records.models import ContextRecord
@@ -42,6 +42,21 @@ from archaeological_warehouse.views import get_warehouse
 
 
 @task()
+def launch_import(import_task):
+    if import_task.state != 'S':
+        return
+    import_task.launch_date = datetime.datetime.now()
+    import_task.state = 'P'
+    import_task.save()
+
+    restore_serialized(import_task.source.path,
+                       delete_existing=import_task.delete_before)
+    import_task.finished_date = datetime.datetime.now()
+    import_task.state = 'F'
+    import_task.save()
+
+
+@task()
 def launch_export(export_task):
     if export_task.state != 'S':
         return
@@ -49,42 +64,68 @@ def launch_export(export_task):
     export_task.state = 'P'
     export_task.save()
 
-    kwargs = {}
+    kwargs = {"info": {}}
+
+    for fltr_key in ("export_types", "export_conf", "export_importers",
+                     "export_geo", "export_dir", "export_docs", "export_items"):
+        kwargs["info"][fltr_key] = getattr(export_task, fltr_key)
+        kwargs[fltr_key] = getattr(export_task, fltr_key)
+
     if export_task.filter_type and export_task.filter_text:
         query = {"search_vector": export_task.filter_text}
-        kwargs["info"] = {"query": {"filter": export_task.filter_text}}
+        kwargs["info"]["query"] = {"filter": export_task.filter_text}
         if export_task.filter_type == "O":
             kwargs["info"]["query"]["model"] = "Operation"
             ids = list(
                 get_operation(
                     None, query=query, return_query=True).values_list(
                     "id", flat=True))
-            kwargs["operation_queryset"] = Operation.objects.filter(pk__in=ids)
+            if not ids:
+                kwargs["operation_queryset"] = Operation.objects.filter(
+                    pk=-1)
+            else:
+                kwargs["operation_queryset"] = Operation.objects.filter(
+                    pk__in=ids)
         elif export_task.filter_type == "S":
             kwargs["info"]["query"]["model"] = "ArchaeologicalSite"
             ids = list(get_site(
                 None, query=query, return_query=True).values_list(
                 "id", flat=True))
-            kwargs["site_queryset"] = ArchaeologicalSite.objects.filter(
-                pk__in=ids)
+            if not ids:
+                kwargs["site_queryset"] = ArchaeologicalSite.objects.filter(
+                    pk=-1)
+            else:
+                kwargs["site_queryset"] = ArchaeologicalSite.objects.filter(
+                    pk__in=ids)
         elif export_task.filter_type == "CR":
             kwargs["info"]["query"]["model"] = "ArchaeologicalSite"
             ids = list(get_contextrecord(
                 None, query=query, return_query=True).values_list(
                 "id", flat=True))
-            kwargs["cr_queryset"] = ContextRecord.objects.filter(pk__in=ids)
+            if not ids:
+                kwargs["cr_queryset"] = ContextRecord.objects.filter(pk=-1)
+            else:
+                kwargs["cr_queryset"] = ContextRecord.objects.filter(pk__in=ids)
         elif export_task.filter_type == "F":
             kwargs["info"]["query"]["model"] = "Find"
             ids = list(get_find(
                 None, query=query, return_query=True
            ).values_list("id", flat=True))
-            kwargs["find_queryset"] = Find.objects.filter(pk__in=ids)
+            if not ids:
+                kwargs["find_queryset"] = Find.objects.filter(pk=-1)
+            else:
+                kwargs["find_queryset"] = Find.objects.filter(pk__in=ids)
         elif export_task.filter_type == "W":
             kwargs["info"]["query"]["model"] = "Warehouse"
            ids = list(get_warehouse(
                 None, query=query, return_query=True
            ).values_list("id", flat=True))
-            kwargs["warehouse_queryset"] = Warehouse.objects.filter(pk__in=ids)
+            if not ids:
+                kwargs["warehouse_queryset"] = Warehouse.objects.filter(pk=-1)
+            else:
+                kwargs["warehouse_queryset"] = Warehouse.objects.filter(
+                    pk__in=ids)
+
+    kwargs["info"]["geo"] = export_task.geo
     if not export_task.geo:
         kwargs["no_geo"] = True
     archive_name = full_serialization(**kwargs)
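
Note on the new keyword arguments: the export_* flags added to full_serialization() by this commit can also be passed directly, which is what launch_export() does with the values stored on the ExportTask. The sketch below shows a partial export limited to types and configuration; it is an illustration only and assumes a configured Django settings module for an Ishtar instance — everything other than the full_serialization() signature shown in the diff above is assumed.

    # Sketch: a partial export keeping only type tables and configuration,
    # mirroring an ExportTask where only export_types and export_conf are checked.
    from ishtar_common.serializers import full_serialization

    archive_name = full_serialization(
        archive=True,            # keep the default: bundle the output into an archive
        export_types=True,       # serialize type tables
        export_conf=True,        # serialize configuration
        export_importers=False,  # skip importers
        export_geo=False,        # skip towns, areas...
        export_dir=False,        # skip the directory
        export_docs=False,       # skip documents
        export_items=False,      # skip main items (operations, context records, finds, warehouses)
    )
    # archive_name is whatever the last serialization step returned,
    # and stays None when every export_* flag is False.
    print(archive_name)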