-rw-r--r--  archaeological_context_records/serializers.py       | 38
-rw-r--r--  archaeological_finds/serializers.py                  |  4
-rw-r--r--  archaeological_operations/serializers.py             |  4
-rw-r--r--  archaeological_warehouse/serializers.py              | 41
-rw-r--r--  ishtar_common/admin.py                               | 48
-rw-r--r--  ishtar_common/migrations/0101_auto_20190908_1345.py  | 41
-rw-r--r--  ishtar_common/models.py                              | 61
-rw-r--r--  ishtar_common/serializers.py                         | 55
-rw-r--r--  ishtar_common/serializers_utils.py                   | 23
-rw-r--r--  ishtar_common/tasks.py                               | 74
-rw-r--r--  ishtar_common/utils.py                               |  1
-rw-r--r--  ishtar_common/views_item.py                          | 18
12 files changed, 350 insertions(+), 58 deletions(-)
diff --git a/archaeological_context_records/serializers.py b/archaeological_context_records/serializers.py
index d10d8fcd4..9f454eb35 100644
--- a/archaeological_context_records/serializers.py
+++ b/archaeological_context_records/serializers.py
@@ -3,7 +3,10 @@ from django.db.models import Q
from ishtar_common.serializers_utils import generic_get_results, \
archive_serialization
from archaeological_context_records import models
+from archaeological_finds.models import Find
+from archaeological_finds.serializers import generate_warehouse_queryset \
+ as finds_generate_warehouse_queryset
CR_MODEL_LIST = [
models.Dating, models.ContextRecord, models.RecordRelations
@@ -46,8 +49,9 @@ def generate_warehouse_queryset(ids):
def cr_serialization(archive=False, return_empty_types=False,
archive_name=None, operation_queryset=None,
site_queryset=None, cr_queryset=None, find_queryset=None,
- warehouse_queryset=None, get_queryset=False):
+ warehouse_queryset=None, get_queryset=False, no_geo=True):
result_queryset = {}
+ find_ids, cr_ids = None, None
if operation_queryset:
operation_ids = operation_queryset.values_list("id", flat=True)
result_queryset = {
@@ -58,8 +62,15 @@ def cr_serialization(archive=False, return_empty_types=False,
models.RecordRelations.objects.filter(
left_record__operation_id__in=operation_ids,
right_record__operation_id__in=operation_ids,
- )
+ ),
}
+ cr_ids = list(
+ result_queryset[models.ContextRecord.__name__].values_list(
+ "id", flat=True))
+ find_ids = list(
+ Find.objects.filter(
+ base_finds__context_record__operation_id__in=operation_ids
+ ).values_list("id", flat=True))
elif site_queryset:
site_ids = site_queryset.values_list("id", flat=True)
result_queryset = {
@@ -72,6 +83,11 @@ def cr_serialization(archive=False, return_empty_types=False,
right_record__operation__archaeological_sites__id__in=site_ids,
)
}
+ f_q = {
+ "base_finds__context_record__operation__archaeological_sites__id__in": site_ids
+ }
+ find_ids = list(
+ Find.objects.filter(**f_q).values_list("id", flat=True))
elif cr_queryset:
cr_ids = cr_queryset.values_list("id", flat=True)
result_queryset = {
@@ -82,6 +98,10 @@ def cr_serialization(archive=False, return_empty_types=False,
right_record_id__in=cr_ids,
)
}
+ find_ids = list(
+ Find.objects.filter(
+ base_finds__context_record__in=cr_ids).values_list(
+ "id", flat=True))
elif find_queryset:
find_ids = find_queryset.values_list("id", flat=True)
result_queryset = {
@@ -97,11 +117,23 @@ def cr_serialization(archive=False, return_empty_types=False,
elif warehouse_queryset:
warehouse_ids = warehouse_queryset.values_list("id", flat=True)
result_queryset = generate_warehouse_queryset(warehouse_ids)
+ w_queryset = finds_generate_warehouse_queryset(warehouse_ids)
+ find_ids = w_queryset[Find.__name__].values_list("id", flat=True)
+
+ if result_queryset:
+ cr_ids = list(
+ result_queryset[models.ContextRecord.__name__].values_list(
+ "id", flat=True))
+ result_queryset[models.Dating.__name__] = models.Dating.objects.filter(
+ Q(context_records__id__in=cr_ids) |
+ Q(find__id__in=list(find_ids))
+ )
+
if get_queryset:
return result_queryset
result = generic_get_results(CR_MODEL_LIST, "context_records",
- result_queryset=result_queryset)
+ result_queryset=result_queryset, no_geo=no_geo)
full_archive = archive_serialization(
result, archive_dir="context_records", archive=archive,
return_empty_types=return_empty_types, archive_name=archive_name,
diff --git a/archaeological_finds/serializers.py b/archaeological_finds/serializers.py
index b697224fe..7e79123e6 100644
--- a/archaeological_finds/serializers.py
+++ b/archaeological_finds/serializers.py
@@ -43,7 +43,7 @@ def find_serialization(archive=False, return_empty_types=False,
archive_name=None, operation_queryset=None,
site_queryset=None, cr_queryset=None,
find_queryset=None, warehouse_queryset=None,
- get_queryset=False):
+ get_queryset=False, no_geo=True):
result_queryset = {}
if operation_queryset:
operation_ids = operation_queryset.values_list("id", flat=True)
@@ -93,7 +93,7 @@ def find_serialization(archive=False, return_empty_types=False,
return result_queryset
result = generic_get_results(FIND_MODEL_LIST, "finds",
- result_queryset=result_queryset)
+ result_queryset=result_queryset, no_geo=no_geo)
full_archive = archive_serialization(
result, archive_dir="operations", archive=archive,
return_empty_types=return_empty_types, archive_name=archive_name,
diff --git a/archaeological_operations/serializers.py b/archaeological_operations/serializers.py
index a833eb10f..6fd0f10d6 100644
--- a/archaeological_operations/serializers.py
+++ b/archaeological_operations/serializers.py
@@ -57,7 +57,7 @@ def operation_serialization(archive=False, return_empty_types=False,
archive_name=None, operation_queryset=None,
site_queryset=None, cr_queryset=None,
find_queryset=None, warehouse_queryset=None,
- get_queryset=False):
+ get_queryset=False, no_geo=True):
result_queryset = {}
if operation_queryset:
operation_ids = operation_queryset.values_list("id", flat=True)
@@ -123,7 +123,7 @@ def operation_serialization(archive=False, return_empty_types=False,
return result_queryset
result = generic_get_results(OPERATION_MODEL_LIST, "operations",
- result_queryset=result_queryset)
+ result_queryset=result_queryset, no_geo=no_geo)
full_archive = archive_serialization(
result, archive_dir="operations", archive=archive,
return_empty_types=return_empty_types, archive_name=archive_name,
diff --git a/archaeological_warehouse/serializers.py b/archaeological_warehouse/serializers.py
index 904126b32..38ebf4342 100644
--- a/archaeological_warehouse/serializers.py
+++ b/archaeological_warehouse/serializers.py
@@ -14,19 +14,20 @@ WAREHOUSE_MODEL_LIST = [
def generate_warehouse_queryset(base_query_key, ids):
- warehouse_division_q, warehouse_q, container_q = None, None, None
- container_loca_q = None
+ container_ids, warehouse_ids = set(), set()
+ warehouse_div_ids, container_loca_ids = set(), set()
+
for find_key in ("finds", "finds_ref"):
base_key = "{}__{}".format(find_key, base_query_key)
q = Q(**{base_key: ids})
+ container_ids.update(
+ list(models.Container.objects.filter(
+ q).values_list("id", flat=True)))
q_loca = Q(
**{"container__{}__{}".format(find_key, base_query_key): ids})
- if not container_q:
- container_q = q
- container_loca_q = q_loca
- else:
- container_q |= q
- container_loca_q |= q_loca
+ container_loca_ids.update(
+ list(models.ContainerLocalisation.objects.filter(
+ q_loca).values_list("id", flat=True)))
for container_key in ("containers", "owned_containers"):
q = Q(**
{"{}__{}__{}".format(container_key, find_key,
@@ -34,23 +35,23 @@ def generate_warehouse_queryset(base_query_key, ids):
q_div = Q(**
{"warehouse__{}__{}__{}".format(
container_key, find_key, base_query_key): ids})
- if not warehouse_q:
- warehouse_q = q
- warehouse_division_q = q_div
- else:
- warehouse_q |= q
- warehouse_division_q |= q_div
+ warehouse_ids.update(
+ list(models.Warehouse.objects.filter(q).values_list(
+ "id", flat=True)))
+ warehouse_div_ids.update(
+ list(models.WarehouseDivision.objects.filter(q_div).values_list(
+ "id", flat=True)))
result_queryset = {
models.Warehouse.__name__:
- models.Warehouse.objects.filter(warehouse_q),
+ models.Warehouse.objects.filter(id__in=warehouse_ids),
models.Container.__name__:
- models.Container.objects.filter(container_q),
+ models.Container.objects.filter(id__in=container_ids),
models.WarehouseDivisionLink.__name__:
models.WarehouseDivisionLink.objects.filter(
- warehouse_division_q),
+ id__in=warehouse_div_ids),
models.ContainerLocalisation.__name__:
models.ContainerLocalisation.objects.filter(
- container_loca_q)
+ id__in=container_loca_ids)
}
return result_queryset
@@ -59,7 +60,7 @@ def warehouse_serialization(archive=False, return_empty_types=False,
archive_name=None, operation_queryset=None,
site_queryset=None, cr_queryset=None,
find_queryset=None, warehouse_queryset=None,
- get_queryset=False):
+ get_queryset=False, no_geo=True):
result_queryset = {}
if operation_queryset:
operation_ids = operation_queryset.values_list("id", flat=True)
@@ -98,7 +99,7 @@ def warehouse_serialization(archive=False, return_empty_types=False,
if get_queryset:
return result_queryset
result = generic_get_results(WAREHOUSE_MODEL_LIST, "warehouse",
- result_queryset=result_queryset)
+ result_queryset=result_queryset, no_geo=no_geo)
full_archive = archive_serialization(
result, archive_dir="warehouse", archive=archive,
return_empty_types=return_empty_types, archive_name=archive_name,
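
Note: the hunks above in archaeological_warehouse/serializers.py, like the similar ones in archaeological_context_records/serializers.py and ishtar_common/serializers.py, replace chains of OR'ed Q objects with sets of primary keys followed by a single id__in filter, which keeps each sub-query flat and avoids duplicate rows from deep OR'ed joins. A minimal standalone sketch of that pattern; the model and lookup paths below are placeholders, not the real Ishtar models:

# "Collect ids, then filter id__in" pattern used by the hunks above.
# The model and `lookups` passed in are hypothetical examples.
def collect_by_id(model, lookups, ids):
    matching_ids = set()
    for lookup in lookups:
        matching_ids.update(
            model.objects.filter(**{lookup: ids}).values_list("id", flat=True))
    # One flat query instead of a single query with many OR'ed joins.
    return model.objects.filter(id__in=matching_ids)

# e.g. (hypothetical lookup paths):
# containers = collect_by_id(
#     Container,
#     ["finds__base_finds__context_record_id__in",
#      "finds_ref__base_finds__context_record_id__in"],
#     cr_ids)
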
diff --git a/ishtar_common/admin.py b/ishtar_common/admin.py
index 4449f7ba4..7da8265cd 100644
--- a/ishtar_common/admin.py
+++ b/ishtar_common/admin.py
@@ -75,6 +75,9 @@ from archaeological_finds import forms as find_forms, \
forms_treatments as treatment_forms
from archaeological_warehouse import forms as warehouse_forms
+from ishtar_common.tasks import launch_export
+
+
csrf_protect_m = method_decorator(csrf_protect)
@@ -1399,6 +1402,51 @@ class AdministrationTaskAdmin(admin.ModelAdmin):
admin_site.register(models.AdministrationTask, AdministrationTaskAdmin)
+def launch_export_action(modeladmin, request, queryset):
+ model = modeladmin.model
+ back_url = reverse(
+ 'admin:%s_%s_changelist' % (
+ model._meta.app_label,
+ model._meta.model_name)
+ ) + '?' + urllib.parse.urlencode(request.GET)
+ if queryset.count() != 1:
+ messages.add_message(
+ request, messages.ERROR, str(_("Select only one task."))
+ )
+ return HttpResponseRedirect(back_url)
+
+ export_task = queryset.all()[0]
+
+ if export_task.state != "C":
+ messages.add_message(
+ request, messages.ERROR, str(
+                _("Export already scheduled or processed."))
+ )
+ return HttpResponseRedirect(back_url)
+
+ export_task.state = "S"
+ export_task.save()
+
+ if not settings.USE_BACKGROUND_TASK:
+ return launch_export(export_task)
+ return launch_export.delay(export_task)
+
+
+launch_export_action.short_description = _("Launch export")
+
+
+class ExportTaskAdmin(admin.ModelAdmin):
+ readonly_fields = ("result", )
+ exclude = ('creation_date', 'launch_date', 'finished_date')
+ list_display = ["label", 'state', 'creation_date', 'launch_date',
+ 'finished_date']
+ list_filter = ['state']
+ actions = [launch_export_action]
+
+
+admin_site.register(models.ExportTask, ExportTaskAdmin)
+
+
class UserProfileAdmin(admin.ModelAdmin):
list_display = ['person', 'profile_type', 'area_labels']
list_filter = ['profile_type']
diff --git a/ishtar_common/migrations/0101_auto_20190908_1345.py b/ishtar_common/migrations/0101_auto_20190908_1345.py
new file mode 100644
index 000000000..4ff843d7e
--- /dev/null
+++ b/ishtar_common/migrations/0101_auto_20190908_1345.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.18 on 2019-09-08 13:45
+from __future__ import unicode_literals
+
+from django.conf import settings
+import django.contrib.gis.db.models.fields
+import django.contrib.postgres.search
+import django.core.validators
+from django.db import migrations, models
+import django.db.models.deletion
+import re
+import virtualtime
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0100_auto_20190903_1427'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='ExportTask',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('filter_type', models.CharField(blank=True, choices=[('O', 'Operation'), ('S', 'Archaeological site'), ('CR', 'Context record'), ('F', 'Find'), ('W', 'Warehouse')], max_length=2, null=True, verbose_name='Filter on')),
+ ('filter_text', models.TextField(blank=True, help_text='Textual query on this item (try it on the main interface)', null=True, verbose_name='Filter query')),
+                ('geo', models.BooleanField(default=True, help_text='Geographic data can represent a large volume of information. Geographic data can be excluded from the export', verbose_name='Export geographic data')),
+ ('state', models.CharField(choices=[('C', 'Created'), ('S', 'Scheduled'), ('P', 'In progress'), ('FE', 'Finished with errors'), ('F', 'Finished')], default='C', max_length=2, verbose_name='State')),
+ ('creation_date', models.DateTimeField(default=virtualtime.virtual_datetime.now)),
+ ('launch_date', models.DateTimeField(blank=True, null=True)),
+ ('finished_date', models.DateTimeField(blank=True, null=True)),
+ ('result', models.FileField(blank=True, null=True, upload_to='exports', verbose_name='Result')),
+ ],
+ options={
+ 'verbose_name_plural': 'Export tasks',
+ 'verbose_name': 'Export task',
+ 'ordering': ['creation_date'],
+ },
+ ),
+ ]
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index 0a5fc30bf..b3cc807a8 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -5675,3 +5675,64 @@ class AdministrationTask(models.Model):
self.state = 'F'
self.result = "{}".format(stdout.decode('utf-8'))
self.save()
+
+
+ITEM_TYPES = (
+ ("O", _("Operation")),
+ ("S", _("Archaeological site")),
+ ("CR", _("Context record")),
+ ("F", _("Find")),
+ ("W", _("Warehouse")),
+)
+
+
+EXPORT_STATE = (("C", _("Created")),) + SCRIPT_STATE
+
+
+class ExportTask(models.Model):
+ filter_type = models.CharField(
+ _("Filter on"), max_length=2, choices=ITEM_TYPES, null=True, blank=True
+ )
+ filter_text = models.TextField(
+ _("Filter query"), null=True, blank=True,
+ help_text=_("Textual query on this item (try it on the main "
+ "interface)"))
+ geo = models.BooleanField(
+ _("Export geographic data"), default=True,
+        help_text=_("Geographic data can represent a large volume of "
+                    "information. Geographic data can be excluded from "
+                    "the export"))
+ state = models.CharField(_("State"), max_length=2, choices=EXPORT_STATE,
+ default='C')
+ creation_date = models.DateTimeField(default=datetime.datetime.now)
+ launch_date = models.DateTimeField(null=True, blank=True)
+ finished_date = models.DateTimeField(null=True, blank=True)
+ result = models.FileField(_("Result"), null=True, blank=True,
+ upload_to="exports")
+
+ class Meta:
+ verbose_name = _("Export task")
+ verbose_name_plural = _("Export tasks")
+ ordering = ['creation_date']
+
+ def __str__(self):
+ state = _("Unknown")
+        states = dict(EXPORT_STATE)
+        if self.state in states:
+            state = str(states[self.state])
+ return "Export - {} - {}".format(self.creation_date, state)
+
+ @property
+ def label(self):
+ fltr = _("Whole database")
+ if self.filter_type and self.filter_text:
+ dct = dict(ITEM_TYPES)
+ if self.filter_type in dct:
+ fltr = '{} "{}"'.format(dct[self.filter_type], self.filter_text)
+ return "{} - {}".format(fltr, self.creation_date)
+
+ def clean(self):
+ if (self.filter_text and not self.filter_type) or (
+                self.filter_type and not self.filter_text):
+ raise ValidationError(
+                _("To filter, both filter type and filter text must be "
+                  "filled.")
+ )
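
Note: a minimal sketch of how the new ExportTask model is meant to be driven, e.g. from a Django shell; the filter text is only an example value:

# Create an export limited to operations matching a textual query and
# without geographic data (sketch; "2018" is an arbitrary example query).
from ishtar_common.models import ExportTask

task = ExportTask(filter_type="O", filter_text="2018", geo=False)
task.full_clean()   # clean() requires filter type and filter text together
task.save()         # state defaults to "C" (Created)
print(task.label)   # e.g.: Operation "2018" - 2019-09-08 13:45:00
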
diff --git a/ishtar_common/serializers.py b/ishtar_common/serializers.py
index 1b18ccab5..e59c1aa3c 100644
--- a/ishtar_common/serializers.py
+++ b/ishtar_common/serializers.py
@@ -43,11 +43,11 @@ def get_type_models():
def type_serialization(archive=False, return_empty_types=False,
- archive_name=None):
+ archive_name=None, info=None):
result = generic_get_results(get_type_models(), "types")
return archive_serialization(result, archive_dir="types", archive=archive,
return_empty_types=return_empty_types,
- archive_name=archive_name)
+ archive_name=archive_name, info=info)
CONF_MODEL_LIST = [
@@ -147,7 +147,7 @@ def document_serialization(archive=False, return_empty_types=False,
queries.update(cr_serialization(**get_queryset_attr))
queries.update(find_serialization(**get_queryset_attr))
queries.update(warehouse_serialization(**get_queryset_attr))
- q = None
+ document_ids = set()
for model, attr in (
("Operation", "operations"),
("ArchaeologicalSite", "sites"),
@@ -155,13 +155,13 @@ def document_serialization(archive=False, return_empty_types=False,
("Find", "finds"),
("Warehouse", "warehouses"),
("Container", "containers")):
- values = queries[model].values_list("id", flat=True)
- base_q = Q(**{attr + "__id__in": values})
- if not q:
- q = base_q
- else:
- q |= base_q
- result_queryset["Document"] = models.Document.objects.filter(q)
+ values = list(queries[model].values_list("id", flat=True))
+ document_ids.update(
+ models.Document.objects.filter(
+ **{attr + "__id__in": values}).values_list(
+ "id", flat=True))
+ result_queryset["Document"] = models.Document.objects.filter(
+ id__in=document_ids)
result = generic_get_results([models.Document], "documents",
result_queryset=result_queryset)
@@ -213,33 +213,52 @@ def document_serialization(archive=False, return_empty_types=False,
def full_serialization(operation_queryset=None, site_queryset=None,
cr_queryset=None, find_queryset=None,
- warehouse_queryset=None, archive=True):
- archive_name = type_serialization(archive=archive)
+ warehouse_queryset=None, archive=True, no_geo=True,
+ info=None):
+ # print("type")
+ archive_name = type_serialization(archive=archive, info=info)
+ # print("conf")
conf_serialization(archive=archive, archive_name=archive_name)
+ # print("importer")
importer_serialization(archive=archive, archive_name=archive_name)
- geo_serialization(archive=archive, archive_name=archive_name)
+ # print("geo")
+ geo_serialization(archive=archive, archive_name=archive_name, no_geo=no_geo)
+ # print("directory")
directory_serialization(archive=archive, archive_name=archive_name)
- document_serialization(archive=archive, archive_name=archive_name)
- operation_serialization(
+ # print("document")
+ document_serialization(
archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
+ # print("operation")
+ operation_serialization(
+ archive=archive,
+ archive_name=archive_name, operation_queryset=operation_queryset,
+ site_queryset=site_queryset, cr_queryset=cr_queryset,
+ find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+ no_geo=no_geo)
+ # print("cr")
cr_serialization(
archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
- find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
+ find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+ no_geo=no_geo)
+ # print("find")
find_serialization(
archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
- find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
+ find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+ no_geo=no_geo)
+ # print("warehouse")
warehouse_serialization(
archive=archive,
archive_name=archive_name, operation_queryset=operation_queryset,
site_queryset=site_queryset, cr_queryset=cr_queryset,
- find_queryset=find_queryset, warehouse_queryset=warehouse_queryset)
+ find_queryset=find_queryset, warehouse_queryset=warehouse_queryset,
+ no_geo=no_geo)
return archive_name
diff --git a/ishtar_common/serializers_utils.py b/ishtar_common/serializers_utils.py
index 4bd655269..ef36e2695 100644
--- a/ishtar_common/serializers_utils.py
+++ b/ishtar_common/serializers_utils.py
@@ -24,19 +24,23 @@ def get_model_from_filename(filename):
return getattr(module, model_name)
-def serialization_info():
+def serialization_info(info=None):
site = Site.objects.get_current()
- return {
+ base_info = {
"serialize-version": SERIALIZATION_VERSION,
"ishtar-version": get_version(),
"domain": site.domain,
"name": site.name,
"date": datetime.datetime.now().isoformat()
}
+ if info:
+ base_info.update(info)
+ return base_info
def archive_serialization(result, archive_dir=None, archive=False,
- return_empty_types=False, archive_name=None):
+ return_empty_types=False, archive_name=None,
+ info=None):
"""
Serialize all types models to JSON
Used for import and export scripts
@@ -78,7 +82,7 @@ def archive_serialization(result, archive_dir=None, archive=False,
filename = tmpdirname + os.sep + base_filename
with open(filename, "w") as json_file:
json_file.write(
- json.dumps(serialization_info(), indent=2)
+ json.dumps(serialization_info(info=info), indent=2)
)
current_zip.write(filename, arcname=base_filename)
@@ -187,8 +191,10 @@ def generic_archive_files(model_list, archive_name=None, result_queryset=None):
for item in query.all():
for attr in model.SERIALIZATION_FILES:
media = getattr(item, attr)
- result.append((media.path, media.name))
-
+ try:
+ result.append((media.path, media.name))
+ except ValueError:
+ pass
archive_created = False
if not archive_name:
archive_created = True
@@ -199,5 +205,8 @@ def generic_archive_files(model_list, archive_name=None, result_queryset=None):
mode = "w" if archive_created else "a"
with ZipFile(archive_name, mode) as current_zip:
for media_path, name in result:
- current_zip.write(media_path, arcname=name)
+ try:
+ current_zip.write(media_path, arcname=name)
+ except OSError:
+ pass
return archive_name
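
Note: the new info argument threads the export query metadata from launch_export down to the JSON metadata file stored in the archive. An illustrative call, with made-up instance values in the commented output:

# Illustrative only: the "query" key comes from launch_export's
# kwargs["info"]; domain, name and date depend on the running instance.
from ishtar_common.serializers_utils import serialization_info

serialization_info(info={"query": {"filter": "2018", "model": "Operation"}})
# {"serialize-version": "...", "ishtar-version": "...",
#  "domain": "ishtar.example.org", "name": "Ishtar",
#  "date": "2019-09-08T13:45:00",
#  "query": {"filter": "2018", "model": "Operation"}}
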
diff --git a/ishtar_common/tasks.py b/ishtar_common/tasks.py
index a8db97bb1..a68a30b86 100644
--- a/ishtar_common/tasks.py
+++ b/ishtar_common/tasks.py
@@ -17,14 +17,87 @@
# See the file COPYING for details.
+import datetime
+
+import os
import sys
from django.conf import settings
+from django.core.files import File
from django.db.models import Q
from ishtar_common.models import Town, Department
+from ishtar_common.utils import task
+from ishtar_common.serializers import full_serialization
+
+from archaeological_operations.models import Operation, ArchaeologicalSite
+from archaeological_context_records.models import ContextRecord
+from archaeological_finds.models import Find
+from archaeological_warehouse.models import Warehouse
+
+from archaeological_operations.views import get_operation, get_site
+from archaeological_context_records.views import get_contextrecord
+from archaeological_finds.views import get_find
+from archaeological_warehouse.views import get_warehouse
+
+
+@task()
+def launch_export(export_task):
+ if export_task.state != 'S':
+ return
+ export_task.launch_date = datetime.datetime.now()
+ export_task.state = 'P'
+ export_task.save()
+
+ kwargs = {}
+ if export_task.filter_type and export_task.filter_text:
+ query = {"search_vector": export_task.filter_text}
+ kwargs["info"] = {"query": {"filter": export_task.filter_text}}
+ if export_task.filter_type == "O":
+ kwargs["info"]["query"]["model"] = "Operation"
+ ids = list(
+ get_operation(
+ None, query=query, return_query=True).values_list(
+ "id", flat=True))
+ kwargs["operation_queryset"] = Operation.objects.filter(pk__in=ids)
+ elif export_task.filter_type == "S":
+ kwargs["info"]["query"]["model"] = "ArchaeologicalSite"
+ ids = list(get_site(
+ None, query=query, return_query=True).values_list(
+ "id", flat=True))
+ kwargs["site_queryset"] = ArchaeologicalSite.objects.filter(
+ pk__in=ids)
+ elif export_task.filter_type == "CR":
+        kwargs["info"]["query"]["model"] = "ContextRecord"
+ ids = list(get_contextrecord(
+ None, query=query, return_query=True).values_list(
+ "id", flat=True))
+ kwargs["cr_queryset"] = ContextRecord.objects.filter(pk__in=ids)
+ elif export_task.filter_type == "F":
+ kwargs["info"]["query"]["model"] = "Find"
+ ids = list(get_find(
+ None, query=query, return_query=True
+ ).values_list("id", flat=True))
+ kwargs["find_queryset"] = Find.objects.filter(pk__in=ids)
+ elif export_task.filter_type == "W":
+ kwargs["info"]["query"]["model"] = "Warehouse"
+ ids = list(get_warehouse(
+ None, query=query, return_query=True
+ ).values_list("id", flat=True))
+ kwargs["warehouse_queryset"] = Warehouse.objects.filter(pk__in=ids)
+    kwargs["no_geo"] = not export_task.geo
+ archive_name = full_serialization(**kwargs)
+    with open(archive_name, "rb") as archive:
+        export_task.result.save(
+            archive_name.split(os.sep)[-1], File(archive))
+ os.remove(archive_name)
+ export_task.finished_date = datetime.datetime.now()
+ export_task.state = 'F'
+ export_task.save()
+
def load_towns():
+ # TODO: remove?
from geodjangofla.models import Commune
q = None
for dpt_number in settings.ISHTAR_DPTS:
@@ -55,6 +128,7 @@ def load_towns():
return nb, updated
def update_towns():
+ # TODO: remove?
nb, updated = 0, 0
dpts = dict([(dpt.number, dpt) for dpt in Department.objects.all()])
q = Town.objects.filter(numero_insee__isnull=False)
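
Note: the launch_export task above is normally triggered by the "Launch export" admin action added in ishtar_common/admin.py, but it can also be run by hand. A sketch of a synchronous run, assuming settings.USE_BACKGROUND_TASK is disabled (otherwise use launch_export.delay() as the admin action does):

# Manually schedule and run a created export task (sketch).
from ishtar_common.models import ExportTask
from ishtar_common.tasks import launch_export

task = ExportTask.objects.filter(state="C").first()
if task:
    task.state = "S"      # launch_export() skips anything not scheduled
    task.save()
    launch_export(task)   # serializes, zips and stores the archive
    print(task.state, task.result.name)   # "F" and the file under exports/
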
diff --git a/ishtar_common/utils.py b/ishtar_common/utils.py
index 388f54ff4..df92208e9 100644
--- a/ishtar_common/utils.py
+++ b/ishtar_common/utils.py
@@ -385,7 +385,6 @@ class SecretaryRenderer(MainSecretaryRenderer):
return zip_file
-
def serialize_args_for_tasks(sender, instance, kwargs, extra_kwargs=None):
if 'instance' in kwargs:
kwargs['instance'] = kwargs["instance"].pk
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index f330e26b7..62aff2e81 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -1298,7 +1298,8 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
:return:
"""
def func(request, data_type='json', full=False, force_own=False,
- col_names=None, no_link=False, no_limit=False, **dct):
+ col_names=None, no_link=False, no_limit=False, return_query=False,
+ **dct):
available_perms = []
if specific_perms:
available_perms = specific_perms[:]
@@ -1321,8 +1322,11 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
if model_for_perms:
model_to_check = model_for_perms
- allowed, own = check_model_access_control(request, model_to_check,
- available_perms)
+ if return_query:
+ allowed, own = True, False
+ else:
+ allowed, own = check_model_access_control(request, model_to_check,
+ available_perms)
if not allowed:
return HttpResponse(EMPTY, content_type='text/plain')
@@ -1425,7 +1429,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
request_keys.update(my_extra_request_keys)
# manage search on json fields and excluded fields
- if search_form and request.user and getattr(
+ if search_form and request and request.user and getattr(
request.user, 'ishtaruser', None):
available, excluded_fields, json_fields = \
search_form.check_custom_form(request.user.ishtaruser)
@@ -1531,7 +1535,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
dct, pinned_search = _manage_default_search(
dct, request, model, default_name, my_base_request,
my_relative_session_names)
- elif func_name:
+ elif func_name and request:
request.session[func_name] = dct
for k in request_keys:
@@ -1633,6 +1637,10 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
for extra in extras:
items = items.extra(**extra)
+
+ if return_query:
+ return items
+
items = items.distinct()
items_nb = items.values('pk').aggregate(Count('pk'))['pk__count']
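
Note: the return_query switch added to get_item is what lets launch_export reuse the public search machinery without an HTTP request or permission check: the generated view (get_operation, get_find, ...) is called with request=None and hands back a queryset instead of rendering JSON. A short sketch mirroring the calls in ishtar_common/tasks.py; the search text is an arbitrary example:

# Turn a textual search into a queryset of ids, outside any view (sketch).
from archaeological_operations.views import get_operation
from archaeological_operations.models import Operation

ids = list(
    get_operation(
        None,                                   # no HTTP request
        query={"search_vector": "diagnostic"},  # same syntax as the main search
        return_query=True,                      # skip permission check
    ).values_list("id", flat=True))
operations = Operation.objects.filter(pk__in=ids)
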