-rw-r--r--  .gitlab-ci.yml | 4
-rw-r--r--  Makefile.example | 4
-rw-r--r--  UPGRADE.md | 7
-rw-r--r--  archaeological_context_records/admin.py | 2
-rw-r--r--  archaeological_context_records/data_importer.py | 82
-rw-r--r--  archaeological_context_records/forms.py | 1
-rw-r--r--  archaeological_context_records/migrations/0010_auto_20171011_1644.py | 26
-rw-r--r--  archaeological_context_records/migrations/0011_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_context_records/migrations/0012_auto_20171017_1840.py | 26
-rw-r--r--  archaeological_context_records/models.py | 9
-rw-r--r--  archaeological_context_records/templates/ishtar/sheet_contextrecord.html | 2
-rw-r--r--  archaeological_context_records/templates/ishtar/sheet_contextrecord_pdf.html | 4
-rw-r--r--  archaeological_context_records/templates/ishtar/sheet_contextrecordsource_pdf.html | 4
-rw-r--r--  archaeological_context_records/tests.py | 60
-rw-r--r--  archaeological_files/admin.py | 4
-rw-r--r--  archaeological_files/data_importer.py | 359
-rw-r--r--  archaeological_files/migrations/0008_auto_20171011_1644.py | 26
-rw-r--r--  archaeological_files/migrations/0009_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_files/migrations/0010_auto_20171017_1840.py | 26
-rw-r--r--  archaeological_files/models.py | 3
-rw-r--r--  archaeological_files/templates/ishtar/sheet_file.html | 2
-rw-r--r--  archaeological_files/templates/ishtar/sheet_file_pdf.html | 4
-rw-r--r--  archaeological_finds/admin.py | 11
-rw-r--r--  archaeological_finds/data_importer.py | 158
-rw-r--r--  archaeological_finds/forms.py | 1
-rw-r--r--  archaeological_finds/migrations/0010_auto_20171011_1644.py | 61
-rw-r--r--  archaeological_finds/migrations/0011_auto_20171012_1316.py | 65
-rw-r--r--  archaeological_finds/migrations/0012_auto_20171017_1840.py | 61
-rw-r--r--  archaeological_finds/models_finds.py | 18
-rw-r--r--  archaeological_finds/models_treatments.py | 7
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_find.html | 2
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_find_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_findbasket_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_findsource_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatment.html | 2
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatment_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatmentfile.html | 2
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatmentfile_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatmentfilesource_pdf.html | 4
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatmentsource_pdf.html | 4
-rw-r--r--  archaeological_operations/admin.py | 9
-rw-r--r--  archaeological_operations/data_importer.py | 280
-rw-r--r--  archaeological_operations/forms.py | 1
-rw-r--r--  archaeological_operations/migrations/0009_auto_20171011_1644.py | 51
-rw-r--r--  archaeological_operations/migrations/0010_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_operations/migrations/0011_auto_20171017_1840.py | 51
-rw-r--r--  archaeological_operations/models.py | 7
-rw-r--r--  archaeological_operations/templates/ishtar/sheet_administrativeact_pdf.html | 4
-rw-r--r--  archaeological_operations/templates/ishtar/sheet_operation.html | 2
-rw-r--r--  archaeological_operations/templates/ishtar/sheet_operation_pdf.html | 4
-rw-r--r--  archaeological_operations/templates/ishtar/sheet_operationsource_pdf.html | 4
-rw-r--r--  archaeological_operations/tests.py | 147
-rw-r--r--  archaeological_operations/tests/operations-with-json-fields.csv | 3
-rw-r--r--  archaeological_warehouse/admin.py | 7
-rw-r--r--  archaeological_warehouse/migrations/0008_auto_20171011_1644.py | 36
-rw-r--r--  archaeological_warehouse/migrations/0009_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_warehouse/migrations/0010_auto_20171017_1840.py | 31
-rw-r--r--  archaeological_warehouse/models.py | 4
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_container.html | 2
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_container_pdf.html | 4
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_warehouse.html | 2
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_warehouse_pdf.html | 4
-rw-r--r--  example_project/.coveragerc | 1
-rw-r--r--  example_project/settings.py | 5
-rwxr-xr-x  install/ishtar-install | 74
-rw-r--r--  ishtar_common/admin.py | 94
-rw-r--r--  ishtar_common/data_importer.py | 7
-rw-r--r--  ishtar_common/management/commands/update_search_vectors.py | 24
-rw-r--r--  ishtar_common/migrations/0015_auto_20171011_1644.py | 36
-rw-r--r--  ishtar_common/migrations/0016_auto_20171016_1104.py | 30
-rw-r--r--  ishtar_common/migrations/0017_auto_20171016_1320.py | 29
-rw-r--r--  ishtar_common/migrations/0018_auto_20171017_1840.py | 72
-rw-r--r--  ishtar_common/models.py | 205
-rw-r--r--  ishtar_common/static/gentium/GentiumPlus-I.ttf | bin 0 -> 1818280 bytes
-rw-r--r--  ishtar_common/static/gentium/GentiumPlus-R.ttf | bin 0 -> 1918536 bytes
-rw-r--r--  ishtar_common/static/gentium/OFL.txt | 94
-rw-r--r--  ishtar_common/static/gentium/README.txt | 88
-rw-r--r--  ishtar_common/static/media/style_basic.css | 35
-rw-r--r--  ishtar_common/templates/ishtar/blocks/sheet_json.html | 11
-rw-r--r--  ishtar_common/templates/ishtar/sheet_organization_pdf.html | 4
-rw-r--r--  ishtar_common/templates/ishtar/sheet_person_pdf.html | 4
-rw-r--r--  ishtar_common/tests.py | 45
-rw-r--r--  ishtar_common/utils.py | 44
-rw-r--r--  ishtar_common/views.py | 65
-rw-r--r--  ishtar_common/wizards.py | 3
-rw-r--r--  requirements.txt | 9
-rw-r--r--  static/gis/js/OLMapWidget.js | 376
-rw-r--r--  version.py | 4
88 files changed, 1766 insertions, 1417 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 53b4d7d88..fb2d4e824 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,7 @@
before_script:
- apt-get update
- - apt-get install -q -y git python-pip libpq-dev python-dev libjpeg-dev zlib1g-dev libxml2-dev libxslt1-dev libgeos-dev tidy binutils libproj-dev gdal-bin
- - pip install -q -r requirements.txt
+ - apt-get install -q -y git python-pip libpq-dev python-dev libjpeg-dev zlib1g-dev libxml2-dev libxslt1-dev libgeos-dev python-cairocffi tidy binutils libproj-dev gdal-bin libpangocairo-1.0-0
+ - pip install -r requirements.txt
- cp Makefile.example Makefile
- cp example_project/local_settings.py.gitlab-ci example_project/local_settings.py
diff --git a/Makefile.example b/Makefile.example
index abb4eec77..b3016e8af 100644
--- a/Makefile.example
+++ b/Makefile.example
@@ -50,10 +50,10 @@ coverage: clean
archaeological_context_records,archaeological_files,archaeological_finds,archaeological_warehouse,\
archaeological_files_pdl" ./manage.py test $(apps) && coverage report
-build_gitlab:
+build_gitlab: clean collectstatic
cd $(project); $(PYTHON) ./manage.py migrate
-test_gitlab: clean
+test_gitlab: build_gitlab
cd $(project); $(PYTHON) manage.py test $(apps)
pep8:
diff --git a/UPGRADE.md b/UPGRADE.md
index 34d1560db..de01ca78b 100644
--- a/UPGRADE.md
+++ b/UPGRADE.md
@@ -35,3 +35,10 @@ cd <application-path>
./manage.py migrate --fake archaeological_warehouse 0002_auto_20170414_2123
./manage.py migrate
```
+
+Finally, create indexes for the new full text search engine:
+
+```
+cd <application-path>
+./manage.py update_search_vectors
+```
diff --git a/archaeological_context_records/admin.py b/archaeological_context_records/admin.py
index 2733fa2ff..d5e4d09b9 100644
--- a/archaeological_context_records/admin.py
+++ b/archaeological_context_records/admin.py
@@ -62,7 +62,7 @@ class ContextRecordAdmin(HistorizedObjectAdmin):
model = models.ContextRecord
form = AdminContextRecordForm
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label', 'datings'
+ 'cached_label', 'datings'
]
admin_site.register(models.ContextRecord, ContextRecordAdmin)
diff --git a/archaeological_context_records/data_importer.py b/archaeological_context_records/data_importer.py
deleted file mode 100644
index 5fdc67949..000000000
--- a/archaeological_context_records/data_importer.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (C) 2015 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# See the file COPYING for details.
-
-from ishtar_common.data_importer import *
-
-from archaeological_context_records import models
-
-
-class ContextRecordsImporterBibracte(Importer):
- DESC = u"Exports Bibracte : importeur pour l'onglet UE"
- OBJECT_CLS = models.ContextRecord
- DEFAULTS = {}
- LINE_FORMAT = [
- # ID operation
- ImportFormater('operation__operation_code', IntegerFormater(),
- duplicate_fields=[('parcel__operation__operation_code',
- False)]),
- # ID UE
- ImportFormater('external_id', UnicodeFormater(120),
- duplicate_fields=[('label', False)],),
- # Type
- ImportFormater('unit', TypeFormater(models.Unit), required=False),
- # description
- ImportFormater('description', UnicodeFormater(1000), required=False,),
- # interprétation
- ImportFormater('interpretation', UnicodeFormater(1000),
- required=False,),
- # date ouverture
- ImportFormater('opening_date', DateFormater(['%Y/%m/%d']),
- required=False,),
- # date fermeture
- ImportFormater('closing_date', DateFormater(['%Y/%m/%d']),
- required=False,),
- # lien vers parcelle
- ImportFormater('parcel__external_id', UnicodeFormater(12),
- required=False,),
- # lien vers ID sig
- None,
- # commentaire
- ImportFormater('comment', UnicodeFormater(1000), required=False,),
- # ????
- None,
- # chrono #TODO! pas de vrai création de nouvelle et en cas de modif
- # c'est la zone
- ImportFormater('datings__period', TypeFormater(models.Period),
- required=False),
- ]
-
-
-class ContextRecordsRelationImporterBibracte(Importer):
- DESC = u"Exports Bibracte : importeur pour l'onglet relations entre UE"
- OBJECT_CLS = models.RecordRelations
- DEFAULTS = {}
- LINE_FORMAT = [
- # code OA
- ImportFormater(
- 'left_record__operation__operation_code', IntegerFormater(),
- duplicate_fields=[('right_record__operation__operation_code',
- False)],),
- # identifiant UE 1
- ImportFormater('left_record__external_id', UnicodeFormater(120),),
- # type relation
- ImportFormater('relation_type', TypeFormater(models.RelationType),),
- # identifiant UE 2
- ImportFormater('right_record__external_id', UnicodeFormater(120),),
- ]
diff --git a/archaeological_context_records/forms.py b/archaeological_context_records/forms.py
index e5c244fde..c310e98fa 100644
--- a/archaeological_context_records/forms.py
+++ b/archaeological_context_records/forms.py
@@ -56,6 +56,7 @@ class OperationFormSelection(forms.Form):
class RecordSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
label = forms.CharField(label=_(u"ID"), max_length=100)
parcel__town = get_town_field()
if settings.COUNTRY == 'fr':
diff --git a/archaeological_context_records/migrations/0010_auto_20171011_1644.py b/archaeological_context_records/migrations/0010_auto_20171011_1644.py
new file mode 100644
index 000000000..379110e44
--- /dev/null
+++ b/archaeological_context_records/migrations/0010_auto_20171011_1644.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0009_auto_20170829_1639'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='contextrecord',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalcontextrecord',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_context_records/migrations/0011_auto_20171012_1316.py b/archaeological_context_records/migrations/0011_auto_20171012_1316.py
new file mode 100644
index 000000000..95b042c43
--- /dev/null
+++ b/archaeological_context_records/migrations/0011_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0010_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='contextrecord',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalcontextrecord',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_context_records/migrations/0012_auto_20171017_1840.py b/archaeological_context_records/migrations/0012_auto_20171017_1840.py
new file mode 100644
index 000000000..6de4abb0f
--- /dev/null
+++ b/archaeological_context_records/migrations/0012_auto_20171017_1840.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0011_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='contextrecord',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalcontextrecord',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_context_records/models.py b/archaeological_context_records/models.py
index 082d4f53c..925a48597 100644
--- a/archaeological_context_records/models.py
+++ b/archaeological_context_records/models.py
@@ -302,7 +302,14 @@ class ContextRecord(BulkUpdatedItem, BaseHistorizedItem,
point_2d = models.PointField(_(u"Point (2D)"), blank=True, null=True)
point = models.PointField(_(u"Point (3D)"), blank=True, null=True, dim=3)
polygon = models.PolygonField(_(u"Polygon"), blank=True, null=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
+ PARENT_SEARCH_VECTORS = ['operation']
+ BASE_SEARCH_VECTORS = ["cached_label", "label", "location",
+ "interpretation", "filling", "datings_comment",
+ "identification__label", "activity__label",
+ "excavation_technic__label"]
+ M2M_SEARCH_VECTORS = ["datings__period__label"]
history = HistoricalRecords()
class Meta:
diff --git a/archaeological_context_records/templates/ishtar/sheet_contextrecord.html b/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
index 170c2d87c..e1ea9019b 100644
--- a/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
+++ b/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
@@ -29,6 +29,8 @@
</ul>
{% field "Comment on datings" item.datings_comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.diameter or item.depth_of_appearance or item.documentations.count or item.description or item.lenght or item.width or item.depth or item.thickness or item.comment %}
<h3>{% trans "Description"%}</h3>
{% field "Description" item.description "<pre>" "</pre>" %}
diff --git a/archaeological_context_records/templates/ishtar/sheet_contextrecord_pdf.html b/archaeological_context_records/templates/ishtar/sheet_contextrecord_pdf.html
index a0d0affcf..b91500403 100644
--- a/archaeological_context_records/templates/ishtar/sheet_contextrecord_pdf.html
+++ b/archaeological_context_records/templates/ishtar/sheet_contextrecord_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_contextrecord.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_context_records/templates/ishtar/sheet_contextrecordsource_pdf.html b/archaeological_context_records/templates/ishtar/sheet_contextrecordsource_pdf.html
index 38c5d318e..c03b80a53 100644
--- a/archaeological_context_records/templates/ishtar/sheet_contextrecordsource_pdf.html
+++ b/archaeological_context_records/templates/ishtar/sheet_contextrecordsource_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_contextrecordsource.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_context_records/tests.py b/archaeological_context_records/tests.py
index 89b15fbbf..b0f4b8f9e 100644
--- a/archaeological_context_records/tests.py
+++ b/archaeological_context_records/tests.py
@@ -273,6 +273,20 @@ class ContextRecordTest(ContextRecordInit, TestCase):
cr.operation
)
+ def test_search_vector_update(self):
+ cr = self.create_context_record(force=True)[0]
+ cr = models.ContextRecord.objects.get(pk=cr.pk)
+ cr.label = "Label label"
+ cr.location = "I am heeere"
+ cr.save()
+ for key in ('label', 'heeer'):
+ self.assertIn(key, cr.search_vector)
+ cr.operation.code_patriarche = "PATRIARCHE"
+ cr.operation.save()
+ cr = models.ContextRecord.objects.get(pk=cr.pk)
+ self.assertIn(settings.ISHTAR_OPE_PREFIX.lower() + "patriarch",
+ cr.search_vector)
+
def test_upstream_cache_update(self):
cr = self.create_context_record()[0]
cr_pk = cr.pk
@@ -399,6 +413,44 @@ class ContextRecordSearchTest(ContextRecordInit, TestCase):
models.RecordRelations.objects.create(
left_record=cr_1, right_record=cr_2, relation_type=sym_rel_type)
+ def test_town_search(self):
+ c = Client()
+ c.login(username=self.username, password=self.password)
+
+ data = {'numero_insee': '98989', 'name': 'base_town'}
+ base_town = self.create_towns(datas=data)[-1]
+
+ parcel = self.create_parcel(data={'town': base_town,
+ 'section': 'A', 'parcel_number': '1'})[-1]
+ self.context_records[0].parcel = parcel
+ self.context_records[0].save()
+
+ data = {'numero_insee': '56789', 'name': 'parent_town'}
+ parent_town = self.create_towns(datas=data)[-1]
+ parent_town.children.add(base_town)
+
+ data = {'numero_insee': '01234', 'name': 'child_town'}
+ child_town = self.create_towns(datas=data)[-1]
+ base_town.children.add(child_town)
+
+ # simple search
+ search = {'parcel__town': base_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # parent search
+ search = {'parcel__town': parent_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # child search
+ search = {'parcel__town': child_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
def testSearchExport(self):
c = Client()
response = c.get(reverse('get-contextrecord'))
@@ -416,6 +468,14 @@ class ContextRecordSearchTest(ContextRecordInit, TestCase):
{'label': 'cr 1',
'cr_relation_types_0': self.cr_rel_type.pk})
self.assertEqual(json.loads(response.content)['total'], 2)
+ # test search vector
+ response = c.get(reverse('get-contextrecord'),
+ {'search_vector': 'CR'})
+ self.assertEqual(json.loads(response.content)['total'], 2)
+ # the 2 context records have the same operation
+ response = c.get(reverse('get-contextrecord'),
+ {'search_vector': 'op2010'})
+ self.assertEqual(json.loads(response.content)['total'], 2)
# test search between related operations
first_ope = self.operations[0]
first_ope.year = 2010
diff --git a/archaeological_files/admin.py b/archaeological_files/admin.py
index 525f7e840..4dca1afa9 100644
--- a/archaeological_files/admin.py
+++ b/archaeological_files/admin.py
@@ -48,13 +48,15 @@ class FileAdmin(HistorizedObjectAdmin):
'related_file': 'file'
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'raw_general_contractor', 'raw_town_planning_service', 'imports',
+ 'raw_general_contractor', 'raw_town_planning_service',
'cached_label', 'imported_line'
]
model = models.File
+
admin_site.register(models.File, FileAdmin)
+
general_models = [models.FileType, models.PermitType]
if settings.COUNTRY == 'fr':
general_models.append(models.SaisineType)
diff --git a/archaeological_files/data_importer.py b/archaeological_files/data_importer.py
deleted file mode 100644
index 96b2ee007..000000000
--- a/archaeological_files/data_importer.py
+++ /dev/null
@@ -1,359 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (C) 2013-2015 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# See the file COPYING for details.
-
-import datetime
-import unicodecsv
-
-from django.conf import settings
-
-from ishtar_common.data_importer import *
-from ishtar_common.models import OrganizationType
-
-from archaeological_operations.data_importer import *
-
-from archaeological_files import models
-
-
-class ImportClosingFormater(ImportFormater):
- def post_process(self, obj, context, value, owner=None):
- value = self.formater.format(value)
- if not value:
- return
- open_date = obj.reception_date or obj.creation_date
- if not open_date:
- return
- obj.end_date = open_date + datetime.timedelta(30)
- obj.save()
-
-
-class ImportMayorFormater(ImportFormater):
- def post_process(self, obj, context, value, owner=None):
- value = self.formater.format(value)
- if type(self.field_name) in (list, tuple):
- return # not managed
- associated_obj = get_object_from_path(obj, self.field_name)
- if not value or not obj.main_town or not associated_obj:
- return
- if slugify(value).endswith('le-maire'):
- value += u" de " + obj.main_town.name
- value = value[:300]
- setattr(associated_obj, self.field_name.split('__')[-1], value)
- associated_obj.save()
-
-
-class FilePostProcessing(object):
- def post_processing(self, item, data):
- if not item.end_date: # auto-close
- open_date = item.reception_date or item.creation_date
- if open_date:
- item.end_date = open_date + datetime.timedelta(30)
- item.save()
- return item
-
-
-class FileImporterSraPdL(FilePostProcessing, Importer):
- DESC = u"Exports dossiers SRA PdL : importeur Filemaker dossiers"
- SLUG = "sra-pdl-files"
- LINE_FORMAT = []
- OBJECT_CLS = models.File
- UNICITY_KEYS = ['external_id']
- DEFAULTS = {
- ('responsible_town_planning_service', 'attached_to'): {
- 'organization_type': OrganizationType.objects.get(
- txt_idx="planning_service")},
- ('general_contractor', 'attached_to'): {
- 'organization_type': OrganizationType.objects.get(
- txt_idx="general_contractor")},
- tuple(): {
- 'file_type': models.FileType.objects.get(
- txt_idx='preventive'),
- 'creation_date': datetime.datetime.now,
- },
- ('in_charge',): {'attached_to': None}, # initialized in __init__
- }
-
- def _init_line_format(self):
- tf = TownFormater()
- tf.town_dct_init()
- self.line_format = [
- None, # A, 1
- ImportFormater(
- ['general_contractor__attached_to__address', # B, 2
- 'general_contractor__attached_to__postal_code',
- 'general_contractor__attached_to__town'],
- [UnicodeFormater(500, clean=True),
- UnicodeFormater(5, re_filter=RE_CD_POSTAL_FILTER),
- UnicodeFormater(70, clean=True), ],
- regexp=RE_ADD_CD_POSTAL_TOWN,
- regexp_formater_args=[[0], [1], [2]], required=False,
- comment=u"Aménageur - adresse"),
- ImportMayorFormater(
- # C, 3 TODO - extraire nom_prenom_titre
- 'general_contractor__raw_name',
- UnicodeFormater(200),
- comment=u"Aménageur - nom brut",
- post_processing=True,
- required=False),
- None, # D, 4
- ImportFormater(
- "general_contractor__title", # E, 5
- TypeFormater(models.TitleType),
- required=False,
- comment=u"Aménageur - titre"),
- None, # F, 6
- None, # G, 7
- None, # H, 8
- ImportFormater("parcels__year", # I, 9
- YearNoFuturFormater(),
- required=False),
- # J, 10
- ImportParcelFormater('', required=False, post_processing=True),
- None, # K, 11
- ImportFormater([['main_town', 'parcels__town']], # L, 12
- tf,
- required=False,
- comment=u"Commune (si non définie avant)"),
- ImportFormater([['main_town', 'parcels__town']], # M, 13
- tf,
- required=False,
- comment=u"Commune (si non définie avant)"),
- ImportFormater('saisine_type', # N, 14
- TypeFormater(models.SaisineType),
- required=False,
- comment=u"Type de saisine"),
- None, # O, 15
- ImportFormater('name', # P, 16
- UnicodeFormater(),
- comment=u"Nom du dossier",
- required=False),
- None, # Q, 17
- ImportFormater(
- [
- 'responsible_town_planning_service__raw_name',
- # R, 18 service instructeur
- 'responsible_town_planning_service__attached_to__address',
- 'responsible_town_planning_service__'
- 'attached_to__postal_code',
- 'responsible_town_planning_service__attached_to__town'],
- [UnicodeFormater(300, clean=True),
- UnicodeFormater(300, clean=True),
- UnicodeFormater(5, re_filter=RE_CD_POSTAL_FILTER),
- UnicodeFormater(70, clean=True), ],
- regexp=RE_NAME_ADD_CD_POSTAL_TOWN,
- regexp_formater_args=[[0], [1], [2], [3]],
- comment=u"Service instructeur - adresse",
- required=False),
- ImportFormater(
- 'comment', # S, 19
- UnicodeFormater(prefix=u'* Considérants : '),
- comment=u"Commentaire",
- concat=True,
- required=False),
- ImportYearFormater('reception_date', # T, 20
- DateFormater(['%d/%m/%Y', '%d/%m/%Y']),
- comment=u"Date de réception",
- required=False,
- duplicate_fields=[['creation_date', False]]),
- None, # U, 21
- None, # V, 22
- None, # W, 23
- None, # X, 24
- None, # Y, 25
- None, # Z, 26
- None, # AA, 27
- None, # AB, 28
- None, # AC, 29
- None, # AD, 30
- None, # AE, 31
- None, # AF, 32
- None, # AG, 33
- None, # AH, 34
- ImportFormater('creation_date', # AI, 35
- DateFormater(['%d/%m/%Y', '%d/%m/%y']),
- force_value=True,
- comment=u"Date de création",
- required=False,),
- None, # AJ, 36
- ImportFormater('comment', # AK, 37
- UnicodeFormater(prefix=u"* Historique : "),
- comment=u"Commentaire",
- concat=True, required=False),
- ImportFormater('internal_reference', # AL, 38
- UnicodeFormater(60),
- comment=u"Autre référence",
- required=False),
- None, # AM, 39
- None, # AN, 40
- ImportFormater('comment', # AO, 41
- UnicodeFormater(
- prefix=u"* Justificatif de prescription : "),
- comment=u"Justificatif de prescription",
- concat=True, required=False),
- ImportFormater('comment', # AP, 42
- UnicodeFormater(
- prefix=u"* Justificatif d'intervention : "),
- comment=u"Justificatif d'intervention",
- concat=True, required=False),
- None, # AQ, 43
- None, # AR, 44
- None, # AS, 45
- None, # AT, 46
- ImportFormater('comment', # AU, 47
- UnicodeFormater(
- prefix=u"* Méthodologie de l'opération : "),
- comment=u"Méthodologie de l'opération",
- concat=True, required=False),
- None, # AV, 48
- ImportFormater('permit_reference', # AW, 49
- UnicodeFormater(300, clean=True),
- regexp=RE_PERMIT_REFERENCE,
- comment=u"Réf. du permis de construire",
- required=False),
- ImportFormater('comment', # AX, 50
- UnicodeFormater(
- prefix=u"* Référence de dossier aménageur : "),
- comment=u"Référence de dossier aménageur",
- concat=True, required=False),
- None, # AY, 51
- None, # AZ, 52
- ImportFormater('comment', # BA, 53
- UnicodeFormater(
- prefix=u"* Numéro d'arrêté préfectoral : "),
- comment=u"Numéro d'arrêté préfectoral",
- concat=True, required=False),
- ImportFormater('comment', # BB, 54
- UnicodeFormater(
- prefix=u"* Numéro d'arrêté SRA : "),
- comment=u"Numéro d'arrêté SRA",
- concat=True, required=False),
- ImportFormater('comment', # BC, 55
- UnicodeFormater(
- prefix=u"* Numéro d'arrêté de "
- u"post-diagnostic : "),
- comment=u"Numéro d'arrêté de post-diagnostic",
- concat=True, required=False),
- None, # BD, 56
- ImportFormater([['main_town', 'parcels__town']], # BE, 57
- TownINSEEFormater(),
- required=False,
- comment=u"Commune (si non définie avant)"),
- ImportFormater('comment', # BF, 58
- UnicodeFormater(2000),
- comment=u"Commentaire",
- concat=True, required=False),
- None, # BG, 59
- None, # BH, 60
- None, # BI, 61
- None, # BJ, 62
- None, # BK, 63
- None, # BL, 64
- None, # BM, 65
- None, # BN, 66
- None, # BO, 67
- None, # BP, 68
- None, # BQ, 69
- None, # BR, 70
- None, # BS, 71
- ImportFormater( # BT, 72 nom service instructeur
- ['responsible_town_planning_service__attached_to__name', ],
- [UnicodeFormater(300, clean=True), ],
- regexp=RE_ORGA,
- comment=u"Service instructeur - nom",
- required=False),
- None, # BU, 73
- None, # BV, 74
- ImportFormater(
- 'in_charge__raw_name', # BW, 75 responsable
- UnicodeFormater(200),
- comment=u"Responsable - nom brut",
- required=False),
- ImportFormater('total_surface', # BX, 76 surface totale
- SurfaceFormater(),
- comment=u"Surface totale",
- required=False),
- ImportFormater('total_developed_surface',
- # BY, 77 surface totale aménagée
- SurfaceFormater(),
- comment=u"Surface totale aménagée",
- required=False),
- None, # BZ, 78
- None, # CA, 79
- None, # CB, 80
- None, # CC, 81
- None, # CD, 82
- None, # CE, 83
- None, # CF, 84
- ImportFormater('permit_type',
- TypeFormater(models.PermitType),
- required=False,
- comment=u"Type de permis"), # CG, 85
- None, # CH, 85
- ImportFormater('year', # CI, 86
- IntegerFormater(),
- comment=u"Année du dossier",
- required=True),
- ImportFormater('numeric_reference', # CJ, 87
- IntegerFormater(),
- comment=u"Identifiant numérique",
- required=True),
- ImportFormater('external_id', # CK, 88
- UnicodeFormater(),
- comment=u"Identifiant externe",
- required=True),
- ]
-
- def __init__(self, *args, **kwargs):
- super(FileImporterSraPdL, self).__init__(*args, **kwargs)
- self.DEFAULTS[('in_charge',)]['attached_to'], created = \
- models.Organization.objects.get_or_create(
- name='SRA Pays de la Loire',
- defaults={
- 'organization_type':
- OrganizationType.objects.get(txt_idx='sra')})
- self._init_line_format()
- if tuple() not in self._defaults:
- self._defaults[tuple()] = {}
- self._defaults[tuple()]['history_modifier'] = self.history_modifier
- self._associate_db_target_to_formaters()
-
-
-def test(filename):
- importer = FileImporterSraPdL(skip_lines=3, output='cli')
- with open(filename) as csv_file:
- encodings = [settings.ENCODING, settings.ALT_ENCODING, 'utf-8']
- for encoding in encodings:
- try:
- importer.importation(
- [line for line in
- unicodecsv.reader(csv_file, encoding='utf-8')])
- # importer.importation(unicode_csv_reader(
- # [line.decode(encoding)
- # for line in csv_file.readlines()])
- print importer.get_csv_errors()
- break
- except ImporterError, e:
- print(unicode(e))
- if e.type == ImporterError.HEADER \
- and encoding != encodings[-1]:
- csv_file.seek(0)
- continue
- except UnicodeDecodeError:
- if encoding != encodings[-1]:
- csv_file.seek(0)
- continue
diff --git a/archaeological_files/migrations/0008_auto_20171011_1644.py b/archaeological_files/migrations/0008_auto_20171011_1644.py
new file mode 100644
index 000000000..33dfbf59e
--- /dev/null
+++ b/archaeological_files/migrations/0008_auto_20171011_1644.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0007_auto_20170826_1152'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='file',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_files/migrations/0009_auto_20171012_1316.py b/archaeological_files/migrations/0009_auto_20171012_1316.py
new file mode 100644
index 000000000..cd33d8243
--- /dev/null
+++ b/archaeological_files/migrations/0009_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0008_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='file',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_files/migrations/0010_auto_20171017_1840.py b/archaeological_files/migrations/0010_auto_20171017_1840.py
new file mode 100644
index 000000000..04eb5b1cc
--- /dev/null
+++ b/archaeological_files/migrations/0010_auto_20171017_1840.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0009_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='file',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_files/models.py b/archaeological_files/models.py
index 0d5b4b3e8..b0f53f11c 100644
--- a/archaeological_files/models.py
+++ b/archaeological_files/models.py
@@ -204,7 +204,8 @@ class File(ClosedItem, BaseHistorizedItem, OwnPerms, ValueGetter,
mh_listing = models.NullBooleanField(
u"Sur Monument Historique inscrit", blank=True, null=True)
# <-- research archaeology
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
imported_line = models.TextField(_(u"Imported line"), null=True,
blank=True)
history = HistoricalRecords()
diff --git a/archaeological_files/templates/ishtar/sheet_file.html b/archaeological_files/templates/ishtar/sheet_file.html
index 6d64a975f..6eda1124a 100644
--- a/archaeological_files/templates/ishtar/sheet_file.html
+++ b/archaeological_files/templates/ishtar/sheet_file.html
@@ -44,6 +44,8 @@
</ul>
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
<h3>{% trans "Localisation"%}</h3>
{% if item.towns.count %}<p><label>{%trans "Towns"%}</label> <span class='value'>{{ item.towns.all|join:", " }}</span></p>{% endif %}
{% if item.departments.count %}<p><label>{%trans "Departments"%}</label> <span class='value'>{{ item.departments.all|join:", " }}</span></p>{% endif %}
diff --git a/archaeological_files/templates/ishtar/sheet_file_pdf.html b/archaeological_files/templates/ishtar/sheet_file_pdf.html
index eaf2a9436..7335eaec7 100644
--- a/archaeological_files/templates/ishtar/sheet_file_pdf.html
+++ b/archaeological_files/templates/ishtar/sheet_file_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_file.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/admin.py b/archaeological_finds/admin.py
index b8ed15865..6f32365b9 100644
--- a/archaeological_finds/admin.py
+++ b/archaeological_finds/admin.py
@@ -47,11 +47,11 @@ class AdminBaseFindForm(forms.ModelForm):
class BaseFindAdmin(HistorizedObjectAdmin):
list_display = ('label', 'context_record', 'index')
- search_fields = ('label', 'context_record__parcel__operation__name',)
+ search_fields = ('label', 'cache_complete_id',)
model = models.BaseFind
form = AdminBaseFindForm
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'cache_short_id', 'cache_complete_id', 'imports'
+ 'cache_short_id', 'cache_complete_id',
]
admin_site.register(models.BaseFind, BaseFindAdmin)
@@ -69,7 +69,7 @@ class FindAdmin(HistorizedObjectAdmin):
'container': 'container'
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'datings', 'cached_label'
+ 'datings', 'cached_label'
]
admin_site.register(models.Find, FindAdmin)
@@ -97,7 +97,7 @@ class PropertyAdmin(HistorizedObjectAdmin):
'person': 'person',
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'administrative_act', 'imports', ]
+ 'administrative_act']
def has_add_permission(self, request):
return False
@@ -118,7 +118,7 @@ class TreatmentAdmin(HistorizedObjectAdmin):
'container': 'container',
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label', 'downstream_lbl', 'upstream_lbl'
+ 'cached_label', 'downstream_lbl', 'upstream_lbl'
]
def has_add_permission(self, request):
@@ -140,7 +140,6 @@ class TreatmentFileAdmin(HistorizedObjectAdmin):
'applicant': 'person',
'applicant_organisation': 'organization',
})
- exclude = ['imports']
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
'cached_label',
]
diff --git a/archaeological_finds/data_importer.py b/archaeological_finds/data_importer.py
deleted file mode 100644
index e0c18d1bf..000000000
--- a/archaeological_finds/data_importer.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (C) 2015 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# See the file COPYING for details.
-
-from ishtar_common.data_importer import *
-
-from archaeological_finds import models
-
-
-class FindsImporterBibracte(Importer):
- DESC = u"Exports Bibracte : importeur pour l'onglet mobilier"
- OBJECT_CLS = models.BaseFind
- DEFAULTS = {}
- LINE_FORMAT = [
- # OA
- ImportFormater('context_record__operation__operation_code',
- IntegerFormater(),),
- # external_id
- ImportFormater(
- 'external_id', UnicodeFormater(120, notnull=True),
- duplicate_fields=[('find__external_id', False),
- ('label', False),
- ('find__label', False)]),
- # isolé ou non (si non isolé = lot)
- None, # à corriger
- # ImportFormater(
- # 'is_isolated',
- # StrToBoolean(choices={'lot': False, 'objet': True}),
- # required=False),
- # ???
- None,
- # A voir
- None,
- # cf type
- None,
- # Type = sous classe de matériaux = Liste hiérarchique
- ImportFormater('find__material_types',
- TypeFormater(models.MaterialType), required=False),
- # ???
- None,
- # lien avec contenant
- None,
- # = nombre
- ImportFormater('find__find_number', IntegerFormater(), required=False),
- # poids
- ImportFormater('find__weight', FloatFormater(), required=False),
- # unité (g par défault)
- ImportFormater('find__weight_unit',
- StrChoiceFormater(models.WEIGHT_UNIT), required=False),
- # lien UE
- ImportFormater('context_record__external_id', UnicodeFormater(120),),
- # date decouverte
- ImportFormater('discovery_date', DateFormater(['%Y/%m/%d']),
- required=False,),
- # lien parcelle (unique)
- None,
- # etat conservation
- ImportFormater('find__conservatory_state',
- TypeFormater(models.ConservatoryState), required=False),
- # preservation_to_consider
- ImportFormater('find__preservation_to_considers',
- TypeFormater(models.TreatmentType), required=False),
- # comment
- ImportFormater('comment', UnicodeFormater(1000), required=False),
- # lien vers plusieurs chrono (voir gestion actuelle chrono)
- None,
- # ImportFormater('find__datings__period', TypeFormater(Period,
- # many_split="&"), required=False),
- # topographic_localisation
- ImportFormater('topographic_localisation', UnicodeFormater(120),
- required=False),
- # special_interest
- ImportFormater('special_interest', UnicodeFormater(120),
- required=False),
- # description
- ImportFormater('description', UnicodeFormater(1000), required=False),
- # remontage
- None
- ]
-
-
-class FindAltImporterBibracte(Importer):
- DESC = u"Exports Bibracte : importeur pour l'onglet prélèvement"
- OBJECT_CLS = models.BaseFind
- DEFAULTS = {}
- LINE_FORMAT = [
- # code OA
- ImportFormater('context_record__operation__operation_code',
- IntegerFormater(),),
- # identifiant prelevement
- ImportFormater('external_id', UnicodeFormater(120, notnull=True),
- duplicate_fields=[('find__external_id', False)]),
- # nature
- ImportFormater('find__material_types',
- TypeFormater(models.MaterialType), required=False),
- # identifiant UE
- ImportFormater('context_record__external_id', UnicodeFormater(120),),
- # identifiant materiel
- None,
- # commentaire
- ImportFormater('comment', UnicodeFormater(1000), required=False),
- ]
-
-
-class ImportTreatmentFormater(ImportFormater):
- def post_process(self, obj, context, value, owner=None):
- if obj.upstream_treatment.count():
- return
- ope_code = context['upstream_treatment'][
- 'base_finds']['context_record']['operation']['operation_code']
- ope_code = int(ope_code)
- downstream = models.Find.objects.filter(
- external_id=value,
- base_finds__context_record__operation__operation_code=ope_code)
- if not downstream.count():
- return
- downstream = downstream.all()[0]
- downstream.upstream_treatment = obj
- downstream.save()
- upstream = downstream.duplicate(owner)
- upstream.downstream_treatment = obj
- upstream.save()
- return
-
-
-class TreatmentImporterBibracte(Importer):
- DESC = u"Exports Bibracte : importeur pour l'onglet traitement"
- OBJECT_CLS = models.Treatment
- DEFAULTS = {}
- LINE_FORMAT = [
- # code OA
- ImportFormater(
- 'upstream_treatment__base_finds__context_record__operation__'
- 'operation_code',
- UnicodeFormater(120, notnull=True)),
- # identifiant
- ImportTreatmentFormater(
- 'external_id',
- UnicodeFormater(120, notnull=True), post_processing=True),
- None,
- # traitement
- ImportFormater('treatment_type', TypeFormater(models.TreatmentType),),
- ]
diff --git a/archaeological_finds/forms.py b/archaeological_finds/forms.py
index aa0ae4621..1f81cf52f 100644
--- a/archaeological_finds/forms.py
+++ b/archaeological_finds/forms.py
@@ -366,6 +366,7 @@ DatingFormSet.form_label = _("Dating")
class FindSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
base_finds__cache_short_id = forms.CharField(label=_(u"Short ID"))
base_finds__cache_complete_id = forms.CharField(label=_(u"Complete ID"))
label = forms.CharField(label=_(u"Free ID"))
diff --git a/archaeological_finds/migrations/0010_auto_20171011_1644.py b/archaeological_finds/migrations/0010_auto_20171011_1644.py
new file mode 100644
index 000000000..ce892e96d
--- /dev/null
+++ b/archaeological_finds/migrations/0010_auto_20171011_1644.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0009_auto_20171010_1644'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='basefind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='find',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalbasefind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalfind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatment',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatmentfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='property',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='treatment',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='treatmentfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_finds/migrations/0011_auto_20171012_1316.py b/archaeological_finds/migrations/0011_auto_20171012_1316.py
new file mode 100644
index 000000000..6fabd578f
--- /dev/null
+++ b/archaeological_finds/migrations/0011_auto_20171012_1316.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0010_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='basefind',
+ name='cache_complete_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Complete ID'),
+ ),
+ migrations.AlterField(
+ model_name='basefind',
+ name='cache_short_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Short ID'),
+ ),
+ migrations.AlterField(
+ model_name='find',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalbasefind',
+ name='cache_complete_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Complete ID'),
+ ),
+ migrations.AlterField(
+ model_name='historicalbasefind',
+ name='cache_short_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Short ID'),
+ ),
+ migrations.AlterField(
+ model_name='historicalfind',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicaltreatment',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicaltreatmentfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='treatment',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='treatmentfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_finds/migrations/0012_auto_20171017_1840.py b/archaeological_finds/migrations/0012_auto_20171017_1840.py
new file mode 100644
index 000000000..8c347b270
--- /dev/null
+++ b/archaeological_finds/migrations/0012_auto_20171017_1840.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0011_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='basefind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='find',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalbasefind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalfind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatment',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatmentfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='property',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='treatment',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='treatmentfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_finds/models_finds.py b/archaeological_finds/models_finds.py
index e58d14f7e..8052601bf 100644
--- a/archaeological_finds/models_finds.py
+++ b/archaeological_finds/models_finds.py
@@ -231,14 +231,17 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, OwnPerms):
line = models.LineStringField(_(u"Line"), blank=True, null=True)
polygon = models.PolygonField(_(u"Polygon"), blank=True, null=True)
cache_short_id = models.TextField(
- _(u"Short ID"), blank=True, null=True,
+ _(u"Short ID"), blank=True, null=True, db_index=True,
help_text=_(u"Cached value - do not edit"))
cache_complete_id = models.TextField(
- _(u"Complete ID"), blank=True, null=True,
+ _(u"Complete ID"), blank=True, null=True, db_index=True,
help_text=_(u"Cached value - do not edit"))
history = HistoricalRecords()
RELATED_POST_PROCESS = ['find']
CACHED_LABELS = ['cache_short_id', 'cache_complete_id']
+ PARENT_SEARCH_VECTORS = ['context_record']
+ BASE_SEARCH_VECTORS = ["label", "description", "comment", "cache_short_id",
+ "cache_complete_id"]
class Meta:
verbose_name = _(u"Base find")
@@ -748,9 +751,18 @@ class Find(BulkUpdatedItem, ValueGetter, BaseHistorizedItem, ImageModel,
appraisal_date = models.DateField(_(u"Appraisal date"), blank=True,
null=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
BASKET_MODEL = FindBasket
+ PARENT_SEARCH_VECTORS = ['base_finds']
+ BASE_SEARCH_VECTORS = [
+ "cached_label", "label", "description", "container__location__name",
+ "container__reference", "mark", "comment", "dating_comment",
+ "previous_id"]
+ M2M_SEARCH_VECTORS = [
+ "datings__period__label", "object_types__label", "integrities__label",
+ "remarkabilities__label", "material_types__label"]
class Meta:
verbose_name = _(u"Find")
diff --git a/archaeological_finds/models_treatments.py b/archaeological_finds/models_treatments.py
index 0ffcd87fa..03eeed452 100644
--- a/archaeological_finds/models_treatments.py
+++ b/archaeological_finds/models_treatments.py
@@ -115,7 +115,8 @@ class Treatment(DashboardFormItem, ValueGetter, BaseHistorizedItem,
blank=True, null=True)
target_is_basket = models.BooleanField(_(u"Target a basket"),
default=False)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
class Meta:
@@ -224,6 +225,7 @@ class Treatment(DashboardFormItem, ValueGetter, BaseHistorizedItem,
return values
def pre_save(self):
+ super(Treatment, self).pre_save()
# is not new
if self.pk is not None:
return
@@ -506,7 +508,8 @@ class TreatmentFile(DashboardFormItem, ClosedItem, BaseHistorizedItem,
reception_date = models.DateField(_(u'Reception date'), blank=True,
null=True)
comment = models.TextField(_(u"Comment"), null=True, blank=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
class Meta:
diff --git a/archaeological_finds/templates/ishtar/sheet_find.html b/archaeological_finds/templates/ishtar/sheet_find.html
index efd38e406..68304740d 100644
--- a/archaeological_finds/templates/ishtar/sheet_find.html
+++ b/archaeological_finds/templates/ishtar/sheet_find.html
@@ -58,6 +58,8 @@
{% field "Description" item.description "<pre>" "</pre>" %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.conservatory_state or item.conservatory_comment or item.alterations.count or item.alteration_causes.count or item.preservation_to_considers.count or item.treatment_emergency or item.insurance_value %}
<h4>{% trans "Preservation" %}</h4>
<ul class='form-flex'>
diff --git a/archaeological_finds/templates/ishtar/sheet_find_pdf.html b/archaeological_finds/templates/ishtar/sheet_find_pdf.html
index 262bcdad7..11c39f059 100644
--- a/archaeological_finds/templates/ishtar/sheet_find_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_find_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_find.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_findbasket_pdf.html b/archaeological_finds/templates/ishtar/sheet_findbasket_pdf.html
index 2b55f0f76..47a4d2bd8 100644
--- a/archaeological_finds/templates/ishtar/sheet_findbasket_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_findbasket_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_findbasket.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_findsource_pdf.html b/archaeological_finds/templates/ishtar/sheet_findsource_pdf.html
index 26ef9f2d7..7ca3bd3c1 100644
--- a/archaeological_finds/templates/ishtar/sheet_findsource_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_findsource_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_findsource.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatment.html b/archaeological_finds/templates/ishtar/sheet_treatment.html
index 5fc8f2aac..94f6e20a7 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatment.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatment.html
@@ -42,6 +42,8 @@
{% field "Goal" item.goal "<pre>" "</pre>" %}
{% endif %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% trans "Upstream finds" as finds %}
{% if item.upstream.count %}
{% dynamic_table_document finds 'finds_for_treatment' 'downstream_treatment' item.pk 'TABLE_COLS_FOR_OPE' output %}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatment_pdf.html b/archaeological_finds/templates/ishtar/sheet_treatment_pdf.html
index 08df52e97..ccd860ec9 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatment_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatment_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_treatment.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
- &ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatmentfile.html b/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
index f8fb3e0aa..9567d3081 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
@@ -26,6 +26,8 @@
</ul>
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.applicant %}
<h3>{% trans "Applicant" %}</h3>
<ul class='form-flex'>
diff --git a/archaeological_finds/templates/ishtar/sheet_treatmentfile_pdf.html b/archaeological_finds/templates/ishtar/sheet_treatmentfile_pdf.html
index be64ff7eb..c216556b3 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatmentfile_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatmentfile_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_treatmentfile.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
- &ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatmentfilesource_pdf.html b/archaeological_finds/templates/ishtar/sheet_treatmentfilesource_pdf.html
index d0a0ec8e7..2ef4d63b5 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatmentfilesource_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatmentfilesource_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_treatmentfilesource.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatmentsource_pdf.html b/archaeological_finds/templates/ishtar/sheet_treatmentsource_pdf.html
index c38764559..4b7218a14 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatmentsource_pdf.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatmentsource_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_treatmentsource.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
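All the *_pdf.html templates above drop the inline stylesheet link and the xhtml2pdf-specific <pdf:pagenumber/> footer, consistent with the install script later in this patch replacing python-pisa with WeasyPrint. A hypothetical sketch of the rendering path these templates now assume (the actual view code is not shown in this diff; the function and template names are illustrative):

from django.template.loader import render_to_string
from weasyprint import HTML

def sheet_to_pdf(item, template="ishtar/sheet_find_pdf.html"):
    # render the Django template to HTML, then let WeasyPrint lay it out;
    # page numbering is handled by CSS paged media instead of <pdf:pagenumber/>
    html = render_to_string(template, {"item": item})
    return HTML(string=html).write_pdf()  # PDF document as bytes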
diff --git a/archaeological_operations/admin.py b/archaeological_operations/admin.py
index f1deac188..bf1415989 100644
--- a/archaeological_operations/admin.py
+++ b/archaeological_operations/admin.py
@@ -40,7 +40,7 @@ class AdministrativeActAdmin(HistorizedObjectAdmin):
search_fields = ('year', 'index')
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
'in_charge', 'operator', 'scientist', 'signatory', 'associated_file',
- 'imports', 'departments_label', 'towns_label']
+ 'departments_label', 'towns_label']
model = models.AdministrativeAct
form = make_ajax_form(
models.AdministrativeAct, {'operation': 'operation'}
@@ -69,7 +69,6 @@ class ArchaeologicalSiteAdmin(HistorizedObjectAdmin):
list_display = ('name', 'reference')
search_fields = ('name', 'reference')
model = models.ArchaeologicalSite
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
inlines = [OperationInline]
admin_site.register(models.ArchaeologicalSite, ArchaeologicalSiteAdmin)
@@ -112,7 +111,7 @@ class OperationAdmin(HistorizedObjectAdmin):
search_fields += ['code_patriarche']
model = models.Operation
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label']
+ 'cached_label']
form = AdminOperationForm
inlines = [ArchaeologicalSiteInline]
@@ -144,7 +143,7 @@ class ParcelAdmin(HistorizedObjectAdmin):
'town': 'town'}
)
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
admin_site.register(models.Parcel, ParcelAdmin)
@@ -196,7 +195,7 @@ class ParcelOwnerAdmin(HistorizedObjectAdmin):
'parcel': 'parcel'}
)
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
admin_site.register(models.ParcelOwner, ParcelOwnerAdmin)
diff --git a/archaeological_operations/data_importer.py b/archaeological_operations/data_importer.py
deleted file mode 100644
index b4cd2f0d0..000000000
--- a/archaeological_operations/data_importer.py
+++ /dev/null
@@ -1,280 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (C) 2015 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# See the file COPYING for details.
-
-import re
-
-from django.db import IntegrityError
-from django.template.defaultfilters import slugify
-
-from ishtar_common.data_importer import *
-from ishtar_common.models import Town, OrganizationType, SourceType, \
- SupportType, Format, AuthorType
-
-from archaeological_operations import models
-from archaeological_operations.forms import OPERATOR
-from archaeological_operations.utils import parse_parcels
-
-RE_PERMIT_REFERENCE = re.compile('[A-Za-z]*(.*)')
-
-
-class ImportParcelFormater(ImportFormater):
- NEED = ['town', ]
- PARCEL_OWNER_KEY = 'associated_file'
-
- def post_process(self, obj, context, value, owner=None):
- value = value.strip()
- base_dct = {self.PARCEL_OWNER_KEY: obj, 'history_modifier': owner}
- if 'parcels' in context:
- for key in context['parcels']:
- if context['parcels'][key]:
- base_dct[key] = context['parcels'][key]
- for parcel_dct in parse_parcels(value, owner=owner):
- parcel_dct.update(base_dct)
- try:
- models.Parcel.objects.get_or_create(**parcel_dct)
- except IntegrityError:
- try:
- p = unicode(parcel_dct)
- except UnicodeDecodeError:
- try:
- p = str(parcel_dct).decode('utf-8')
- except UnicodeDecodeError:
- p = u""
- raise ImporterError(u"Erreur d'import parcelle, contexte : %s"
- % p)
-
-
-class ImportYearFormater(ImportFormater):
- def post_process(self, obj, context, value, owner=None):
- value = self.formater.format(value)
- if not value:
- return
- obj.year = value.year
- obj.save()
-
-
-class TownFormater(Formater):
- def __init__(self, town_full_dct={}, town_dct={}):
- self._town_full_dct = town_full_dct
- self._town_dct = town_dct
- self._initialized = False if not self._town_full_dct else True
-
- def town_dct_init(self):
- for town in Town.objects.all():
- key = (slugify(town.name.strip()), town.numero_insee[:2])
- if key in self._town_full_dct:
- # print("Danger! %s is ambiguous with another town on the same"
- # " department." % town.name)
- continue
- self._town_full_dct[key] = town
- key = slugify(town.name.strip())
- if key in self._town_dct:
- # print("Warning %s is ambiguous with no department provided" %
- # town.name)
- continue
- self._town_dct[key] = town
- self._initialized = True
-
- def format(self, value, extra=None):
- if not self._initialized:
- self.town_dct_init()
- m = RE_FILTER_CEDEX.match(value)
- if m:
- value = m.groups()[0]
- if not value:
- return None
- if extra:
- key = (slugify(value), extra)
- if key in self._town_full_dct:
- return self._town_full_dct[key]
- key = slugify(value)
- if key in self._town_dct:
- return self._town_dct[key]
-
-
-class TownINSEEFormater(Formater):
- def __init__(self):
- self._town_dct = {}
-
- def format(self, value, extra=None):
- value = value.strip()
- if not value:
- return None
- if value in self._town_dct:
- return self._town_dct[value]
- q = Town.objects.filter(numero_insee=value)
- if not q.count():
- return
- self._town_dct[value] = q.all()[0]
- return self._town_dct[value]
-
-
-class SurfaceFormater(Formater):
- def test(self):
- assert self.format(u"352 123") == 352123
- assert self.format(u"456 789 m²") == 456789
- assert self.format(u"78ha") == 780000
-
- def format(self, value, extra=None):
- value = value.strip()
- if not value:
- return None
- factor = 1
- if value.endswith(u"m2") or value.endswith(u"m²"):
- value = value[:-2]
- if value.endswith(u"ha"):
- value = value[:-2]
- factor = 10000
- try:
- return int(value.replace(' ', '')) * factor
- except ValueError:
- raise ImporterError("Erreur import surface : %s" % unicode(value))
-
-# RE_ADD_CD_POSTAL_TOWN = re.compile("(.*)[, ](\d{5}) (.*?) *(?: "\
-# "*CEDEX|cedex|Cedex *\d*)*")
-
-RE_NAME_ADD_CD_POSTAL_TOWN = re.compile(
- "(.+)?[, ]*" + NEW_LINE_BREAK + "(.+)?[, ]*(\d{2} *\d{3})[, ]*(.+)")
-
-RE_ADD_CD_POSTAL_TOWN = re.compile("(.+)?[, ]*(\d{2} *\d{3})[, ]*(.+)")
-
-RE_CD_POSTAL_FILTER = re.compile("(\d*) (\d*)")
-
-RE_ORGA = re.compile("([^,\n]*)")
-
-
-class OperationImporterBibracte(Importer):
- OBJECT_CLS = models.Operation
- DESC = u"Exports Bibracte : importeur pour l'onglet opération"
- DEFAULTS = {
- ('operator',): {
- 'organization_type': OPERATOR
- },
- }
- LINE_FORMAT = [
- # CODE OPE
- ImportFormater('operation_code', IntegerFormater(),),
- # REGION
- None,
- # TYPE operation
- ImportFormater('operation_type', TypeFormater(models.OperationType),),
- # NOM
- ImportFormater('common_name', UnicodeFormater(120),),
- # OPERATEUR
- ImportFormater('operator__name', UnicodeFormater(120),),
- # resp. lien IMPORT avec personne
- ImportFormater('in_charge__raw_name', UnicodeFormater(300),),
- # début
- ImportFormater('start_date', DateFormater(['%Y/%m/%d']),),
- # fin
- ImportFormater('excavation_end_date', DateFormater(['%Y/%m/%d']),),
- # Chronos
- ImportFormater('periods', TypeFormater(models.Period, many_split="&"),
- required=False),
- ]
-
-RE_PARCEL_SECT_NUM = re.compile("([A-Za-z]*)([0-9]*)")
-RE_NUM_INSEE = re.compile("([0-9]*)")
-
-
-class ParcelImporterBibracte(Importer):
- OBJECT_CLS = models.Parcel
- DESC = u"Exports Bibracte : importeur pour l'onglet parcelles"
- DEFAULTS = {
- ('operator',): {
- 'organization_type': OrganizationType.objects.get(
- txt_idx="operator")},
- }
- LINE_FORMAT = [
- # code OA
- ImportFormater('operation__operation_code', IntegerFormater(),),
- # identifiant parcelle
- ImportFormater(
- ['section', 'parcel_number'],
- [UnicodeFormater(4), UnicodeFormater(6), ],
- regexp=RE_PARCEL_SECT_NUM,
- regexp_formater_args=[[0], [1]], required=False,
- duplicate_fields=[('external_id', False)],),
- # numero parcelle
- ImportFormater('parcel_number', UnicodeFormater(6),
- required=False,),
- # section cadastre
- ImportFormater('section', UnicodeFormater(4),
- required=False,),
- # annee cadastre
- ImportFormater('year', YearFormater(), required=False,),
- # nom commune
- None,
- # numero INSEE commune
- ImportFormater('town__numero_insee', UnicodeFormater(6),
- regexp=RE_NUM_INSEE, required=False,),
- # nom departement
- None,
- # lieu dit adresse
- ImportFormater('address', UnicodeFormater(500),
- required=False,),
- ]
-
-MAIN_AUTHOR, created = AuthorType.objects.get_or_create(txt_idx='main_author')
-
-
-class DocImporterBibracte(Importer):
- OBJECT_CLS = models.OperationSource
- DEFAULTS = {
- ('authors',): {'author_type': MAIN_AUTHOR},
- }
- DESC = u"Exports Bibracte : importeur pour l'onglet documentation"
- LINE_FORMAT = [
- # code OA
- ImportFormater('operation__operation_code', IntegerFormater(),),
- # identifiant documentation
- ImportFormater('external_id', UnicodeFormater(12),),
- # type
- ImportFormater('source_type', TypeFormater(SourceType),
- required=False),
- # nature support
- ImportFormater('support_type', TypeFormater(SupportType),
- required=False),
- # nombre element
- ImportFormater('item_number', IntegerFormater(), required=False),
- # auteur
- ImportFormater('authors__person__raw_name', UnicodeFormater(300),
- required=False),
- # annee
- ImportFormater('creation_date', DateFormater(['%Y']),),
- # format
- ImportFormater('format_type', TypeFormater(Format), required=False),
- # description legende
- ImportFormater('description', UnicodeFormater(1000), required=False),
- # type contenant
- None,
- # numero contenant
- None,
- # commentaire
- ImportFormater('comment', UnicodeFormater(1000), required=False),
- # echelle
- ImportFormater('scale', UnicodeFormater(30), required=False),
- # type sous contenant
- None,
- # numero sous contenant
- None,
- # informations complementaires
- ImportFormater('additional_information', UnicodeFormater(1000),
- required=False),
- ]
diff --git a/archaeological_operations/forms.py b/archaeological_operations/forms.py
index 651cd740f..841131da6 100644
--- a/archaeological_operations/forms.py
+++ b/archaeological_operations/forms.py
@@ -480,6 +480,7 @@ RecordRelationsFormSet.form_label = _(u"Relations")
class OperationSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
year = forms.IntegerField(label=_("Year"))
operation_code = forms.IntegerField(label=_(u"Numeric reference"))
if settings.COUNTRY == 'fr':
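The new search_vector form field on OperationSelect exposes full text search in the operation search table. A sketch of the kind of queryset filter such a criterion maps to on the PostgreSQL side — the concrete wiring lives in the shared search views, so the helper below is an assumption:

from django.conf import settings
from django.contrib.postgres.search import SearchQuery
from archaeological_operations.models import Operation

def full_text_operations(terms):
    # match against the stored search_vector column added by migration 0009
    config = getattr(settings, "ISHTAR_SEARCH_LANGUAGE", "french")
    return Operation.objects.filter(
        search_vector=SearchQuery(terms, config=config))

# e.g. full_text_operations(u"zardoz daisy"), matching the expectations of
# test_search_vector_update later in this patch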
diff --git a/archaeological_operations/migrations/0009_auto_20171011_1644.py b/archaeological_operations/migrations/0009_auto_20171011_1644.py
new file mode 100644
index 000000000..18a284a21
--- /dev/null
+++ b/archaeological_operations/migrations/0009_auto_20171011_1644.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0008_auto_20170829_1639'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='administrativeact',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='archaeologicalsite',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaladministrativeact',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaloperation',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='operation',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='parcel',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='parcelowner',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_operations/migrations/0010_auto_20171012_1316.py b/archaeological_operations/migrations/0010_auto_20171012_1316.py
new file mode 100644
index 000000000..3a847a803
--- /dev/null
+++ b/archaeological_operations/migrations/0010_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0009_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='historicaloperation',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='operation',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_operations/migrations/0011_auto_20171017_1840.py b/archaeological_operations/migrations/0011_auto_20171017_1840.py
new file mode 100644
index 000000000..cd169957a
--- /dev/null
+++ b/archaeological_operations/migrations/0011_auto_20171017_1840.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0010_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='administrativeact',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='archaeologicalsite',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaladministrativeact',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaloperation',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='operation',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='parcel',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='parcelowner',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
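A side note on the data fields added here and in the other apps: default={} is what the models declare, so all instances share one default dict object. Django's JSONField also accepts a callable; a purely illustrative equivalent declaration (not what this patch does) would be:

from django.contrib.postgres.fields import JSONField

# illustrative only: a callable default gives each row its own empty dict
data = JSONField(u"Data", default=dict, db_index=True)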
diff --git a/archaeological_operations/models.py b/archaeological_operations/models.py
index bc03ee387..70c1c02ba 100644
--- a/archaeological_operations/models.py
+++ b/archaeological_operations/models.py
@@ -248,6 +248,10 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
'archaeological_sites__reference': _(u"Archaeological sites ("
u"reference)"),
}
+ BASE_SEARCH_VECTORS = ["scientist__raw_name", "cached_label",
+ "common_name", "comment", "address", "old_code"]
+ INT_SEARCH_VECTORS = ["year"]
+ M2M_SEARCH_VECTORS = ["towns__name"]
# fields definition
creation_date = models.DateField(_(u"Creation date"),
@@ -309,6 +313,7 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
code_patriarche = models.TextField(u"Code PATRIARCHE", null=True,
blank=True, unique=True)
TABLE_COLS = ['full_code_patriarche'] + TABLE_COLS
+ BASE_SEARCH_VECTORS = ['code_patriarche'] + BASE_SEARCH_VECTORS
# preventive
fnap_financing = models.FloatField(u"Financement FNAP (%)",
blank=True, null=True)
@@ -340,7 +345,7 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
scientific_documentation_comment = models.TextField(
_(u"Comment about scientific documentation"), null=True, blank=True)
cached_label = models.CharField(_(u"Cached name"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
archaeological_sites = models.ManyToManyField(
ArchaeologicalSite, verbose_name=_(u"Archaeological sites"),
blank=True, related_name='operations')
diff --git a/archaeological_operations/templates/ishtar/sheet_administrativeact_pdf.html b/archaeological_operations/templates/ishtar/sheet_administrativeact_pdf.html
index b6d257cb0..be3e24428 100644
--- a/archaeological_operations/templates/ishtar/sheet_administrativeact_pdf.html
+++ b/archaeological_operations/templates/ishtar/sheet_administrativeact_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_administrativeact.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_operations/templates/ishtar/sheet_operation.html b/archaeological_operations/templates/ishtar/sheet_operation.html
index 5a02236a3..e46db74c7 100644
--- a/archaeological_operations/templates/ishtar/sheet_operation.html
+++ b/archaeological_operations/templates/ishtar/sheet_operation.html
@@ -71,6 +71,8 @@
{% field "Abstract" item.abstract "<pre>" "</pre>" %}
{% field "Comment about scientific documentation" item.scientific_documentation_comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if not next %}
{% if item.towns.count %}
<h3>{% trans "Localisation"%}</h3>
diff --git a/archaeological_operations/templates/ishtar/sheet_operation_pdf.html b/archaeological_operations/templates/ishtar/sheet_operation_pdf.html
index dc3c8b46f..7d86bd924 100644
--- a/archaeological_operations/templates/ishtar/sheet_operation_pdf.html
+++ b/archaeological_operations/templates/ishtar/sheet_operation_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_operation.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_operations/templates/ishtar/sheet_operationsource_pdf.html b/archaeological_operations/templates/ishtar/sheet_operationsource_pdf.html
index 1b2cd9ff3..68eb7aa2d 100644
--- a/archaeological_operations/templates/ishtar/sheet_operationsource_pdf.html
+++ b/archaeological_operations/templates/ishtar/sheet_operationsource_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_operationsource.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_operations/tests.py b/archaeological_operations/tests.py
index 0d6908374..b75c02cae 100644
--- a/archaeological_operations/tests.py
+++ b/archaeological_operations/tests.py
@@ -19,10 +19,12 @@
import json
import datetime
+from subprocess import Popen, PIPE
import StringIO
import zipfile
from django.conf import settings
+from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import Q
@@ -37,7 +39,8 @@ from archaeological_operations import views
from ishtar_common.models import OrganizationType, Organization, ItemKey, \
ImporterType, IshtarUser, TargetKey, ImporterModel, IshtarSiteProfile, \
Town, ImporterColumn, Person, Author, SourceType, AuthorType, \
- DocumentTemplate, PersonType, TargetKeyGroup
+ DocumentTemplate, PersonType, TargetKeyGroup, JsonDataField, \
+ JsonDataSection, ImportTarget, FormaterType
from archaeological_files.models import File, FileType
from archaeological_context_records.models import Unit
@@ -453,6 +456,24 @@ class ImportOperationTest(ImportTest, TestCase):
impt.delete()
self.assertEqual(parcel_count - 3, models.Parcel.objects.count())
+ def test_json_fields(self):
+ importer, form = self.init_ope_import("operations-with-json-fields.csv")
+ col = ImporterColumn.objects.create(importer_type=importer,
+ col_number=11)
+ formater_type = FormaterType.objects.get(
+ formater_type='IntegerFormater')
+ ImportTarget.objects.create(
+ column=col, target='data__autre_refs__arbitraire',
+ formater_type=formater_type)
+ impt = form.save(self.ishtar_user)
+ impt.initialize()
+ self.init_ope_targetkey(imp=impt)
+ impt.importation()
+ ope1 = models.Operation.objects.get(code_patriarche='4200')
+ self.assertEqual(ope1.data, {u'autre_refs': {u'arbitraire': 789}})
+ ope2 = models.Operation.objects.get(code_patriarche='4201')
+ self.assertEqual(ope2.data, {u'autre_refs': {u'arbitraire': 456}})
+
class ParcelTest(ImportTest, TestCase):
fixtures = OPERATION_TOWNS_FIXTURES
@@ -895,6 +916,21 @@ class OperationTest(TestCase, OperationInitTest):
self.assertEqual(ope_id, 'OP2011-1')
self.assertEqual(town, self.towns[0].name)
+ def test_search_vector_update(self):
+ operation = self.operations[0]
+ town = self.create_towns({'numero_insee': '12346', 'name': 'Daisy'})[-1]
+ operation.towns.add(town)
+ town = self.create_towns(
+ {'numero_insee': '12347', 'name': 'Dirty old'})[-1]
+ operation.towns.add(town)
+ operation = models.Operation.objects.get(pk=operation.pk)
+ operation.comment = u"Zardoz"
+ operation.code_patriarche = u"HUIAAA5"
+ operation.save()
+ for key in ('old', 'op2010', 'dirty', 'daisy', "'2010'", "zardoz",
+ "huiaaa5"):
+ self.assertIn(key, operation.search_vector)
+
def test_cache_bulk_update(self):
if settings.USE_SPATIALITE_FOR_TESTS:
# using views - can only be tested with postgresql
@@ -1008,6 +1044,32 @@ class OperationTest(TestCase, OperationInitTest):
self.assertEqual(response.status_code, 200)
self.assertIn('class="sheet"', response.content)
+ def test_show_pdf(self):
+ operation = self.operations[0]
+ c = Client()
+ response = c.get(reverse('show-operation',
+ kwargs={'pk': operation.pk, 'type': 'pdf'}))
+ self.assertEqual(response.status_code, 200)
+ # empty content when not allowed
+ self.assertEqual(response.content, "")
+ c.login(username=self.username, password=self.password)
+ response = c.get(reverse('show-operation',
+ kwargs={'pk': operation.pk, 'type': 'pdf'}))
+ self.assertEqual(response.status_code, 200)
+ f = StringIO.StringIO(response.content)
+ filetype = Popen("/usr/bin/file -b --mime -", shell=True, stdout=PIPE,
+ stdin=PIPE).communicate(f.read(1024))[0].strip()
+ self.assertTrue(filetype.startswith('application/pdf'))
+
+ def test_show_odt(self):
+ operation = self.operations[0]
+ c = Client()
+ response = c.get(reverse('show-operation',
+ kwargs={'pk': operation.pk, 'type': 'odt'}))
+ self.assertEqual(response.status_code, 200)
+ # empty content when not allowed
+ self.assertEqual(response.content, "")
+ c.login(username=self.username, password=self.password)
response = c.get(reverse('show-operation', kwargs={'pk': operation.pk,
'type': 'odt'}))
self.assertEqual(response.status_code, 200)
@@ -1015,6 +1077,53 @@ class OperationTest(TestCase, OperationInitTest):
z = zipfile.ZipFile(f)
self.assertIsNone(z.testzip())
+ def test_json(self):
+ operation = self.operations[0]
+ operation.data = {"groundhog": {"number": 53444,
+ "awake_state": u"réveillée",
+ "with_feather": "Oui"},
+ "frog_number": 32303}
+ operation.save()
+
+ content_type = ContentType.objects.get_for_model(operation)
+ groundhog_section = JsonDataSection.objects.create(
+ name="Marmotte", content_type=content_type)
+ JsonDataField.objects.create(name=u"État d'éveil",
+ key='groundhog__awake_state',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Avec plume",
+ key='groundhog__with_feather',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Zzzzzzzz",
+ key='groundhog__zzz',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Grenouille",
+ key='frog_number',
+ content_type=content_type)
+
+ c = Client()
+ c.login(username=self.username, password=self.password)
+ response = c.get(reverse('show-operation', kwargs={'pk': operation.pk}))
+ self.assertEqual(response.status_code, 200)
+ self.assertIn('class="sheet"', response.content)
+ self.assertIn(u"Marmotte".encode('utf-8'), response.content)
+ self.assertIn(u"État d&#39;éveil".encode('utf-8'), response.content)
+ self.assertIn(u"réveillée".encode('utf-8'), response.content)
+ self.assertIn(u"Grenouille".encode('utf-8'), response.content)
+ self.assertIn(u"32303".encode('utf-8'), response.content)
+ self.assertNotIn(u"53444".encode('utf-8'), response.content)
+ self.assertNotIn(u"Zzzzzzzz".encode('utf-8'), response.content)
+
+ operation.data = {}
+ operation.save()
+ response = c.get(reverse('show-operation', kwargs={'pk': operation.pk}))
+ self.assertEqual(response.status_code, 200)
+ self.assertIn('class="sheet"', response.content)
+ self.assertNotIn(u"Marmotte".encode('utf-8'), response.content)
+
class OperationSearchTest(TestCase, OperationInitTest):
fixtures = FILE_FIXTURES
@@ -1104,6 +1213,42 @@ class OperationSearchTest(TestCase, OperationInitTest):
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['total'], 1)
+ def test_town_search(self):
+ c = Client()
+ c.login(username=self.username, password=self.password)
+
+ data = {'numero_insee': '98989', 'name': 'base_town'}
+ base_town = self.create_towns(datas=data)[-1]
+
+ data = {'numero_insee': '56789', 'name': 'parent_town'}
+ parent_town = self.create_towns(datas=data)[-1]
+ parent_town.children.add(base_town)
+
+ data = {'numero_insee': '01234', 'name': 'child_town'}
+ child_town = self.create_towns(datas=data)[-1]
+ base_town.children.add(child_town)
+
+ ope = self.operations[1]
+ ope.towns.add(base_town)
+
+ # simple search
+ search = {'towns': base_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # parent search
+ search = {'towns': parent_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # child search
+ search = {'towns': child_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
def testOwnSearch(self):
c = Client()
response = c.get(reverse('get-operation'), {'year': '2010'})
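test_town_search above relies on the new Town.children relation (added by ishtar_common migration 0016, related_name='parents'): an operation attached to base_town must be found whether the search criterion is the parent or the child town. A hypothetical sketch of that expansion — the real implementation sits in the shared search machinery and may well be recursive:

def expanded_town_pks(town):
    # include the town itself plus its direct parents and children so that
    # towns split or merged over the years still match the same operations
    pks = {town.pk}
    pks.update(town.parents.values_list("pk", flat=True))
    pks.update(town.children.values_list("pk", flat=True))
    return pks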
diff --git a/archaeological_operations/tests/operations-with-json-fields.csv b/archaeological_operations/tests/operations-with-json-fields.csv
new file mode 100644
index 000000000..015497b4c
--- /dev/null
+++ b/archaeological_operations/tests/operations-with-json-fields.csv
@@ -0,0 +1,3 @@
+code OA,region,type operation,intitule operation,operateur,responsable operation,date debut terrain,date fin terrain,chronologie generale,identifiant document georeferencement,notice scientifique,numéro arbitraire
+4201,Bourgogne,Fouille programmée,Oppìdum de Paris 2,L'opérateur,,2000/01/31,2002/12/31,Age du Fer,,456
+4200,Bourgogne,Fouille programmée,Oppìdum de Paris,L'opérateur,Jean Sui-Resp'on Sablé,2000/01/22,2002/12/31,Age du Fer & Gallo-Romain & Néolithik & Moderne,,789
diff --git a/archaeological_warehouse/admin.py b/archaeological_warehouse/admin.py
index deaffde94..f2b44fcf0 100644
--- a/archaeological_warehouse/admin.py
+++ b/archaeological_warehouse/admin.py
@@ -36,9 +36,7 @@ class WarehouseAdmin(HistorizedObjectAdmin):
'town': 'town',
'person_in_charge': 'person'
})
- readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports'
- ]
+
admin_site.register(models.Warehouse, WarehouseAdmin)
@@ -48,6 +46,7 @@ class ContainerTypeAdmin(admin.ModelAdmin):
'volume')
model = models.ContainerType
+
admin_site.register(models.ContainerType, ContainerTypeAdmin)
@@ -56,7 +55,7 @@ class ContainerAdmin(HistorizedObjectAdmin):
list_filter = ("container_type",)
model = models.Container
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
form = make_ajax_form(model, {
'location': 'warehouse',
diff --git a/archaeological_warehouse/migrations/0008_auto_20171011_1644.py b/archaeological_warehouse/migrations/0008_auto_20171011_1644.py
new file mode 100644
index 000000000..82245647d
--- /dev/null
+++ b/archaeological_warehouse/migrations/0008_auto_20171011_1644.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0007_auto_20171004_1125'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='collection',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='container',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='warehouse',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AlterField(
+ model_name='container',
+ name='index',
+ field=models.IntegerField(default=0, verbose_name='Container ID'),
+ ),
+ ]
diff --git a/archaeological_warehouse/migrations/0009_auto_20171012_1316.py b/archaeological_warehouse/migrations/0009_auto_20171012_1316.py
new file mode 100644
index 000000000..a25a2d2f2
--- /dev/null
+++ b/archaeological_warehouse/migrations/0009_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0008_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='container',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Localisation'),
+ ),
+ migrations.AlterField(
+ model_name='container',
+ name='cached_location',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached location'),
+ ),
+ ]
diff --git a/archaeological_warehouse/migrations/0010_auto_20171017_1840.py b/archaeological_warehouse/migrations/0010_auto_20171017_1840.py
new file mode 100644
index 000000000..e45c44674
--- /dev/null
+++ b/archaeological_warehouse/migrations/0010_auto_20171017_1840.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0009_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='collection',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='container',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='warehouse',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_warehouse/models.py b/archaeological_warehouse/models.py
index a7865cf0e..fdd3a5e63 100644
--- a/archaeological_warehouse/models.py
+++ b/archaeological_warehouse/models.py
@@ -288,9 +288,9 @@ class Container(LightHistorizedItem, ImageModel):
reference = models.CharField(_(u"Container ref."), max_length=40)
comment = models.TextField(_(u"Comment"), null=True, blank=True)
cached_label = models.CharField(_(u"Localisation"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
cached_location = models.CharField(_(u"Cached location"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
index = models.IntegerField(u"Container ID", default=0)
external_id = models.TextField(_(u"External ID"), blank=True, null=True)
auto_external_id = models.BooleanField(
diff --git a/archaeological_warehouse/templates/ishtar/sheet_container.html b/archaeological_warehouse/templates/ishtar/sheet_container.html
index 7845da2c1..7c9f84a32 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_container.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_container.html
@@ -23,6 +23,8 @@
{% field "Location" item.precise_location %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.finds.count %}
<h4>{% trans "Content" %}</h4>
{% dynamic_table_document finds 'finds' 'container' item.pk 'TABLE_COLS' output 'large' %}
diff --git a/archaeological_warehouse/templates/ishtar/sheet_container_pdf.html b/archaeological_warehouse/templates/ishtar/sheet_container_pdf.html
index 5e4947cfa..ba0e3164c 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_container_pdf.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_container_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_container.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/archaeological_warehouse/templates/ishtar/sheet_warehouse.html b/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
index 3d39f9845..de93f9af3 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
@@ -17,6 +17,8 @@
{% include "ishtar/blocks/sheet_address_section.html" %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.containers.count %}
<h4>{% trans "Containers" %}</h4>
{% dynamic_table_document '' 'containers' 'location' item.pk 'TABLE_COLS' output %}
diff --git a/archaeological_warehouse/templates/ishtar/sheet_warehouse_pdf.html b/archaeological_warehouse/templates/ishtar/sheet_warehouse_pdf.html
index 260834ac6..d95efe58f 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_warehouse_pdf.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_warehouse_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_warehouse.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/example_project/.coveragerc b/example_project/.coveragerc
index 43462778f..ff9237bdd 100644
--- a/example_project/.coveragerc
+++ b/example_project/.coveragerc
@@ -6,4 +6,5 @@ exclude_lines =
[run]
omit =
*/migrations/*
+ */old_migrations/*
diff --git a/example_project/settings.py b/example_project/settings.py
index ea50daffb..f631047cb 100644
--- a/example_project/settings.py
+++ b/example_project/settings.py
@@ -32,10 +32,6 @@ BASE_URL = "/"
URL_PATH = ""
EXTRA_VERSION = 'git'
-STATICFILES_DIRS = (
- ROOT_PATH + "../static/",
-)
-
ODT_TEMPLATE = ROOT_PATH + "../ishtar_common/static/template.odt"
LOGIN_REDIRECT_URL = "/" + URL_PATH
@@ -240,6 +236,7 @@ ISHTAR_PERIODS = {}
ISHTAR_PERMIT_TYPES = {}
ISHTAR_DOC_TYPES = {u"undefined": u"Undefined"}
+ISHTAR_SEARCH_LANGUAGE = "french"
ISHTAR_DPTS = []
diff --git a/install/ishtar-install b/install/ishtar-install
index 36b937388..a34b1984c 100755
--- a/install/ishtar-install
+++ b/install/ishtar-install
@@ -118,16 +118,11 @@ do_install() {
;;
debian)
- MAINBACKS=`cat /etc/apt/sources.list | grep jessie-backports |grep -v "^#"`
- ALLBACKS=''
- if [ "$(ls -A /etc/apt/sources.list.d/)" ]; then
- ALLBACKS=`cat /etc/apt/sources.list.d/* | grep jessie-backports |grep -v "^#"`
- fi
- if [ "$ALLBACKS" != '' ] || [ "$MAINBACKS" != '' ]; then
- backports_activated='true';
- fi
dist_version="$(cat /etc/debian_version | sed 's/\/.*//' | sed 's/\..*//')"
case "$dist_version" in
+ 9)
+ dist_version="stretch"
+ ;;
8)
dist_version="jessie"
;;
@@ -135,6 +130,16 @@ do_install() {
dist_version="wheezy"
;;
esac
+ set +e
+ MAINBACKS=`cat /etc/apt/sources.list | grep $dist_version'-backports' |grep -v "^#"`
+ ALLBACKS=''
+ if [ "$(ls -A /etc/apt/sources.list.d/)" ]; then
+ ALLBACKS=`cat /etc/apt/sources.list.d/* | grep $dist_version'-backports' |grep -v "^#"`
+ fi
+ set -e
+ if [ "$ALLBACKS" != '' ] || [ "$MAINBACKS" != '' ]; then
+ backports_activated='true';
+ fi
;;
oracleserver)
@@ -282,7 +287,7 @@ EOF
# Run setup for each distro accordingly
case "$lsb_dist" in
ubuntu|debian)
- if [ "$dist_version" != "jessie" ] && [ "$dist_version" != "wheezy" ]; then
+ if [ "$dist_version" != "stretch" ] && [ "$dist_version" != "jessie" ] && [ "$dist_version" != "wheezy" ]; then
echo ""
cecho r " Sorry this script cannot manage your version of Debian/Ubuntu."
echo ""
@@ -372,9 +377,9 @@ EOF
cecho y "Installing Ishtar dependencies"
echo "";
( set -x; $sh_c 'sleep 3; apt-get install -t jessie-backports -y -q python python-django\
- python-django-registration' )
+ python-django-registration python-cffi' )
( set -x; $sh_c 'sleep 3; apt-get install -y -q \
- python-pisa python-bs4 python-django-formtools\
+ python-bs4 python-django-formtools libpangocairo-1.0-0 \
python-tidylib python-lxml python-imaging python-html5lib \
python-psycopg2 python-gdal gettext python-unicodecsv memcached \
python-django-extra-views python-memcache python-dbf python-markdown' )
@@ -389,6 +394,53 @@ EOF
cecho y "Installing python-ajax-select (available version in Debian is not compatible with backported Django)"
echo "";
( set -x; $sh_c 'pip install django-ajax-selects==1.4.3' )
+ cecho y "Installing weasyprint"
+ echo "";
+ ( set -x; $sh_c 'pip install WeasyPrint==0.41' )
+
+ fi
+
+ if [ "$dist_version" == "stretch" ]; then
+ if [ "$backports_activated" != 'true' ]; then
+ echo ""
+ cecho r " In order to install Ishtar you have to activate Debian backports."
+ echo " To do that:"
+ echo ""
+ echo " echo 'deb http://ftp.debian.org/debian stretch-backports main contrib' >> /etc/apt/sources.list"
+ echo ""
+ cecho p " Run again Ishtar installation script after that."
+ exit 1
+ fi
+
+ if [ "$default_db" == '127.0.0.1' ]; then
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing postgresql"
+ echo ""
+ POSTGIS=postgresql-9.6-postgis-2.3
+ ( set -x; $sh_c 'sleep 3; apt-get install -y -q postgresql '$POSTGIS )
+ fi
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing Ishtar dependencies"
+ echo "";
+ ( set -x; $sh_c 'sleep 3; apt-get install -t stretch-backports -y -q python-django' )
+ ( set -x; $sh_c 'sleep 3; apt-get install -y -q \
+ python-django-registration libpangocairo-1.0-0 \
+ python-bs4 python-django-formtools python-cffi \
+ python-tidylib python-lxml python-imaging python-html5lib \
+ python-psycopg2 python-gdal gettext python-unicodecsv memcached \
+ python-django-extra-views python-memcache python-dbf python-markdown \
+ python-reportlab django-ajax-selects python-django-extensions' )
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing django-simple-history"
+ echo "";
+ ( set -x; $sh_c 'pip install git+https://github.com/treyhunner/django-simple-history.git@1.8.2#egg=django-simple-history' )
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing python-secretary"
+ echo "";
+ ( set -x; $sh_c 'pip install secretary==0.2.14' )
+ cecho y "Installing weasyprint"
+ echo "";
+ ( set -x; $sh_c 'pip install WeasyPrint==0.41' )
fi
;;
diff --git a/ishtar_common/admin.py b/ishtar_common/admin.py
index cec61a51e..2df910ffd 100644
--- a/ishtar_common/admin.py
+++ b/ishtar_common/admin.py
@@ -20,11 +20,14 @@
import csv
from ajax_select import make_ajax_form
+from ajax_select.fields import AutoCompleteSelectField, \
+ AutoCompleteSelectMultipleField
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
+from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.admin import SiteAdmin
from django.contrib.sites.models import Site
from django.contrib.gis.forms import PointField, OSMWidget
@@ -114,12 +117,22 @@ def export_as_csv_action(description=_(u"Export selected as CSV file"),
class HistorizedObjectAdmin(admin.ModelAdmin):
- readonly_fields = ['history_creator', 'history_modifier',]
+ readonly_fields = ['history_creator', 'history_modifier', 'search_vector']
def save_model(self, request, obj, form, change):
obj.history_modifier = request.user
obj.save()
+ def get_readonly_fields(self, request, obj=None):
+ if obj: # editing an existing object
+ return tuple(self.readonly_fields or []) + tuple(['imports'])
+ return self.readonly_fields
+
+ def get_exclude(self, request, obj=None):
+ if not obj:
+ return tuple(self.exclude or []) + tuple(['imports'])
+ return self.exclude
+
class MyGroupAdmin(GroupAdmin):
class Media:
@@ -153,7 +166,6 @@ class OrganizationAdmin(HistorizedObjectAdmin):
list_filter = ("organization_type",)
search_fields = ('name',)
exclude = ('merge_key', 'merge_exclusion', 'merge_candidate', )
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
model = models.Organization
admin_site.register(models.Organization, OrganizationAdmin)
@@ -164,31 +176,51 @@ class PersonAdmin(HistorizedObjectAdmin):
list_filter = ("person_types",)
search_fields = ('name', 'surname', 'email', 'raw_name')
exclude = ('merge_key', 'merge_exclusion', 'merge_candidate', )
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
form = make_ajax_form(models.Person, {'attached_to': 'organization'})
model = models.Person
admin_site.register(models.Person, PersonAdmin)
+class AdminRelatedTownForm(forms.ModelForm):
+ class Meta:
+ model = models.Town.children.through
+ exclude = []
+ from_town = AutoCompleteSelectField(
+ 'town', required=True, label=_(u"Parent"))
+
+
class AdminTownForm(forms.ModelForm):
class Meta:
model = models.Town
- exclude = []
+ exclude = ['imports']
center = PointField(label=_(u"center"), required=False,
widget=OSMWidget)
+ children = AutoCompleteSelectMultipleField('town', required=False,
+ label=_(u"Town children"))
+
+
+class TownParentInline(admin.TabularInline):
+ model = models.Town.children.through
+ fk_name = 'to_town'
+ form = AdminRelatedTownForm
+ verbose_name = _(u"Parent")
+ verbose_name_plural = _(u"Parents")
+ extra = 1
class TownAdmin(admin.ModelAdmin):
+ model = models.Town
list_display = ['name', ]
search_fields = ['name']
+ readonly_fields = ['cached_label']
if settings.COUNTRY == 'fr':
list_display += ['numero_insee', 'departement', ]
search_fields += ['numero_insee', 'departement__label', ]
list_filter = ("departement",)
- readonly_fields = ['imports']
- model = models.Town
form = AdminTownForm
+ inlines = [TownParentInline]
+
admin_site.register(models.Town, TownAdmin)
@@ -333,6 +365,56 @@ class ItemKeyAdmin(admin.ModelAdmin):
admin_site.register(models.ItemKey, ItemKeyAdmin)
+class JsonContentTypeFormMixin(object):
+ class Meta:
+ model = models.JsonDataSection
+ exclude = []
+
+ def __init__(self, *args, **kwargs):
+ super(JsonContentTypeFormMixin, self).__init__(*args, **kwargs)
+ choices = []
+ for pk, label in self.fields['content_type'].choices:
+ if not pk:
+ choices.append((pk, label))
+ continue
+ ct = ContentType.objects.get(pk=pk)
+ model_class = ct.model_class()
+ if hasattr(model_class, 'data') and \
+ not hasattr(model_class, 'history_type'):
+ choices.append((pk, label))
+ self.fields['content_type'].choices = sorted(choices,
+ key=lambda x: x[1])
+
+
+class JsonDataSectionForm(JsonContentTypeFormMixin, forms.ModelForm):
+ class Meta:
+ model = models.JsonDataSection
+ exclude = []
+
+
+class JsonDataSectionAdmin(admin.ModelAdmin):
+ list_display = ['name', 'content_type', 'order']
+ form = JsonDataSectionForm
+
+
+admin_site.register(models.JsonDataSection, JsonDataSectionAdmin)
+
+
+class JsonDataFieldForm(JsonContentTypeFormMixin, forms.ModelForm):
+ class Meta:
+ model = models.JsonDataField
+ exclude = []
+
+
+class JsonDataFieldAdmin(admin.ModelAdmin):
+ list_display = ['name', 'content_type', 'key', 'display',
+ 'order', 'section']
+ form = JsonDataFieldForm
+
+
+admin_site.register(models.JsonDataField, JsonDataFieldAdmin)
+
+
class AdministrationScriptAdmin(admin.ModelAdmin):
list_display = ['name', 'path']
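The new JsonDataField admin lets a key such as 'groundhog__awake_state' be declared per content type and rendered by the sheet_json.html include used in the sheet templates above. A hypothetical helper showing the double-underscore resolution described in the model's help text (the template-side implementation is not in this diff):

def resolve_json_key(data, key):
    # walk the nested dict following the '__' separated parts; return None
    # when any level is missing so the field is simply not displayed
    value = data
    for part in key.split("__"):
        if not isinstance(value, dict) or part not in value:
            return None
        value = value[part]
    return value

# resolve_json_key({"groundhog": {"awake_state": u"réveillée"}},
#                  "groundhog__awake_state") == u"réveillée"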
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 9caebb2dd..e8ec43ab2 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -1486,6 +1486,9 @@ class Importer(object):
# importer trigger
self._set_importer_trigger(cls, attribute, data)
return
+ if attribute == 'data': # json field
+ # no need to do anything
+ return
try:
field_object = cls._meta.get_field(attribute)
except FieldDoesNotExist:
@@ -1570,8 +1573,8 @@ class Importer(object):
create_dict = copy.deepcopy(data)
for k in create_dict.keys():
- # filter unnecessary default values
- if type(create_dict[k]) == dict:
+ # filter unnecessary default values but not the json field
+ if type(create_dict[k]) == dict and k != 'data':
create_dict.pop(k)
# File doesn't like deepcopy
elif type(create_dict[k]) == File:
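These importer hunks only show that a 'data' attribute is passed through untouched and kept in create_dict; the nesting itself — turning a target such as data__autre_refs__arbitraire into {'autre_refs': {'arbitraire': 789}}, as asserted in test_json_fields — happens elsewhere. A sketch of that folding, with the helper name being an assumption:

def fold_json_target(data, target, value):
    # 'data__autre_refs__arbitraire' -> data['autre_refs']['arbitraire'] = value
    keys = target.split("__")[1:]        # drop the leading 'data' prefix
    node = data
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    return data

# fold_json_target({}, "data__autre_refs__arbitraire", 789)
# == {"autre_refs": {"arbitraire": 789}}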
diff --git a/ishtar_common/management/commands/update_search_vectors.py b/ishtar_common/management/commands/update_search_vectors.py
new file mode 100644
index 000000000..c73a6e88e
--- /dev/null
+++ b/ishtar_common/management/commands/update_search_vectors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+
+from django.core.management.base import BaseCommand
+import django.apps
+
+
+class Command(BaseCommand):
+ help = "./manage.py update_search_vectors\n\n"\
+ "Update full texte search vectors."
+
+ def handle(self, *args, **options):
+ for model in django.apps.apps.get_models():
+ if hasattr(model, "update_search_vector") and \
+ getattr(model, "BASE_SEARCH_VECTORS", None):
+ self.stdout.write("\n* update {}".format(model))
+ total = model.objects.count()
+ for idx, item in enumerate(model.objects.all()):
+ sys.stdout.write("\r{}/{} ".format(idx, total))
+ sys.stdout.flush()
+ item.update_search_vector()
+ self.stdout.write("\n")
diff --git a/ishtar_common/migrations/0015_auto_20171011_1644.py b/ishtar_common/migrations/0015_auto_20171011_1644.py
new file mode 100644
index 000000000..a9f4499c2
--- /dev/null
+++ b/ishtar_common/migrations/0015_auto_20171011_1644.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0014_ishtarsiteprofile_preservation'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='historicalorganization',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalperson',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='organization',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='person',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0016_auto_20171016_1104.py b/ishtar_common/migrations/0016_auto_20171016_1104.py
new file mode 100644
index 000000000..1d9209bdd
--- /dev/null
+++ b/ishtar_common/migrations/0016_auto_20171016_1104.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-16 11:04
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0015_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='town',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AddField(
+ model_name='town',
+ name='children',
+ field=models.ManyToManyField(blank=True, related_name='parents', to='ishtar_common.Town', verbose_name='Town children'),
+ ),
+ migrations.AddField(
+ model_name='town',
+ name='year',
+ field=models.IntegerField(blank=True, help_text='If not filled, considered as the oldest known town.', null=True, verbose_name='Year of creation'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0017_auto_20171016_1320.py b/ishtar_common/migrations/0017_auto_20171016_1320.py
new file mode 100644
index 000000000..a48b36ce7
--- /dev/null
+++ b/ishtar_common/migrations/0017_auto_20171016_1320.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-16 13:20
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0016_auto_20171016_1104'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='town',
+ name='numero_insee',
+ field=models.CharField(max_length=6, verbose_name='Num\xe9ro INSEE'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='year',
+ field=models.IntegerField(blank=True, help_text='Filling this field is relevant to distinguish old towns from new towns.', null=True, verbose_name='Year of creation'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='town',
+ unique_together=set([('numero_insee', 'year')]),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0018_auto_20171017_1840.py b/ishtar_common/migrations/0018_auto_20171017_1840.py
new file mode 100644
index 000000000..0c617a3d5
--- /dev/null
+++ b/ishtar_common/migrations/0018_auto_20171017_1840.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('contenttypes', '0002_remove_content_type_name'),
+ ('ishtar_common', '0017_auto_20171016_1320'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='JsonDataField',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=200, verbose_name='Name')),
+ ('key', models.CharField(help_text='Value of the key in the JSON schema. For a hierarchical key, use "__" as a separator. For instance, the key \'my_subkey\' with data such as {\'my_key\': {\'my_subkey\': \'value\'}} is reached with my_key__my_subkey.', max_length=200, verbose_name='Key')),
+ ('display', models.BooleanField(default=True, verbose_name='Display')),
+ ('order', models.IntegerField(default=10, verbose_name='Order')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
+ ],
+ options={
+ 'verbose_name': 'Json data - Field',
+ 'verbose_name_plural': 'Json data - Fields',
+ },
+ ),
+ migrations.CreateModel(
+ name='JsonDataSection',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=200, verbose_name='Name')),
+ ('order', models.IntegerField(default=10, verbose_name='Order')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
+ ],
+ options={
+ 'ordering': ['name'],
+ 'verbose_name': 'Json data - Menu',
+ 'verbose_name_plural': 'Json data - Menus',
+ },
+ ),
+ migrations.AddField(
+ model_name='historicalorganization',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalperson',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='organization',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='person',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='jsondatafield',
+ name='section',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ishtar_common.JsonDataSection'),
+ ),
+ ]
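For reference, a minimal sketch of how a hierarchical JsonDataField key resolves against a record's JSON data, following the "__" convention described in the help text above (sample data and key are hypothetical):

    data = {"my_key": {"my_subkey": "value"}}
    key = "my_key__my_subkey"

    value = data
    for part in key.split("__"):
        # walk one level deeper for each "__"-separated part
        value = value.get(part) if isinstance(value, dict) else None
        if value is None:
            break
    # value == "value"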
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index 28a24115b..c3ba4fdd0 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -35,6 +35,8 @@ import tempfile
import time
from django.conf import settings
+from django.contrib.postgres.fields import JSONField
+from django.contrib.postgres.search import SearchVectorField, SearchVector
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
@@ -58,7 +60,7 @@ from simple_history.models import HistoricalRecords as BaseHistoricalRecords
from ishtar_common.model_merging import merge_model_objects
from ishtar_common.utils import get_cache, disable_for_loaddata, create_slug,\
- get_all_field_names
+ get_all_field_names, merge_tsvectors, cached_label_changed
from ishtar_common.models_imports import ImporterModel, ImporterType, \
ImporterDefault, ImporterDefaultValues, ImporterColumn, \
@@ -908,6 +910,96 @@ class BulkUpdatedItem(object):
return transaction_id, False
+class JsonDataSection(models.Model):
+ content_type = models.ForeignKey(ContentType)
+ name = models.CharField(_(u"Name"), max_length=200)
+ order = models.IntegerField(_(u"Order"), default=10)
+
+ class Meta:
+ verbose_name = _(u"Json data - Menu")
+ verbose_name_plural = _(u"Json data - Menus")
+ ordering = ['order', 'name']
+
+ def __unicode__(self):
+ return u"{} - {}".format(self.content_type, self.name)
+
+
+class JsonDataField(models.Model):
+ name = models.CharField(_(u"Name"), max_length=200)
+ content_type = models.ForeignKey(ContentType)
+ key = models.CharField(
+ _(u"Key"), max_length=200,
+ help_text=_(u"Value of the key in the JSON schema. For hierarchical "
+ u"key use \"__\" to explain it. For instance the key "
+ u"'my_subkey' with data such as {'my_key': {'my_subkey': "
+ u"'value'}} will be reached with my_key__my_subkey."))
+ display = models.BooleanField(_(u"Display"), default=True)
+ order = models.IntegerField(_(u"Order"), default=10)
+ section = models.ForeignKey(JsonDataSection, blank=True, null=True)
+
+ class Meta:
+ verbose_name = _(u"Json data - Field")
+ verbose_name_plural = _(u"Json data - Fields")
+ ordering = ['order', 'name']
+
+ def __unicode__(self):
+ return u"{} - {}".format(self.content_type, self.name)
+
+ def clean(self):
+ if not self.section:
+ return
+ if self.section.content_type != self.content_type:
+ raise ValidationError(
+ _(u"Content type of the field and of the menu do not match"))
+
+
+class JsonData(models.Model):
+ data = JSONField(default={}, db_index=True, blank=True)
+
+ class Meta:
+ abstract = True
+
+ def pre_save(self):
+ if not self.data:
+ self.data = {}
+
+ @property
+ def json_sections(self):
+ sections = []
+ try:
+ content_type = ContentType.objects.get_for_model(self)
+ except ContentType.DoesNotExist:
+ return sections
+ fields = list(JsonDataField.objects.filter(
+ content_type=content_type, display=True, section__isnull=True
+ ).all()) # no section fields
+
+ fields += list(JsonDataField.objects.filter(
+ content_type=content_type, display=True, section__isnull=False
+ ).order_by('section__order', 'order').all())
+
+ for field in fields:
+ value = None
+ data = self.data.copy()
+ for key in field.key.split('__'):
+ if key in data:
+ value = copy.copy(data[key])
+ data = data[key]
+ else:
+ value = None
+ break
+ if not value:
+ continue
+ if type(value) in (list, tuple):
+ value = u" ; ".join([unicode(v) for v in value])
+ section_name = field.section.name if field.section else None
+ if not sections or section_name != sections[-1][0]:
+ # only open a new section when the section name changes
+ sections.append((section_name, []))
+ sections[-1][1].append((field.name, value))
+ return sections
+
+
class Imported(models.Model):
imports = models.ManyToManyField(
Import, blank=True,
@@ -917,9 +1009,85 @@ class Imported(models.Model):
abstract = True
-class BaseHistorizedItem(Imported):
+class FullSearch(models.Model):
+ search_vector = SearchVectorField(_("Search vector"), blank=True, null=True,
+ help_text=_("Auto filled at save"))
+ BASE_SEARCH_VECTORS = []
+ INT_SEARCH_VECTORS = []
+ M2M_SEARCH_VECTORS = []
+ PARENT_SEARCH_VECTORS = []
+
+ class Meta:
+ abstract = True
+
+ def update_search_vector(self, save=True):
+ """
+ Update the search vector
+ :param save: True if you want to save the object immediately
+ :return: True if modified
+ """
+ if not self.BASE_SEARCH_VECTORS and not self.M2M_SEARCH_VECTORS:
+ logger.warning("No search_vectors defined for {}".format(
+ self.__class__))
+ return
+ if getattr(self, '_search_updated', None):
+ return
+ self._search_updated = True
+
+ old_search = ""
+ if self.search_vector:
+ old_search = self.search_vector[:]
+ search_vectors = []
+ base_q = self.__class__.objects.filter(pk=self.pk)
+
+ # many-to-many relations have to be queried one by one, otherwise only one is fetched
+ for M2M_SEARCH_VECTOR in self.M2M_SEARCH_VECTORS:
+ key = M2M_SEARCH_VECTOR.split('__')[0]
+ rel_key = getattr(self, key)
+ for item in rel_key.values('pk').all():
+ query_dct = {key + "__pk": item['pk']}
+ q = copy.copy(base_q).filter(**query_dct)
+ q = q.annotate(
+ search=SearchVector(
+ M2M_SEARCH_VECTOR,
+ config=settings.ISHTAR_SEARCH_LANGUAGE)
+ ).values('search')
+ search_vectors.append(q.all()[0]['search'])
+
+ # int/float values are not handled well by SearchVector
+ for INT_SEARCH_VECTOR in self.INT_SEARCH_VECTORS:
+ q = base_q.values(INT_SEARCH_VECTOR)
+ search_vectors.append(
+ "'{}':1".format(q.all()[0][INT_SEARCH_VECTOR]))
+
+ # copy parent vector fields
+ for PARENT_SEARCH_VECTOR in self.PARENT_SEARCH_VECTORS:
+ parent = getattr(self, PARENT_SEARCH_VECTOR)
+ if hasattr(parent, 'all'): # m2m
+ for p in parent.all():
+ search_vectors.append(p.search_vector)
+ else:
+ search_vectors.append(parent.search_vector)
+
+ # query "simple" fields
+ q = base_q.annotate(
+ search=SearchVector(
+ *self.BASE_SEARCH_VECTORS,
+ config=settings.ISHTAR_SEARCH_LANGUAGE
+ )).values('search')
+ search_vectors.append(q.all()[0]['search'])
+ self.search_vector = merge_tsvectors(search_vectors)
+ changed = old_search != self.search_vector
+ if save and changed:
+ self.skip_history_when_saving = True
+ self.save()
+ return changed
+
+
+class BaseHistorizedItem(FullSearch, Imported, JsonData):
"""
- Historized item with external ID management
+ Historized item with external ID management.
+ All historized items are searchable and have a JSON data field.
"""
IS_BASKET = False
EXTERNAL_ID_KEY = ''
@@ -1187,6 +1355,7 @@ class LightHistorizedItem(BaseHistorizedItem):
super(LightHistorizedItem, self).save(*args, **kwargs)
return True
+
PARSE_FORMULA = re.compile("{([^}]*)}")
FORMULA_FILTERS = {
@@ -1409,6 +1578,7 @@ def get_current_profile(force=False):
def cached_site_changed(sender, **kwargs):
get_current_profile(force=True)
+
post_save.connect(cached_site_changed, sender=IshtarSiteProfile)
post_delete.connect(cached_site_changed, sender=IshtarSiteProfile)
@@ -2490,12 +2660,20 @@ class Town(Imported, models.Model):
center = models.PointField(_(u"Localisation"), srid=settings.SRID,
blank=True, null=True)
if settings.COUNTRY == 'fr':
- numero_insee = models.CharField(u"Numéro INSEE", max_length=6,
- unique=True)
+ numero_insee = models.CharField(u"Numéro INSEE", max_length=6)
departement = models.ForeignKey(
Department, verbose_name=u"Département", null=True, blank=True)
canton = models.ForeignKey(Canton, verbose_name=u"Canton", null=True,
blank=True)
+ year = models.IntegerField(
+ _("Year of creation"), null=True, blank=True,
+ help_text=_(u"Filling this field is relevant to distinguish old towns "
+ u"to new towns."))
+ children = models.ManyToManyField(
+ 'Town', verbose_name=_(u"Town children"), blank=True,
+ related_name='parents')
+ cached_label = models.CharField(_(u"Cached name"), max_length=500,
+ null=True, blank=True, db_index=True)
objects = models.GeoManager()
class Meta:
@@ -2503,11 +2681,24 @@ class Town(Imported, models.Model):
verbose_name_plural = _(u"Towns")
if settings.COUNTRY == 'fr':
ordering = ['numero_insee']
+ unique_together = (('numero_insee', 'year'),)
def __unicode__(self):
+ if self.cached_label:
+ return self.cached_label
+ self.save()
+ return self.cached_label
+
+ def _generate_cached_label(self):
+ cached_label = self.name
if settings.COUNTRY == "fr":
- return u"%s (%s)" % (self.name, self.numero_insee[:2])
- return self.name
+ cached_label = u"%s - %s" % (self.name, self.numero_insee[:2])
+ if self.year:
+ cached_label += " ({})".format(self.year)
+ return cached_label
+
+
+post_save.connect(cached_label_changed, sender=Town)
class OperationType(GeneralType):
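A minimal sketch of how a model is expected to opt into the new full-text machinery, assuming hypothetical fields; the real models declare their own *_SEARCH_VECTORS lists:

    # hypothetical model mixing in FullSearch; field names are illustrative
    class Museum(FullSearch, models.Model):
        name = models.CharField(max_length=200)
        year = models.IntegerField(null=True, blank=True)
        towns = models.ManyToManyField("ishtar_common.Town", blank=True)

        BASE_SEARCH_VECTORS = ["name"]        # plain text fields
        INT_SEARCH_VECTORS = ["year"]         # numbers, stored as raw lexemes
        M2M_SEARCH_VECTORS = ["towns__name"]  # queried one relation at a time

    # museum.update_search_vector() then merges the partial vectors and saves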
diff --git a/ishtar_common/static/gentium/GentiumPlus-I.ttf b/ishtar_common/static/gentium/GentiumPlus-I.ttf
new file mode 100644
index 000000000..7bc1b3d8b
--- /dev/null
+++ b/ishtar_common/static/gentium/GentiumPlus-I.ttf
Binary files differ
diff --git a/ishtar_common/static/gentium/GentiumPlus-R.ttf b/ishtar_common/static/gentium/GentiumPlus-R.ttf
new file mode 100644
index 000000000..c1194dd35
--- /dev/null
+++ b/ishtar_common/static/gentium/GentiumPlus-R.ttf
Binary files differ
diff --git a/ishtar_common/static/gentium/OFL.txt b/ishtar_common/static/gentium/OFL.txt
new file mode 100644
index 000000000..4f7540787
--- /dev/null
+++ b/ishtar_common/static/gentium/OFL.txt
@@ -0,0 +1,94 @@
+Copyright (c) 2003-2014 SIL International (http://www.sil.org/),
+with Reserved Font Names "Gentium" and "SIL".
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/ishtar_common/static/gentium/README.txt b/ishtar_common/static/gentium/README.txt
new file mode 100644
index 000000000..bc17a8cb7
--- /dev/null
+++ b/ishtar_common/static/gentium/README.txt
@@ -0,0 +1,88 @@
+README
+Gentium Plus
+========================
+
+Thank you for your interest in the Gentium Plus fonts.
+We hope you find them useful!
+
+Gentium Plus supports a wide range of Latin, Greek and Cyrillic
+characters. Documentation for the fonts is available on Gentium website
+(http://scripts.sil.org/Gentium), including details on what ranges are
+supported.
+
+Gentium Plus is released under the SIL Open Font License.
+
+See the OFL and OFL-FAQ for details of the SIL Open Font License.
+See the FONTLOG for information on this and previous releases.
+See the GENTIUM-FAQ for answers to common questions about the Gentium fonts
+See the website (http://scripts.sil.org/Gentium) for further documentation.
+See the SIL Unicode Roman FAQ (http://scripts.sil.org/ComplexRomanFontFAQ)
+for frequently asked questions and their answers regarding SIL's Roman fonts.
+
+
+TIPS
+====
+
+As this font is distributed at no cost, we are unable to provide a
+commercial level of personal technical support. The font has, however,
+been through some testing on various platforms to be sure it works in most
+situations. In particular, it has been tested and shown to work on Windows
+XP, Windows Vista and Windows 7. Graphite capabilities have been tested
+on Graphite-supported platforms.
+
+If you do find a problem, please do report it to fonts@sil.org.
+We can't guarantee any direct response, but will try to fix reported bugs in
+future versions. Make sure you read through the
+SIL Unicode Roman FAQ (http://scripts.sil.org/ComplexRomanFontFAQ).
+
+Many problems can be solved, or at least explained, through an understanding
+of the encoding and use of the fonts. Here are some basic hints:
+
+Encoding:
+The fonts are encoded according to Unicode, so your application must support
+Unicode text in order to access letters other than the standard alphabet.
+Most Windows applications provide basic Unicode support. You will, however,
+need some way of entering Unicode text into your document.
+
+Keyboarding:
+This font does not include any keyboarding helps or utilities. It uses the
+built-in keyboards of the operating system. You will need to install the
+appropriate keyboard and input method for the characters of the language you
+wish to use. If you want to enter characters that are not supported by any
+system keyboard, the Keyman program (www.tavultesoft.com) can be helpful
+on Windows systems. Also available for Windows is MSKLC
+(http://www.microsoft.com/globaldev/tools/msklc.mspx).
+For Linux systems such as Ubuntu, KMFL (http://kmfl.sourceforge.net/)
+is available. Ukelele (http://scripts.sil.org/ukelele) is available for
+Mac OS X versions 10.2 and later.
+
+For other platforms, KMFL (http://kmfl.sourceforge.net/),
+XKB (http://www.x.org/wiki/XKB) or Ukelele (http://scripts.sil.org/ukelele)
+can be helpful.
+
+If you want to enter characters that are not supported by any system
+keyboard, and to access the full Unicode range, we suggest you use
+gucharmap, kcharselect on Ubuntu or similar software.
+
+Another method of entering some symbols is provided by a few applications such
+as Adobe InDesign or OpenOffice.org. They can display a glyph palette or input
+dialog that shows all the glyphs (symbols) in a font and allow you to enter
+them by clicking on the glyph you want.
+
+Rendering:
+This font is designed to work with Graphite or Opentype advanced font
+technologies. To take advantage of the advanced typographic
+capabilities of this font, you must be using applications that provide an
+adequate level of support for Graphite or OpenType. See "Applications
+that provide an adequate level of support for SIL Unicode Roman fonts"
+(http://scripts.sil.org/Complex_AdLvSup).
+
+
+CONTACT
+========
+For more information please visit the Gentium page on SIL International's
+Computers and Writing systems website:
+http://scripts.sil.org/Gentium
+
+Support through the website: http://scripts.sil.org/Support
+
diff --git a/ishtar_common/static/media/style_basic.css b/ishtar_common/static/media/style_basic.css
index 1d92928dc..d0f5bbe4a 100644
--- a/ishtar_common/static/media/style_basic.css
+++ b/ishtar_common/static/media/style_basic.css
@@ -1,7 +1,8 @@
@page {
size: a4 portrait;
- margin: 2.5cm 1cm 2.5cm 1cm;
+ margin: 2cm 1cm 2.5cm 1cm;
background-image: url("images/ishtar-bg.jpg");
+ background-repeat: no-repeat;
@frame footer {
-pdf-frame-content: pdffooter;
bottom: 1cm;
@@ -16,6 +17,9 @@
margin-right: 1cm;
height: 1.5cm;
}
+ @bottom-center {
+ content: counter(page) "/" counter(pages);
+ }
}
label{
@@ -36,6 +40,13 @@ caption, h3{
font-size:1.5em;
}
+a img {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+ padding:0.5em;
+}
+
th{
text-align:center;
border-bottom:2px solid #922;
@@ -72,10 +83,21 @@ td{
display:none;
}
+caption, hr, .tool-left, .tool-right, .display_details, .display_details_inline{
+ display: none;
+ color: transparent;
+ background-color: transparent;
+ border-color: transparent;
+}
+
p{
margin:0.2em;
}
+td{
+ background-color: #ddd;
+}
+
#pdffooter, #pdfheader{
text-align:center;
}
@@ -84,8 +106,15 @@ p{
font-weight:bold;
width:100%;
border-bottom:1px solid #922;
+ position: fixed;
+ top: -0.5cm;
}
-.display_details, .display_details_inline{
- display: none;
+.window-refs{
+ text-align:center;
+ padding:0;
+ margin:0;
+ font-size: 0.9em;
+ width:100%;
+ display:block;
}
diff --git a/ishtar_common/templates/ishtar/blocks/sheet_json.html b/ishtar_common/templates/ishtar/blocks/sheet_json.html
new file mode 100644
index 000000000..31e6acb84
--- /dev/null
+++ b/ishtar_common/templates/ishtar/blocks/sheet_json.html
@@ -0,0 +1,11 @@
+{% load i18n window_field %}
+{% for json_section, json_fields in item.json_sections %}
+{% if json_section %}
+<h3>{{json_section}}</h3>
+{% endif %}
+{% for label, value in json_fields %}
+{% if forloop.first %}<ul class='form-flex'>{% endif %}
+ {% field_li label value %}
+{% if forloop.last %}</ul>{% endif %}
+{% endfor %}
+{% endfor %}
diff --git a/ishtar_common/templates/ishtar/sheet_organization_pdf.html b/ishtar_common/templates/ishtar/sheet_organization_pdf.html
index 887c7ccb2..2276aa4d1 100644
--- a/ishtar_common/templates/ishtar/sheet_organization_pdf.html
+++ b/ishtar_common/templates/ishtar/sheet_organization_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_organization.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/ishtar_common/templates/ishtar/sheet_person_pdf.html b/ishtar_common/templates/ishtar/sheet_person_pdf.html
index 199892d2f..9dd9e4c50 100644
--- a/ishtar_common/templates/ishtar/sheet_person_pdf.html
+++ b/ishtar_common/templates/ishtar/sheet_person_pdf.html
@@ -1,6 +1,5 @@
{% extends "ishtar/sheet_person.html" %}
{% block header %}
-<link rel="stylesheet" href="{{STATIC_URL}}/media/style_basic.css?ver={{VERSION}}" />
{% endblock %}
{% block main_head %}
{{ block.super }}
@@ -10,9 +9,6 @@ Ishtar &ndash; {{APP_NAME}} &ndash; {{item}}
{% endblock %}
{%block head_sheet%}{%endblock%}
{%block main_foot%}
-<div id="pdffooter">
-&ndash; <pdf:pagenumber/> &ndash;
-</div>
</body>
</html>
{%endblock%}
diff --git a/ishtar_common/tests.py b/ishtar_common/tests.py
index 349408465..bbb449fe3 100644
--- a/ishtar_common/tests.py
+++ b/ishtar_common/tests.py
@@ -23,6 +23,8 @@ import os
import shutil
from StringIO import StringIO
+from django.apps import apps
+
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
@@ -38,6 +40,7 @@ from django.test.runner import DiscoverRunner
from ishtar_common import models
from ishtar_common import views
+from ishtar_common.apps import admin_site
from ishtar_common.utils import post_save_point
@@ -347,6 +350,13 @@ class AdminGenTypeTest(TestCase):
models_with_data = gen_models + [models.ImporterModel]
models = models_with_data
module_name = 'ishtar_common'
+ ishtar_apps = [
+ 'ishtar_common', 'archaeological_files', 'archaeological_operations',
+ 'archaeological_context_records', 'archaeological_warehouse',
+ 'archaeological_finds'
+ ]
+ readonly_models = ['archaeological_finds.Property',
+ 'archaeological_finds.Treatment']
def setUp(self):
self.password = 'mypassword'
@@ -359,16 +369,34 @@ class AdminGenTypeTest(TestCase):
self.client.login(username=self.username, password=self.password)
def test_listing_and_detail(self):
- for model in self.models:
+ models = []
+ for app in self.ishtar_apps:
+ app_models = apps.get_app_config(app).get_models()
+ for model in app_models:
+ if model in admin_site._registry:
+ models.append((app, model))
+ for app, model in models:
# quick test to verify basic access to listing
- base_url = '/admin/{}/{}/'.format(self.module_name,
- model.__name__.lower())
+ base_url = '/admin/{}/{}/'.format(app, model.__name__.lower())
url = base_url
response = self.client.get(url)
self.assertEqual(
response.status_code, 200,
msg="Can not access admin list for {}.".format(model))
- if model in self.models_with_data:
+ nb = model.objects.count()
+ url = base_url + "add/"
+ response = self.client.get(url)
+ if app + "." + model.__name__ in self.readonly_models:
+ continue
+ self.assertEqual(
+ response.status_code, 200,
+ msg="Can not access admin add page for {}.".format(model))
+ self.assertEqual(
+ nb, model.objects.count(),
+ msg="A ghost object have been created on access to add page "
+ "for {}.".format(model))
+
+ if nb:
url = base_url + "{}/change/".format(model.objects.all()[0].pk)
response = self.client.get(url)
self.assertEqual(
@@ -1046,6 +1074,15 @@ class IshtarBasicTest(TestCase):
self.assertEqual(response.status_code, 200)
self.assertIn('class="sheet"', response.content)
+ def test_town_cache(self):
+ models.Town.objects.create(name="Sin City", numero_insee="99999")
+ town = models.Town.objects.get(numero_insee="99999")
+ self.assertEqual(town.cached_label, "Sin City - 99")
+ town.year = 2050
+ town.save()
+ town = models.Town.objects.get(numero_insee="99999")
+ self.assertEqual(town.cached_label, "Sin City - 99 (2050)")
+
class GeomaticTest(TestCase):
def test_post_save_point(self):
diff --git a/ishtar_common/utils.py b/ishtar_common/utils.py
index c6a4032f0..5d9e85c60 100644
--- a/ishtar_common/utils.py
+++ b/ishtar_common/utils.py
@@ -104,9 +104,12 @@ def cached_label_changed(sender, **kwargs):
setattr(instance, cached_label, lbl)
changed = True
if changed:
+ instance._search_updated = False
if hasattr(instance, '_cascade_change') and instance._cascade_change:
instance.skip_history_when_saving = True
instance.save()
+ if hasattr(instance, 'update_search_vector'):
+ instance.update_search_vector()
updated = False
if hasattr(instance, '_cached_labels_bulk_update'):
updated = instance._cached_labels_bulk_update()
@@ -117,6 +120,7 @@ def cached_label_changed(sender, **kwargs):
item.test_obj = instance.test_obj
cached_label_changed(item.__class__, instance=item)
+
SHORTIFY_STR = ugettext(" (...)")
@@ -289,3 +293,43 @@ def get_all_related_objects(model):
and f.auto_created and not f.concrete
]
+
+def merge_tsvectors(vectors):
+ """
+ Parse tsvectors and merge them into a single string
+ :param vectors: list of tsvector strings
+ :return: merged tsvector
+ """
+ result_dict = {}
+ for vector in vectors:
+ if not vector:
+ continue
+
+ current_position = 0
+ if result_dict:
+ for key in result_dict:
+ max_position = max(result_dict[key])
+ if max_position > current_position:
+ current_position = max_position
+
+ for dct_member in vector.split(" "):
+ splitted = dct_member.split(':')
+ key = ":".join(splitted[:-1])
+ positions = splitted[-1]
+ key = key[1:-1] # remove quotes
+ positions = [int(pos) + current_position
+ for pos in positions.split(',')]
+ if key in result_dict:
+ result_dict[key] += positions
+ else:
+ result_dict[key] = positions
+
+ # {'lamelie': [1, 42, 5]} => {'lamelie': "1,42,5"}
+ result_dict = {k: ",".join([str(val) for val in result_dict[k]])
+ for k in result_dict}
+ # {'lamelie': "1,5", "hagarde": "2", "regarde": "4"} =>
+ # "'lamelie':1,5 'hagarde':2 'regarde':4"
+ result = " ".join(["'{}':{}".format(k, result_dict[k])
+ for k in result_dict])
+
+ return result
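A quick illustration of merge_tsvectors, based on the inline examples above (the words are placeholders; key order in the result may vary):

    vectors = ["'lamelie':1 'regarde':2", "'hagarde':1"]
    # positions of the second vector are shifted past the end of the first one
    merge_tsvectors(vectors)
    # => "'lamelie':1 'regarde':2 'hagarde':3"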
diff --git a/ishtar_common/views.py b/ishtar_common/views.py
index 997acd7df..8d475aff5 100644
--- a/ishtar_common/views.py
+++ b/ishtar_common/views.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (C) 2010-2016 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
+# Copyright (C) 2010-2017 Étienne Loks <etienne.loks_AT_peacefrogsDOTnet>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -21,13 +21,7 @@ from tidylib import tidy_document as tidy
from copy import copy, deepcopy
import csv
-import cStringIO as StringIO
import datetime
-
-import reportlab
-reportlab.Version = "2.2" # stupid hack for an old library...
-import ho.pisa as pisa
-
import json
import logging
from markdown import markdown
@@ -35,12 +29,16 @@ import optparse
import re
from tempfile import NamedTemporaryFile
import unicodedata
+from weasyprint import HTML, CSS
+from weasyprint.fonts import FontConfiguration
from extra_views import ModelFormSetView
from django.conf import settings
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
+from django.contrib.postgres.search import SearchQuery
+from django.contrib.staticfiles.templatetags.staticfiles import static
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db.models import Q, ImageField
@@ -814,6 +812,23 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
dct.pop(k)
# manage hierarchic conditions
for req in dct.copy():
+ if req.endswith('town__pk') or req.endswith('towns__pk'):
+ val = dct.pop(req)
+ reqs = Q(**{req: val})
+ base_req = req[:-2] + '__'
+ req = base_req[:]
+ for idx in range(HIERARCHIC_LEVELS):
+ req = req[:-2] + 'parents__pk'
+ q = Q(**{req: val})
+ reqs |= q
+ req = base_req[:]
+ for idx in range(HIERARCHIC_LEVELS):
+ req = req[:-2] + 'children__pk'
+ q = Q(**{req: val})
+ reqs |= q
+ and_reqs.append(reqs)
+ continue
+
for k_hr in HIERARCHIC_FIELDS:
if type(req) in (list, tuple):
val = dct.pop(req)
@@ -829,12 +844,15 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
val = dct.pop(req)
reqs = Q(**{req: val})
req = req[:-2] + '__'
- for idx in xrange(HIERARCHIC_LEVELS):
+ for idx in range(HIERARCHIC_LEVELS):
req = req[:-2] + 'parent__pk'
q = Q(**{req: val})
reqs |= q
and_reqs.append(reqs)
break
+ if 'search_vector' in dct:
+ dct['search_vector'] = SearchQuery(
+ dct['search_vector'], config=settings.ISHTAR_SEARCH_LANGUAGE)
query = Q(**dct)
for k, or_req in or_reqs:
alt_dct = dct.copy()
@@ -908,6 +926,9 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
items = model.objects.filter(query).distinct()
# print(items.query)
+ if 'search_vector' in dct: # for serialization
+ dct['search_vector'] = dct['search_vector'].value
+
# table cols
if own_table_cols:
table_cols = own_table_cols
@@ -1309,19 +1330,25 @@ def show_item(model, name, extra_dct=None):
elif doc_type == 'pdf':
tpl = loader.get_template('ishtar/sheet_%s_pdf.html' % name)
context_instance['output'] = 'PDF'
- content = tpl.render(context_instance, request)
- result = StringIO.StringIO()
- html = content.encode('utf-8')
- html = html.replace("<table", "<pdf:nextpage/><table repeat='1'")
- pdf = pisa.pisaDocument(StringIO.StringIO(html), result,
- encoding='utf-8')
- response = HttpResponse(result.getvalue(),
- content_type='application/pdf')
+ html = tpl.render(context_instance, request)
+ font_config = FontConfiguration()
+ css = CSS(string='''
+ @font-face {
+ font-family: Gentium;
+ src: url(%s);
+ }
+ body{
+ font-family: Gentium
+ }
+ ''' % (static("gentium/GentiumPlus-R.ttf")))
+ css2 = CSS(filename=settings.STATIC_ROOT + '/media/style_basic.css')
+ pdf = HTML(string=html, base_url=request.build_absolute_uri()
+ ).write_pdf(stylesheets=[css, css2],
+ font_config=font_config)
+ response = HttpResponse(pdf, content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' % \
filename
- if not pdf.err:
- return response
- return HttpResponse(content, content_type="application/xhtml")
+ return response
else:
tpl = loader.get_template('ishtar/sheet_%s_window.html' % name)
content = tpl.render(context_instance, request)
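The get_item() change above routes the search_vector criterion through PostgreSQL full-text search; a minimal sketch of the equivalent query (model and search term are illustrative):

    from django.conf import settings
    from django.contrib.postgres.search import SearchQuery

    from ishtar_common import models

    # rows whose precomputed search_vector matches the lexemes of "dolmen"
    models.Person.objects.filter(
        search_vector=SearchQuery("dolmen",
                                  config=settings.ISHTAR_SEARCH_LANGUAGE))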
diff --git a/ishtar_common/wizards.py b/ishtar_common/wizards.py
index 701f6eca3..f86e03df0 100644
--- a/ishtar_common/wizards.py
+++ b/ishtar_common/wizards.py
@@ -737,6 +737,9 @@ class Wizard(NamedUrlWizardView):
if has_problemetic_null:
continue
+ if hasattr(model, 'data') and 'data' not in value:
+ value['data'] = {}
+
if get_or_create:
value, created = model.objects.get_or_create(
**value)
diff --git a/requirements.txt b/requirements.txt
index 2b41343a2..d61697b5a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,19 +1,20 @@
+six>=1.9
psycopg2==2.5.4
django-registration==2.2
django==1.11
Pillow==3.4.2
-pisa==3.0.33
-reportlab==3.1.8
+WeasyPrint==0.41
+html5lib==0.999999999
+
dbf==0.96.003
python-memcached==1.57
unicodecsv==0.14.1
pytidylib==0.2.1
lxml==3.4.0
-html5lib==0.999
django-extra-views==0.2.4
-beautifulsoup4==4.3.2
+beautifulsoup4==4.5.3
markdown==2.5.1
django-ajax-selects==1.6.0
diff --git a/static/gis/js/OLMapWidget.js b/static/gis/js/OLMapWidget.js
deleted file mode 100644
index c101c65fc..000000000
--- a/static/gis/js/OLMapWidget.js
+++ /dev/null
@@ -1,376 +0,0 @@
-(function() {
-/**
- * Transforms an array of features to a single feature with the merged
- * geometry of geom_type
- */
-OpenLayers.Util.properFeatures = function(features, geom_type) {
- if (features.constructor == Array) {
- var geoms = [];
- for (var i=0; i<features.length; i++) {
- geoms.push(features[i].geometry);
- }
- var geom = new geom_type(geoms);
- features = new OpenLayers.Feature.Vector(geom);
- }
- return features;
-}
-
-/**
- * @requires OpenLayers/Format/WKT.js
- */
-
-/**
- * Class: OpenLayers.Format.DjangoWKT
- * Class for reading Well-Known Text, with workarounds to successfully parse
- * geometries and collections as returned by django.contrib.gis.geos.
- *
- * Inherits from:
- * - <OpenLayers.Format.WKT>
- */
-
-OpenLayers.Format.DjangoWKT = OpenLayers.Class(OpenLayers.Format.WKT, {
- initialize: function(options) {
- OpenLayers.Format.WKT.prototype.initialize.apply(this, [options]);
- this.regExes.justComma = /\s*,\s*/;
- },
-
- parse: {
- 'point': function(str) {
- var coords = OpenLayers.String.trim(str).split(this.regExes.spaces);
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.Point(coords[0], coords[1])
- );
- },
-
- 'multipoint': function(str) {
- var point;
- var points = OpenLayers.String.trim(str).split(this.regExes.justComma);
- var components = [];
- for(var i=0, len=points.length; i<len; ++i) {
- point = points[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.point.apply(this, [point]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiPoint(components)
- );
- },
-
- 'linestring': function(str) {
- var points = OpenLayers.String.trim(str).split(',');
- var components = [];
- for(var i=0, len=points.length; i<len; ++i) {
- components.push(this.parse.point.apply(this, [points[i]]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.LineString(components)
- );
- },
-
- 'multilinestring': function(str) {
- var line;
- var lines = OpenLayers.String.trim(str).split(this.regExes.parenComma);
- var components = [];
- for(var i=0, len=lines.length; i<len; ++i) {
- line = lines[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.linestring.apply(this, [line]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiLineString(components)
- );
- },
-
- 'polygon': function(str) {
- var ring, linestring, linearring;
- var rings = OpenLayers.String.trim(str).split(this.regExes.parenComma);
- var components = [];
- for(var i=0, len=rings.length; i<len; ++i) {
- ring = rings[i].replace(this.regExes.trimParens, '$1');
- linestring = this.parse.linestring.apply(this, [ring]).geometry;
- linearring = new OpenLayers.Geometry.LinearRing(linestring.components);
- components.push(linearring);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.Polygon(components)
- );
- },
-
- 'multipolygon': function(str) {
- var polygon;
- var polygons = OpenLayers.String.trim(str).split(this.regExes.doubleParenComma);
- var components = [];
- for(var i=0, len=polygons.length; i<len; ++i) {
- polygon = polygons[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.polygon.apply(this, [polygon]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiPolygon(components)
- );
- },
-
- 'geometrycollection': function(str) {
- // separate components of the collection with |
- str = str.replace(/,\s*([A-Za-z])/g, '|$1');
- var wktArray = OpenLayers.String.trim(str).split('|');
- var components = [];
- for(var i=0, len=wktArray.length; i<len; ++i) {
- components.push(OpenLayers.Format.WKT.prototype.read.apply(this,[wktArray[i]]));
- }
- return components;
- }
- },
-
- extractGeometry: function(geometry) {
- var type = geometry.CLASS_NAME.split('.')[2].toLowerCase();
- if (!this.extract[type]) {
- return null;
- }
- if (this.internalProjection && this.externalProjection) {
- geometry = geometry.clone();
- geometry.transform(this.internalProjection, this.externalProjection);
- }
- var wktType = type == 'collection' ? 'GEOMETRYCOLLECTION' : type.toUpperCase();
- var data = wktType + '(' + this.extract[type].apply(this, [geometry]) + ')';
- return data;
- },
-
- /**
- * Patched write: successfully writes WKT for geometries and
- * geometrycollections.
- */
- write: function(features) {
- var collection, geometry, type, data, isCollection;
- isCollection = features.geometry.CLASS_NAME == "OpenLayers.Geometry.Collection";
- var pieces = [];
- if (isCollection) {
- collection = features.geometry.components;
- pieces.push('GEOMETRYCOLLECTION(');
- for (var i=0, len=collection.length; i<len; ++i) {
- if (i>0) {
- pieces.push(',');
- }
- pieces.push(this.extractGeometry(collection[i]));
- }
- pieces.push(')');
- } else {
- pieces.push(this.extractGeometry(features.geometry));
- }
- return pieces.join('');
- },
-
- CLASS_NAME: "OpenLayers.Format.DjangoWKT"
-});
-
-function MapWidget(options) {
- this.map = null;
- this.controls = null;
- this.panel = null;
- this.layers = {};
- this.wkt_f = new OpenLayers.Format.DjangoWKT();
-
- // Mapping from OGRGeomType name to OpenLayers.Geometry name
- if (options['geom_name'] == 'Unknown') options['geom_type'] = OpenLayers.Geometry;
- else if (options['geom_name'] == 'GeometryCollection') options['geom_type'] = OpenLayers.Geometry.Collection;
- else options['geom_type'] = eval('OpenLayers.Geometry.' + options['geom_name']);
-
- // Default options
- this.options = {
- color: 'ee9900',
- default_lat: 0,
- default_lon: 0,
- default_zoom: 4,
- is_collection: options.geom_name.indexOf('Multi') > -1 || options.geom_name.indexOf('Collection') > -1,
- layerswitcher: false,
- map_options: {},
- map_srid: 4326,
- modifiable: true,
- mouse_position: false,
- opacity: 0.4,
- point_zoom: 12,
- scale_text: false,
- scrollable: true
- };
-
- // Altering using user-provided options
- for (var property in options) {
- if (options.hasOwnProperty(property)) {
- this.options[property] = options[property];
- }
- }
-
- this.map = this.create_map();
-
- var defaults_style = {
- 'fillColor': '#' + this.options.color,
- 'fillOpacity': this.options.opacity,
- 'strokeColor': '#' + this.options.color
- };
- if (this.options.geom_name == 'LineString') {
- defaults_style['strokeWidth'] = 3;
- }
- var styleMap = new OpenLayers.StyleMap({'default': OpenLayers.Util.applyDefaults(defaults_style, OpenLayers.Feature.Vector.style['default'])});
- this.layers.vector = new OpenLayers.Layer.Vector(" " + this.options.name, {styleMap: styleMap});
- this.map.addLayer(this.layers.vector);
- var wkt = document.getElementById(this.options.id).value;
- if (wkt) {
- var feat = OpenLayers.Util.properFeatures(this.read_wkt(wkt), this.options.geom_type);
- this.write_wkt(feat);
- if (this.options.is_collection) {
- for (var i=0; i<this.num_geom; i++) {
- this.layers.vector.addFeatures([new OpenLayers.Feature.Vector(feat.geometry.components[i].clone())]);
- }
- } else {
- this.layers.vector.addFeatures([feat]);
- }
- this.map.zoomToExtent(feat.geometry.getBounds());
- if (this.options.geom_name == 'Point') {
- this.map.zoomTo(this.options.point_zoom);
- }
- } else {
- this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
- }
- this.layers.vector.events.on({'featuremodified': this.modify_wkt, scope: this});
- this.layers.vector.events.on({'featureadded': this.add_wkt, scope: this});
-
- this.getControls(this.layers.vector);
- this.panel.addControls(this.controls);
- this.map.addControl(this.panel);
- this.addSelectControl();
-
- if (this.options.mouse_position) {
- this.map.addControl(new OpenLayers.Control.MousePosition());
- }
- if (this.options.scale_text) {
- this.map.addControl(new OpenLayers.Control.Scale());
- }
- if (this.options.layerswitcher) {
- this.map.addControl(new OpenLayers.Control.LayerSwitcher());
- }
- if (!this.options.scrollable) {
- this.map.getControlsByClass('OpenLayers.Control.Navigation')[0].disableZoomWheel();
- }
- if (wkt) {
- if (this.options.modifiable) {
- this.enableEditing();
- }
- } else {
- this.enableDrawing();
- }
-}
-
-MapWidget.prototype.create_map = function() {
- var map = new OpenLayers.Map(this.options.map_id, this.options.map_options);
- if (this.options.base_layer) this.layers.base = this.options.base_layer;
- else this.layers.base = new OpenLayers.Layer.WMS('OpenLayers WMS', 'http://vmap0.tiles.osgeo.org/wms/vmap0', {layers: 'basic'});
- map.addLayer(this.layers.base);
- return map
-};
-
-MapWidget.prototype.get_ewkt = function(feat) {
- return "SRID=" + this.options.map_srid + ";" + this.wkt_f.write(feat);
-};
-
-MapWidget.prototype.read_wkt = function(wkt) {
- var prefix = 'SRID=' + this.options.map_srid + ';'
- if (wkt.indexOf(prefix) === 0) {
- wkt = wkt.slice(prefix.length);
- }
- return this.wkt_f.read(wkt);
-};
-
-MapWidget.prototype.write_wkt = function(feat) {
- feat = OpenLayers.Util.properFeatures(feat, this.options.geom_type);
- if (this.options.is_collection) {
- this.num_geom = feat.geometry.components.length;
- } else {
- this.num_geom = 1;
- }
- document.getElementById(this.options.id).value = this.get_ewkt(feat);
-};
-
-MapWidget.prototype.add_wkt = function(event) {
- if (this.options.is_collection) {
- var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
- for (var i=0; i<this.layers.vector.features.length; i++) {
- feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
- }
- this.write_wkt(feat);
- } else {
- if (this.layers.vector.features.length > 1) {
- old_feats = [this.layers.vector.features[0]];
- this.layers.vector.removeFeatures(old_feats);
- this.layers.vector.destroyFeatures(old_feats);
- }
- this.write_wkt(event.feature);
- }
-};
-
-MapWidget.prototype.modify_wkt = function(event) {
- if (this.options.is_collection) {
- if (this.options.geom_name == 'MultiPoint') {
- this.add_wkt(event);
- return;
- } else {
- var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
- for (var i=0; i<this.num_geom; i++) {
- feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
- }
- this.write_wkt(feat);
- }
- } else {
- this.write_wkt(event.feature);
- }
-};
-
-MapWidget.prototype.deleteFeatures = function() {
- this.layers.vector.removeFeatures(this.layers.vector.features);
- this.layers.vector.destroyFeatures();
-};
-
-MapWidget.prototype.clearFeatures = function() {
- this.deleteFeatures();
- document.getElementById(this.options.id).value = '';
- this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
-};
-
-MapWidget.prototype.defaultCenter = function() {
- var center = new OpenLayers.LonLat(this.options.default_lon, this.options.default_lat);
- if (this.options.map_srid) {
- return center.transform(new OpenLayers.Projection("EPSG:4326"), this.map.getProjectionObject());
- }
- return center;
-};
-
-MapWidget.prototype.addSelectControl = function() {
- var select = new OpenLayers.Control.SelectFeature(this.layers.vector, {'toggle': true, 'clickout': true});
- this.map.addControl(select);
- select.activate();
-};
-
-MapWidget.prototype.enableDrawing = function () {
- this.map.getControlsByClass('OpenLayers.Control.DrawFeature')[0].activate();
-};
-
-MapWidget.prototype.enableEditing = function () {
- this.map.getControlsByClass('OpenLayers.Control.ModifyFeature')[0].activate();
-};
-
-MapWidget.prototype.getControls = function(layer) {
- this.panel = new OpenLayers.Control.Panel({'displayClass': 'olControlEditingToolbar'});
- this.controls = [new OpenLayers.Control.Navigation()];
- if (!this.options.modifiable && layer.features.length)
- return;
- if (this.options.geom_name.indexOf('LineString') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Path, {'displayClass': 'olControlDrawFeaturePath'}));
- }
- if (this.options.geom_name.indexOf('Polygon') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Polygon, {'displayClass': 'olControlDrawFeaturePolygon'}));
- }
- if (this.options.geom_name.indexOf('Point') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Point, {'displayClass': 'olControlDrawFeaturePoint'}));
- }
- if (this.options.modifiable) {
- this.controls.push(new OpenLayers.Control.ModifyFeature(layer, {'displayClass': 'olControlModifyFeature'}));
- }
-};
-window.MapWidget = MapWidget;
-})();
diff --git a/version.py b/version.py
index 42b630409..38f3d146e 100644
--- a/version.py
+++ b/version.py
@@ -1,5 +1,5 @@
-# 1.99.10
-VERSION = (1, 99, 10)
+# 1.99.11
+VERSION = (1, 99, 11)
def get_version():