-rw-r--r--  UPGRADE.md | 7
-rw-r--r--  archaeological_context_records/admin.py | 2
-rw-r--r--  archaeological_context_records/forms.py | 1
-rw-r--r--  archaeological_context_records/migrations/0010_auto_20171011_1644.py | 26
-rw-r--r--  archaeological_context_records/migrations/0011_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_context_records/migrations/0012_auto_20171017_1840.py | 26
-rw-r--r--  archaeological_context_records/models.py | 9
-rw-r--r--  archaeological_context_records/templates/ishtar/sheet_contextrecord.html | 2
-rw-r--r--  archaeological_context_records/tests.py | 60
-rw-r--r--  archaeological_files/admin.py | 4
-rw-r--r--  archaeological_files/migrations/0008_auto_20171011_1644.py | 26
-rw-r--r--  archaeological_files/migrations/0009_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_files/migrations/0010_auto_20171017_1840.py | 26
-rw-r--r--  archaeological_files/models.py | 3
-rw-r--r--  archaeological_files/templates/ishtar/sheet_file.html | 2
-rw-r--r--  archaeological_finds/admin.py | 11
-rw-r--r--  archaeological_finds/forms.py | 1
-rw-r--r--  archaeological_finds/migrations/0010_auto_20171011_1644.py | 61
-rw-r--r--  archaeological_finds/migrations/0011_auto_20171012_1316.py | 65
-rw-r--r--  archaeological_finds/migrations/0012_auto_20171017_1840.py | 61
-rw-r--r--  archaeological_finds/models_finds.py | 18
-rw-r--r--  archaeological_finds/models_treatments.py | 7
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_find.html | 2
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatment.html | 2
-rw-r--r--  archaeological_finds/templates/ishtar/sheet_treatmentfile.html | 2
-rw-r--r--  archaeological_operations/admin.py | 9
-rw-r--r--  archaeological_operations/forms.py | 1
-rw-r--r--  archaeological_operations/migrations/0009_auto_20171011_1644.py | 51
-rw-r--r--  archaeological_operations/migrations/0010_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_operations/migrations/0011_auto_20171017_1840.py | 51
-rw-r--r--  archaeological_operations/models.py | 7
-rw-r--r--  archaeological_operations/templates/ishtar/sheet_operation.html | 2
-rw-r--r--  archaeological_operations/tests.py | 147
-rw-r--r--  archaeological_operations/tests/operations-with-json-fields.csv | 3
-rw-r--r--  archaeological_warehouse/admin.py | 7
-rw-r--r--  archaeological_warehouse/migrations/0008_auto_20171011_1644.py | 36
-rw-r--r--  archaeological_warehouse/migrations/0009_auto_20171012_1316.py | 25
-rw-r--r--  archaeological_warehouse/migrations/0010_auto_20171017_1840.py | 31
-rw-r--r--  archaeological_warehouse/models.py | 4
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_container.html | 2
-rw-r--r--  archaeological_warehouse/templates/ishtar/sheet_warehouse.html | 2
-rw-r--r--  example_project/settings.py | 5
-rwxr-xr-x  install/ishtar-install | 68
-rw-r--r--  ishtar_common/admin.py | 94
-rw-r--r--  ishtar_common/data_importer.py | 7
-rw-r--r--  ishtar_common/management/commands/update_search_vectors.py | 24
-rw-r--r--  ishtar_common/migrations/0015_auto_20171011_1644.py | 36
-rw-r--r--  ishtar_common/migrations/0016_auto_20171016_1104.py | 30
-rw-r--r--  ishtar_common/migrations/0017_auto_20171016_1320.py | 29
-rw-r--r--  ishtar_common/migrations/0018_auto_20171017_1840.py | 72
-rw-r--r--  ishtar_common/models.py | 205
-rw-r--r--  ishtar_common/static/gentium/GentiumPlus-I.ttf | Bin 0 -> 1818280 bytes
-rw-r--r--  ishtar_common/static/gentium/GentiumPlus-R.ttf | Bin 0 -> 1918536 bytes
-rw-r--r--  ishtar_common/static/gentium/OFL.txt | 94
-rw-r--r--  ishtar_common/static/gentium/README.txt | 88
-rw-r--r--  ishtar_common/templates/ishtar/blocks/sheet_json.html | 11
-rw-r--r--  ishtar_common/tests.py | 45
-rw-r--r--  ishtar_common/utils.py | 44
-rw-r--r--  ishtar_common/views.py | 26
-rw-r--r--  ishtar_common/wizards.py | 3
-rw-r--r--  static/gis/js/OLMapWidget.js | 376
61 files changed, 1697 insertions(+), 437 deletions(-)
diff --git a/UPGRADE.md b/UPGRADE.md
index 34d1560db..de01ca78b 100644
--- a/UPGRADE.md
+++ b/UPGRADE.md
@@ -35,3 +35,10 @@ cd <application-path>
./manage.py migrate --fake archaeological_warehouse 0002_auto_20170414_2123
./manage.py migrate
```
+
+Finally, create indexes for the new full-text search engine:
+
+```
+cd <application-path>
+./manage.py update_search_vectors
+```
diff --git a/archaeological_context_records/admin.py b/archaeological_context_records/admin.py
index 2733fa2ff..d5e4d09b9 100644
--- a/archaeological_context_records/admin.py
+++ b/archaeological_context_records/admin.py
@@ -62,7 +62,7 @@ class ContextRecordAdmin(HistorizedObjectAdmin):
model = models.ContextRecord
form = AdminContextRecordForm
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label', 'datings'
+ 'cached_label', 'datings'
]
admin_site.register(models.ContextRecord, ContextRecordAdmin)
diff --git a/archaeological_context_records/forms.py b/archaeological_context_records/forms.py
index e5c244fde..c310e98fa 100644
--- a/archaeological_context_records/forms.py
+++ b/archaeological_context_records/forms.py
@@ -56,6 +56,7 @@ class OperationFormSelection(forms.Form):
class RecordSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
label = forms.CharField(label=_(u"ID"), max_length=100)
parcel__town = get_town_field()
if settings.COUNTRY == 'fr':
diff --git a/archaeological_context_records/migrations/0010_auto_20171011_1644.py b/archaeological_context_records/migrations/0010_auto_20171011_1644.py
new file mode 100644
index 000000000..379110e44
--- /dev/null
+++ b/archaeological_context_records/migrations/0010_auto_20171011_1644.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0009_auto_20170829_1639'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='contextrecord',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalcontextrecord',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_context_records/migrations/0011_auto_20171012_1316.py b/archaeological_context_records/migrations/0011_auto_20171012_1316.py
new file mode 100644
index 000000000..95b042c43
--- /dev/null
+++ b/archaeological_context_records/migrations/0011_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0010_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='contextrecord',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalcontextrecord',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_context_records/migrations/0012_auto_20171017_1840.py b/archaeological_context_records/migrations/0012_auto_20171017_1840.py
new file mode 100644
index 000000000..6de4abb0f
--- /dev/null
+++ b/archaeological_context_records/migrations/0012_auto_20171017_1840.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_context_records', '0011_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='contextrecord',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalcontextrecord',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_context_records/models.py b/archaeological_context_records/models.py
index 082d4f53c..925a48597 100644
--- a/archaeological_context_records/models.py
+++ b/archaeological_context_records/models.py
@@ -302,7 +302,14 @@ class ContextRecord(BulkUpdatedItem, BaseHistorizedItem,
point_2d = models.PointField(_(u"Point (2D)"), blank=True, null=True)
point = models.PointField(_(u"Point (3D)"), blank=True, null=True, dim=3)
polygon = models.PolygonField(_(u"Polygon"), blank=True, null=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
+ PARENT_SEARCH_VECTORS = ['operation']
+ BASE_SEARCH_VECTORS = ["cached_label", "label", "location",
+ "interpretation", "filling", "datings_comment",
+ "identification__label", "activity__label",
+ "excavation_technic__label"]
+ M2M_SEARCH_VECTORS = ["datings__period__label"]
history = HistoricalRecords()
class Meta:
diff --git a/archaeological_context_records/templates/ishtar/sheet_contextrecord.html b/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
index 170c2d87c..e1ea9019b 100644
--- a/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
+++ b/archaeological_context_records/templates/ishtar/sheet_contextrecord.html
@@ -29,6 +29,8 @@
</ul>
{% field "Comment on datings" item.datings_comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.diameter or item.depth_of_appearance or item.documentations.count or item.description or item.lenght or item.width or item.depth or item.thickness or item.comment %}
<h3>{% trans "Description"%}</h3>
{% field "Description" item.description "<pre>" "</pre>" %}
diff --git a/archaeological_context_records/tests.py b/archaeological_context_records/tests.py
index 89b15fbbf..b0f4b8f9e 100644
--- a/archaeological_context_records/tests.py
+++ b/archaeological_context_records/tests.py
@@ -273,6 +273,20 @@ class ContextRecordTest(ContextRecordInit, TestCase):
cr.operation
)
+ def test_search_vector_update(self):
+ cr = self.create_context_record(force=True)[0]
+ cr = models.ContextRecord.objects.get(pk=cr.pk)
+ cr.label = "Label label"
+ cr.location = "I am heeere"
+ cr.save()
+ for key in ('label', 'heeer'):
+ self.assertIn(key, cr.search_vector)
+ cr.operation.code_patriarche = "PATRIARCHE"
+ cr.operation.save()
+ cr = models.ContextRecord.objects.get(pk=cr.pk)
+ self.assertIn(settings.ISHTAR_OPE_PREFIX.lower() + "patriarch",
+ cr.search_vector)
+
def test_upstream_cache_update(self):
cr = self.create_context_record()[0]
cr_pk = cr.pk
@@ -399,6 +413,44 @@ class ContextRecordSearchTest(ContextRecordInit, TestCase):
models.RecordRelations.objects.create(
left_record=cr_1, right_record=cr_2, relation_type=sym_rel_type)
+ def test_town_search(self):
+ c = Client()
+ c.login(username=self.username, password=self.password)
+
+ data = {'numero_insee': '98989', 'name': 'base_town'}
+ base_town = self.create_towns(datas=data)[-1]
+
+ parcel = self.create_parcel(data={'town': base_town,
+ 'section': 'A', 'parcel_number': '1'})[-1]
+ self.context_records[0].parcel = parcel
+ self.context_records[0].save()
+
+ data = {'numero_insee': '56789', 'name': 'parent_town'}
+ parent_town = self.create_towns(datas=data)[-1]
+ parent_town.children.add(base_town)
+
+ data = {'numero_insee': '01234', 'name': 'child_town'}
+ child_town = self.create_towns(datas=data)[-1]
+ base_town.children.add(child_town)
+
+ # simple search
+ search = {'parcel__town': base_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # parent search
+ search = {'parcel__town': parent_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # child search
+ search = {'parcel__town': child_town.pk}
+ response = c.get(reverse('get-contextrecord'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
def testSearchExport(self):
c = Client()
response = c.get(reverse('get-contextrecord'))
@@ -416,6 +468,14 @@ class ContextRecordSearchTest(ContextRecordInit, TestCase):
{'label': 'cr 1',
'cr_relation_types_0': self.cr_rel_type.pk})
self.assertEqual(json.loads(response.content)['total'], 2)
+ # test search vector
+ response = c.get(reverse('get-contextrecord'),
+ {'search_vector': 'CR'})
+ self.assertEqual(json.loads(response.content)['total'], 2)
+ # the 2 context records have the same operation
+ response = c.get(reverse('get-contextrecord'),
+ {'search_vector': 'op2010'})
+ self.assertEqual(json.loads(response.content)['total'], 2)
# test search between related operations
first_ope = self.operations[0]
first_ope.year = 2010
diff --git a/archaeological_files/admin.py b/archaeological_files/admin.py
index 525f7e840..4dca1afa9 100644
--- a/archaeological_files/admin.py
+++ b/archaeological_files/admin.py
@@ -48,13 +48,15 @@ class FileAdmin(HistorizedObjectAdmin):
'related_file': 'file'
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'raw_general_contractor', 'raw_town_planning_service', 'imports',
+ 'raw_general_contractor', 'raw_town_planning_service',
'cached_label', 'imported_line'
]
model = models.File
+
admin_site.register(models.File, FileAdmin)
+
general_models = [models.FileType, models.PermitType]
if settings.COUNTRY == 'fr':
general_models.append(models.SaisineType)
diff --git a/archaeological_files/migrations/0008_auto_20171011_1644.py b/archaeological_files/migrations/0008_auto_20171011_1644.py
new file mode 100644
index 000000000..33dfbf59e
--- /dev/null
+++ b/archaeological_files/migrations/0008_auto_20171011_1644.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0007_auto_20170826_1152'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='file',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_files/migrations/0009_auto_20171012_1316.py b/archaeological_files/migrations/0009_auto_20171012_1316.py
new file mode 100644
index 000000000..cd33d8243
--- /dev/null
+++ b/archaeological_files/migrations/0009_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0008_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='file',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_files/migrations/0010_auto_20171017_1840.py b/archaeological_files/migrations/0010_auto_20171017_1840.py
new file mode 100644
index 000000000..04eb5b1cc
--- /dev/null
+++ b/archaeological_files/migrations/0010_auto_20171017_1840.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_files', '0009_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='file',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_files/models.py b/archaeological_files/models.py
index 0d5b4b3e8..b0f53f11c 100644
--- a/archaeological_files/models.py
+++ b/archaeological_files/models.py
@@ -204,7 +204,8 @@ class File(ClosedItem, BaseHistorizedItem, OwnPerms, ValueGetter,
mh_listing = models.NullBooleanField(
u"Sur Monument Historique inscrit", blank=True, null=True)
# <-- research archaeology
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
imported_line = models.TextField(_(u"Imported line"), null=True,
blank=True)
history = HistoricalRecords()
diff --git a/archaeological_files/templates/ishtar/sheet_file.html b/archaeological_files/templates/ishtar/sheet_file.html
index 6d64a975f..6eda1124a 100644
--- a/archaeological_files/templates/ishtar/sheet_file.html
+++ b/archaeological_files/templates/ishtar/sheet_file.html
@@ -44,6 +44,8 @@
</ul>
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
<h3>{% trans "Localisation"%}</h3>
{% if item.towns.count %}<p><label>{%trans "Towns"%}</label> <span class='value'>{{ item.towns.all|join:", " }}</span></p>{% endif %}
{% if item.departments.count %}<p><label>{%trans "Departments"%}</label> <span class='value'>{{ item.departments.all|join:", " }}</span></p>{% endif %}
diff --git a/archaeological_finds/admin.py b/archaeological_finds/admin.py
index b8ed15865..6f32365b9 100644
--- a/archaeological_finds/admin.py
+++ b/archaeological_finds/admin.py
@@ -47,11 +47,11 @@ class AdminBaseFindForm(forms.ModelForm):
class BaseFindAdmin(HistorizedObjectAdmin):
list_display = ('label', 'context_record', 'index')
- search_fields = ('label', 'context_record__parcel__operation__name',)
+ search_fields = ('label', 'cache_complete_id',)
model = models.BaseFind
form = AdminBaseFindForm
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'cache_short_id', 'cache_complete_id', 'imports'
+ 'cache_short_id', 'cache_complete_id',
]
admin_site.register(models.BaseFind, BaseFindAdmin)
@@ -69,7 +69,7 @@ class FindAdmin(HistorizedObjectAdmin):
'container': 'container'
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'datings', 'cached_label'
+ 'datings', 'cached_label'
]
admin_site.register(models.Find, FindAdmin)
@@ -97,7 +97,7 @@ class PropertyAdmin(HistorizedObjectAdmin):
'person': 'person',
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'administrative_act', 'imports', ]
+ 'administrative_act']
def has_add_permission(self, request):
return False
@@ -118,7 +118,7 @@ class TreatmentAdmin(HistorizedObjectAdmin):
'container': 'container',
})
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label', 'downstream_lbl', 'upstream_lbl'
+ 'cached_label', 'downstream_lbl', 'upstream_lbl'
]
def has_add_permission(self, request):
@@ -140,7 +140,6 @@ class TreatmentFileAdmin(HistorizedObjectAdmin):
'applicant': 'person',
'applicant_organisation': 'organization',
})
- exclude = ['imports']
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
'cached_label',
]
diff --git a/archaeological_finds/forms.py b/archaeological_finds/forms.py
index aa0ae4621..1f81cf52f 100644
--- a/archaeological_finds/forms.py
+++ b/archaeological_finds/forms.py
@@ -366,6 +366,7 @@ DatingFormSet.form_label = _("Dating")
class FindSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
base_finds__cache_short_id = forms.CharField(label=_(u"Short ID"))
base_finds__cache_complete_id = forms.CharField(label=_(u"Complete ID"))
label = forms.CharField(label=_(u"Free ID"))
diff --git a/archaeological_finds/migrations/0010_auto_20171011_1644.py b/archaeological_finds/migrations/0010_auto_20171011_1644.py
new file mode 100644
index 000000000..ce892e96d
--- /dev/null
+++ b/archaeological_finds/migrations/0010_auto_20171011_1644.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0009_auto_20171010_1644'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='basefind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='find',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalbasefind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalfind',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatment',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatmentfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='property',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='treatment',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='treatmentfile',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_finds/migrations/0011_auto_20171012_1316.py b/archaeological_finds/migrations/0011_auto_20171012_1316.py
new file mode 100644
index 000000000..6fabd578f
--- /dev/null
+++ b/archaeological_finds/migrations/0011_auto_20171012_1316.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0010_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='basefind',
+ name='cache_complete_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Complete ID'),
+ ),
+ migrations.AlterField(
+ model_name='basefind',
+ name='cache_short_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Short ID'),
+ ),
+ migrations.AlterField(
+ model_name='find',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicalbasefind',
+ name='cache_complete_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Complete ID'),
+ ),
+ migrations.AlterField(
+ model_name='historicalbasefind',
+ name='cache_short_id',
+ field=models.TextField(blank=True, db_index=True, help_text='Cached value - do not edit', null=True, verbose_name='Short ID'),
+ ),
+ migrations.AlterField(
+ model_name='historicalfind',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicaltreatment',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='historicaltreatmentfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='treatment',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='treatmentfile',
+ name='cached_label',
+ field=models.TextField(blank=True, db_index=True, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_finds/migrations/0012_auto_20171017_1840.py b/archaeological_finds/migrations/0012_auto_20171017_1840.py
new file mode 100644
index 000000000..8c347b270
--- /dev/null
+++ b/archaeological_finds/migrations/0012_auto_20171017_1840.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_finds', '0011_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='basefind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='find',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalbasefind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalfind',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatment',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaltreatmentfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='property',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='treatment',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='treatmentfile',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_finds/models_finds.py b/archaeological_finds/models_finds.py
index e58d14f7e..8052601bf 100644
--- a/archaeological_finds/models_finds.py
+++ b/archaeological_finds/models_finds.py
@@ -231,14 +231,17 @@ class BaseFind(BulkUpdatedItem, BaseHistorizedItem, OwnPerms):
line = models.LineStringField(_(u"Line"), blank=True, null=True)
polygon = models.PolygonField(_(u"Polygon"), blank=True, null=True)
cache_short_id = models.TextField(
- _(u"Short ID"), blank=True, null=True,
+ _(u"Short ID"), blank=True, null=True, db_index=True,
help_text=_(u"Cached value - do not edit"))
cache_complete_id = models.TextField(
- _(u"Complete ID"), blank=True, null=True,
+ _(u"Complete ID"), blank=True, null=True, db_index=True,
help_text=_(u"Cached value - do not edit"))
history = HistoricalRecords()
RELATED_POST_PROCESS = ['find']
CACHED_LABELS = ['cache_short_id', 'cache_complete_id']
+ PARENT_SEARCH_VECTORS = ['context_record']
+ BASE_SEARCH_VECTORS = ["label", "description", "comment", "cache_short_id",
+ "cache_complete_id"]
class Meta:
verbose_name = _(u"Base find")
@@ -748,9 +751,18 @@ class Find(BulkUpdatedItem, ValueGetter, BaseHistorizedItem, ImageModel,
appraisal_date = models.DateField(_(u"Appraisal date"), blank=True,
null=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
BASKET_MODEL = FindBasket
+ PARENT_SEARCH_VECTORS = ['base_finds']
+ BASE_SEARCH_VECTORS = [
+ "cached_label", "label", "description", "container__location__name",
+ "container__reference", "mark", "comment", "dating_comment",
+ "previous_id"]
+ M2M_SEARCH_VECTORS = [
+ "datings__period__label", "object_types__label", "integrities__label",
+ "remarkabilities__label", "material_types__label"]
class Meta:
verbose_name = _(u"Find")
diff --git a/archaeological_finds/models_treatments.py b/archaeological_finds/models_treatments.py
index 0ffcd87fa..03eeed452 100644
--- a/archaeological_finds/models_treatments.py
+++ b/archaeological_finds/models_treatments.py
@@ -115,7 +115,8 @@ class Treatment(DashboardFormItem, ValueGetter, BaseHistorizedItem,
blank=True, null=True)
target_is_basket = models.BooleanField(_(u"Target a basket"),
default=False)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
class Meta:
@@ -224,6 +225,7 @@ class Treatment(DashboardFormItem, ValueGetter, BaseHistorizedItem,
return values
def pre_save(self):
+ super(Treatment, self).pre_save()
# is not new
if self.pk is not None:
return
@@ -506,7 +508,8 @@ class TreatmentFile(DashboardFormItem, ClosedItem, BaseHistorizedItem,
reception_date = models.DateField(_(u'Reception date'), blank=True,
null=True)
comment = models.TextField(_(u"Comment"), null=True, blank=True)
- cached_label = models.TextField(_(u"Cached name"), null=True, blank=True)
+ cached_label = models.TextField(_(u"Cached name"), null=True, blank=True,
+ db_index=True)
history = HistoricalRecords()
class Meta:
diff --git a/archaeological_finds/templates/ishtar/sheet_find.html b/archaeological_finds/templates/ishtar/sheet_find.html
index efd38e406..68304740d 100644
--- a/archaeological_finds/templates/ishtar/sheet_find.html
+++ b/archaeological_finds/templates/ishtar/sheet_find.html
@@ -58,6 +58,8 @@
{% field "Description" item.description "<pre>" "</pre>" %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.conservatory_state or item.conservatory_comment or item.alterations.count or item.alteration_causes.count or item.preservation_to_considers.count or item.treatment_emergency or item.insurance_value %}
<h4>{% trans "Preservation" %}</h4>
<ul class='form-flex'>
diff --git a/archaeological_finds/templates/ishtar/sheet_treatment.html b/archaeological_finds/templates/ishtar/sheet_treatment.html
index 5fc8f2aac..94f6e20a7 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatment.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatment.html
@@ -42,6 +42,8 @@
{% field "Goal" item.goal "<pre>" "</pre>" %}
{% endif %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% trans "Upstream finds" as finds %}
{% if item.upstream.count %}
{% dynamic_table_document finds 'finds_for_treatment' 'downstream_treatment' item.pk 'TABLE_COLS_FOR_OPE' output %}
diff --git a/archaeological_finds/templates/ishtar/sheet_treatmentfile.html b/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
index f8fb3e0aa..9567d3081 100644
--- a/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
+++ b/archaeological_finds/templates/ishtar/sheet_treatmentfile.html
@@ -26,6 +26,8 @@
</ul>
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.applicant %}
<h3>{% trans "Applicant" %}</h3>
<ul class='form-flex'>
diff --git a/archaeological_operations/admin.py b/archaeological_operations/admin.py
index f1deac188..bf1415989 100644
--- a/archaeological_operations/admin.py
+++ b/archaeological_operations/admin.py
@@ -40,7 +40,7 @@ class AdministrativeActAdmin(HistorizedObjectAdmin):
search_fields = ('year', 'index')
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
'in_charge', 'operator', 'scientist', 'signatory', 'associated_file',
- 'imports', 'departments_label', 'towns_label']
+ 'departments_label', 'towns_label']
model = models.AdministrativeAct
form = make_ajax_form(
models.AdministrativeAct, {'operation': 'operation'}
@@ -69,7 +69,6 @@ class ArchaeologicalSiteAdmin(HistorizedObjectAdmin):
list_display = ('name', 'reference')
search_fields = ('name', 'reference')
model = models.ArchaeologicalSite
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
inlines = [OperationInline]
admin_site.register(models.ArchaeologicalSite, ArchaeologicalSiteAdmin)
@@ -112,7 +111,7 @@ class OperationAdmin(HistorizedObjectAdmin):
search_fields += ['code_patriarche']
model = models.Operation
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'cached_label']
+ 'cached_label']
form = AdminOperationForm
inlines = [ArchaeologicalSiteInline]
@@ -144,7 +143,7 @@ class ParcelAdmin(HistorizedObjectAdmin):
'town': 'town'}
)
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
admin_site.register(models.Parcel, ParcelAdmin)
@@ -196,7 +195,7 @@ class ParcelOwnerAdmin(HistorizedObjectAdmin):
'parcel': 'parcel'}
)
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
admin_site.register(models.ParcelOwner, ParcelOwnerAdmin)
diff --git a/archaeological_operations/forms.py b/archaeological_operations/forms.py
index 651cd740f..841131da6 100644
--- a/archaeological_operations/forms.py
+++ b/archaeological_operations/forms.py
@@ -480,6 +480,7 @@ RecordRelationsFormSet.form_label = _(u"Relations")
class OperationSelect(TableSelect):
+ search_vector = forms.CharField(label=_(u"Full text search"))
year = forms.IntegerField(label=_("Year"))
operation_code = forms.IntegerField(label=_(u"Numeric reference"))
if settings.COUNTRY == 'fr':
diff --git a/archaeological_operations/migrations/0009_auto_20171011_1644.py b/archaeological_operations/migrations/0009_auto_20171011_1644.py
new file mode 100644
index 000000000..18a284a21
--- /dev/null
+++ b/archaeological_operations/migrations/0009_auto_20171011_1644.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0008_auto_20170829_1639'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='administrativeact',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='archaeologicalsite',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaladministrativeact',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicaloperation',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='operation',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='parcel',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='parcelowner',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/archaeological_operations/migrations/0010_auto_20171012_1316.py b/archaeological_operations/migrations/0010_auto_20171012_1316.py
new file mode 100644
index 000000000..3a847a803
--- /dev/null
+++ b/archaeological_operations/migrations/0010_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0009_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='historicaloperation',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AlterField(
+ model_name='operation',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ ]
diff --git a/archaeological_operations/migrations/0011_auto_20171017_1840.py b/archaeological_operations/migrations/0011_auto_20171017_1840.py
new file mode 100644
index 000000000..cd169957a
--- /dev/null
+++ b/archaeological_operations/migrations/0011_auto_20171017_1840.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_operations', '0010_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='administrativeact',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='archaeologicalsite',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaladministrativeact',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicaloperation',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='operation',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='parcel',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='parcelowner',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_operations/models.py b/archaeological_operations/models.py
index bc03ee387..70c1c02ba 100644
--- a/archaeological_operations/models.py
+++ b/archaeological_operations/models.py
@@ -248,6 +248,10 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
'archaeological_sites__reference': _(u"Archaeological sites ("
u"reference)"),
}
+ BASE_SEARCH_VECTORS = ["scientist__raw_name", "cached_label",
+ "common_name", "comment", "address", "old_code"]
+ INT_SEARCH_VECTORS = ["year"]
+ M2M_SEARCH_VECTORS = ["towns__name"]
# fields definition
creation_date = models.DateField(_(u"Creation date"),
@@ -309,6 +313,7 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
code_patriarche = models.TextField(u"Code PATRIARCHE", null=True,
blank=True, unique=True)
TABLE_COLS = ['full_code_patriarche'] + TABLE_COLS
+ BASE_SEARCH_VECTORS = ['code_patriarche'] + BASE_SEARCH_VECTORS
# preventive
fnap_financing = models.FloatField(u"Financement FNAP (%)",
blank=True, null=True)
@@ -340,7 +345,7 @@ class Operation(ClosedItem, BaseHistorizedItem, ImageModel, OwnPerms,
scientific_documentation_comment = models.TextField(
_(u"Comment about scientific documentation"), null=True, blank=True)
cached_label = models.CharField(_(u"Cached name"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
archaeological_sites = models.ManyToManyField(
ArchaeologicalSite, verbose_name=_(u"Archaeological sites"),
blank=True, related_name='operations')
diff --git a/archaeological_operations/templates/ishtar/sheet_operation.html b/archaeological_operations/templates/ishtar/sheet_operation.html
index 5a02236a3..e46db74c7 100644
--- a/archaeological_operations/templates/ishtar/sheet_operation.html
+++ b/archaeological_operations/templates/ishtar/sheet_operation.html
@@ -71,6 +71,8 @@
{% field "Abstract" item.abstract "<pre>" "</pre>" %}
{% field "Comment about scientific documentation" item.scientific_documentation_comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if not next %}
{% if item.towns.count %}
<h3>{% trans "Localisation"%}</h3>
diff --git a/archaeological_operations/tests.py b/archaeological_operations/tests.py
index 0d6908374..b75c02cae 100644
--- a/archaeological_operations/tests.py
+++ b/archaeological_operations/tests.py
@@ -19,10 +19,12 @@
import json
import datetime
+from subprocess import Popen, PIPE
import StringIO
import zipfile
from django.conf import settings
+from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import Q
@@ -37,7 +39,8 @@ from archaeological_operations import views
from ishtar_common.models import OrganizationType, Organization, ItemKey, \
ImporterType, IshtarUser, TargetKey, ImporterModel, IshtarSiteProfile, \
Town, ImporterColumn, Person, Author, SourceType, AuthorType, \
- DocumentTemplate, PersonType, TargetKeyGroup
+ DocumentTemplate, PersonType, TargetKeyGroup, JsonDataField, \
+ JsonDataSection, ImportTarget, FormaterType
from archaeological_files.models import File, FileType
from archaeological_context_records.models import Unit
@@ -453,6 +456,24 @@ class ImportOperationTest(ImportTest, TestCase):
impt.delete()
self.assertEqual(parcel_count - 3, models.Parcel.objects.count())
+ def test_json_fields(self):
+ importer, form = self.init_ope_import("operations-with-json-fields.csv")
+ col = ImporterColumn.objects.create(importer_type=importer,
+ col_number=11)
+ formater_type = FormaterType.objects.get(
+ formater_type='IntegerFormater')
+ ImportTarget.objects.create(
+ column=col, target='data__autre_refs__arbitraire',
+ formater_type=formater_type)
+ impt = form.save(self.ishtar_user)
+ impt.initialize()
+ self.init_ope_targetkey(imp=impt)
+ impt.importation()
+ ope1 = models.Operation.objects.get(code_patriarche='4200')
+ self.assertEqual(ope1.data, {u'autre_refs': {u'arbitraire': 789}})
+ ope2 = models.Operation.objects.get(code_patriarche='4201')
+ self.assertEqual(ope2.data, {u'autre_refs': {u'arbitraire': 456}})
+
class ParcelTest(ImportTest, TestCase):
fixtures = OPERATION_TOWNS_FIXTURES
@@ -895,6 +916,21 @@ class OperationTest(TestCase, OperationInitTest):
self.assertEqual(ope_id, 'OP2011-1')
self.assertEqual(town, self.towns[0].name)
+ def test_search_vector_update(self):
+ operation = self.operations[0]
+ town = self.create_towns({'numero_insee': '12346', 'name': 'Daisy'})[-1]
+ operation.towns.add(town)
+ town = self.create_towns(
+ {'numero_insee': '12347', 'name': 'Dirty old'})[-1]
+ operation.towns.add(town)
+ operation = models.Operation.objects.get(pk=operation.pk)
+ operation.comment = u"Zardoz"
+ operation.code_patriarche = u"HUIAAA5"
+ operation.save()
+ for key in ('old', 'op2010', 'dirty', 'daisy', "'2010'", "zardoz",
+ "huiaaa5"):
+ self.assertIn(key, operation.search_vector)
+
def test_cache_bulk_update(self):
if settings.USE_SPATIALITE_FOR_TESTS:
# using views - can only be tested with postgresql
@@ -1008,6 +1044,32 @@ class OperationTest(TestCase, OperationInitTest):
self.assertEqual(response.status_code, 200)
self.assertIn('class="sheet"', response.content)
+ def test_show_pdf(self):
+ operation = self.operations[0]
+ c = Client()
+ response = c.get(reverse('show-operation',
+ kwargs={'pk': operation.pk, 'type': 'pdf'}))
+ self.assertEqual(response.status_code, 200)
+ # empty content when not allowed
+ self.assertEqual(response.content, "")
+ c.login(username=self.username, password=self.password)
+ response = c.get(reverse('show-operation',
+ kwargs={'pk': operation.pk, 'type': 'pdf'}))
+ self.assertEqual(response.status_code, 200)
+ f = StringIO.StringIO(response.content)
+ filetype = Popen("/usr/bin/file -b --mime -", shell=True, stdout=PIPE,
+ stdin=PIPE).communicate(f.read(1024))[0].strip()
+ self.assertTrue(filetype.startswith('application/pdf'))
+
+ def test_show_odt(self):
+ operation = self.operations[0]
+ c = Client()
+ response = c.get(reverse('show-operation',
+                                 kwargs={'pk': operation.pk, 'type': 'odt'}))
+ self.assertEqual(response.status_code, 200)
+ # empty content when not allowed
+ self.assertEqual(response.content, "")
+ c.login(username=self.username, password=self.password)
response = c.get(reverse('show-operation', kwargs={'pk': operation.pk,
'type': 'odt'}))
self.assertEqual(response.status_code, 200)
@@ -1015,6 +1077,53 @@ class OperationTest(TestCase, OperationInitTest):
z = zipfile.ZipFile(f)
self.assertIsNone(z.testzip())
+ def test_json(self):
+ operation = self.operations[0]
+ operation.data = {"groundhog": {"number": 53444,
+ "awake_state": u"réveillée",
+ "with_feather": "Oui"},
+ "frog_number": 32303}
+ operation.save()
+
+ content_type = ContentType.objects.get_for_model(operation)
+ groundhog_section = JsonDataSection.objects.create(
+ name="Marmotte", content_type=content_type)
+ JsonDataField.objects.create(name=u"État d'éveil",
+ key='groundhog__awake_state',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Avec plume",
+ key='groundhog__with_feather',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Zzzzzzzz",
+ key='groundhog__zzz',
+ content_type=content_type,
+ section=groundhog_section)
+ JsonDataField.objects.create(name=u"Grenouille",
+ key='frog_number',
+ content_type=content_type)
+
+ c = Client()
+ c.login(username=self.username, password=self.password)
+ response = c.get(reverse('show-operation', kwargs={'pk': operation.pk}))
+ self.assertEqual(response.status_code, 200)
+ self.assertIn('class="sheet"', response.content)
+ self.assertIn(u"Marmotte".encode('utf-8'), response.content)
+ self.assertIn(u"État d&#39;éveil".encode('utf-8'), response.content)
+ self.assertIn(u"réveillée".encode('utf-8'), response.content)
+ self.assertIn(u"Grenouille".encode('utf-8'), response.content)
+ self.assertIn(u"32303".encode('utf-8'), response.content)
+ self.assertNotIn(u"53444".encode('utf-8'), response.content)
+ self.assertNotIn(u"Zzzzzzzz".encode('utf-8'), response.content)
+
+ operation.data = {}
+ operation.save()
+ response = c.get(reverse('show-operation', kwargs={'pk': operation.pk}))
+ self.assertEqual(response.status_code, 200)
+ self.assertIn('class="sheet"', response.content)
+ self.assertNotIn(u"Marmotte".encode('utf-8'), response.content)
+
class OperationSearchTest(TestCase, OperationInitTest):
fixtures = FILE_FIXTURES
@@ -1104,6 +1213,42 @@ class OperationSearchTest(TestCase, OperationInitTest):
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['total'], 1)
+ def test_town_search(self):
+ c = Client()
+ c.login(username=self.username, password=self.password)
+
+ data = {'numero_insee': '98989', 'name': 'base_town'}
+ base_town = self.create_towns(datas=data)[-1]
+
+ data = {'numero_insee': '56789', 'name': 'parent_town'}
+ parent_town = self.create_towns(datas=data)[-1]
+ parent_town.children.add(base_town)
+
+ data = {'numero_insee': '01234', 'name': 'child_town'}
+ child_town = self.create_towns(datas=data)[-1]
+ base_town.children.add(child_town)
+
+ ope = self.operations[1]
+ ope.towns.add(base_town)
+
+ # simple search
+ search = {'towns': base_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # parent search
+ search = {'towns': parent_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
+ # child search
+ search = {'towns': child_town.pk}
+ response = c.get(reverse('get-operation'), search)
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(json.loads(response.content)['total'], 1)
+
def testOwnSearch(self):
c = Client()
response = c.get(reverse('get-operation'), {'year': '2010'})
diff --git a/archaeological_operations/tests/operations-with-json-fields.csv b/archaeological_operations/tests/operations-with-json-fields.csv
new file mode 100644
index 000000000..015497b4c
--- /dev/null
+++ b/archaeological_operations/tests/operations-with-json-fields.csv
@@ -0,0 +1,3 @@
+code OA,region,type operation,intitule operation,operateur,responsable operation,date debut terrain,date fin terrain,chronologie generale,identifiant document georeferencement,notice scientifique,numéro arbitraire
+4201,Bourgogne,Fouille programmée,Oppìdum de Paris 2,L'opérateur,,2000/01/31,2002/12/31,Age du Fer,,456
+4200,Bourgogne,Fouille programmée,Oppìdum de Paris,L'opérateur,Jean Sui-Resp'on Sablé,2000/01/22,2002/12/31,Age du Fer & Gallo-Romain & Néolithik & Moderne,,789
diff --git a/archaeological_warehouse/admin.py b/archaeological_warehouse/admin.py
index deaffde94..f2b44fcf0 100644
--- a/archaeological_warehouse/admin.py
+++ b/archaeological_warehouse/admin.py
@@ -36,9 +36,7 @@ class WarehouseAdmin(HistorizedObjectAdmin):
'town': 'town',
'person_in_charge': 'person'
})
- readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports'
- ]
+
admin_site.register(models.Warehouse, WarehouseAdmin)
@@ -48,6 +46,7 @@ class ContainerTypeAdmin(admin.ModelAdmin):
'volume')
model = models.ContainerType
+
admin_site.register(models.ContainerType, ContainerTypeAdmin)
@@ -56,7 +55,7 @@ class ContainerAdmin(HistorizedObjectAdmin):
list_filter = ("container_type",)
model = models.Container
readonly_fields = HistorizedObjectAdmin.readonly_fields + [
- 'imports', 'history_date'
+ 'history_date'
]
form = make_ajax_form(model, {
'location': 'warehouse',
diff --git a/archaeological_warehouse/migrations/0008_auto_20171011_1644.py b/archaeological_warehouse/migrations/0008_auto_20171011_1644.py
new file mode 100644
index 000000000..82245647d
--- /dev/null
+++ b/archaeological_warehouse/migrations/0008_auto_20171011_1644.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0007_auto_20171004_1125'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='collection',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='container',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='warehouse',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AlterField(
+ model_name='container',
+ name='index',
+ field=models.IntegerField(default=0, verbose_name='Container ID'),
+ ),
+ ]
diff --git a/archaeological_warehouse/migrations/0009_auto_20171012_1316.py b/archaeological_warehouse/migrations/0009_auto_20171012_1316.py
new file mode 100644
index 000000000..a25a2d2f2
--- /dev/null
+++ b/archaeological_warehouse/migrations/0009_auto_20171012_1316.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-12 13:16
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0008_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='container',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Localisation'),
+ ),
+ migrations.AlterField(
+ model_name='container',
+ name='cached_location',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached location'),
+ ),
+ ]
diff --git a/archaeological_warehouse/migrations/0010_auto_20171017_1840.py b/archaeological_warehouse/migrations/0010_auto_20171017_1840.py
new file mode 100644
index 000000000..e45c44674
--- /dev/null
+++ b/archaeological_warehouse/migrations/0010_auto_20171017_1840.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('archaeological_warehouse', '0009_auto_20171012_1316'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='collection',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='container',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='warehouse',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ ]
diff --git a/archaeological_warehouse/models.py b/archaeological_warehouse/models.py
index a7865cf0e..fdd3a5e63 100644
--- a/archaeological_warehouse/models.py
+++ b/archaeological_warehouse/models.py
@@ -288,9 +288,9 @@ class Container(LightHistorizedItem, ImageModel):
reference = models.CharField(_(u"Container ref."), max_length=40)
comment = models.TextField(_(u"Comment"), null=True, blank=True)
cached_label = models.CharField(_(u"Localisation"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
cached_location = models.CharField(_(u"Cached location"), max_length=500,
- null=True, blank=True)
+ null=True, blank=True, db_index=True)
index = models.IntegerField(u"Container ID", default=0)
external_id = models.TextField(_(u"External ID"), blank=True, null=True)
auto_external_id = models.BooleanField(
diff --git a/archaeological_warehouse/templates/ishtar/sheet_container.html b/archaeological_warehouse/templates/ishtar/sheet_container.html
index 7845da2c1..7c9f84a32 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_container.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_container.html
@@ -23,6 +23,8 @@
{% field "Location" item.precise_location %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.finds.count %}
<h4>{% trans "Content" %}</h4>
{% dynamic_table_document finds 'finds' 'container' item.pk 'TABLE_COLS' output 'large' %}
diff --git a/archaeological_warehouse/templates/ishtar/sheet_warehouse.html b/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
index 3d39f9845..de93f9af3 100644
--- a/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
+++ b/archaeological_warehouse/templates/ishtar/sheet_warehouse.html
@@ -17,6 +17,8 @@
{% include "ishtar/blocks/sheet_address_section.html" %}
{% field "Comment" item.comment "<pre>" "</pre>" %}
+{% include "ishtar/blocks/sheet_json.html" %}
+
{% if item.containers.count %}
<h4>{% trans "Containers" %}</h4>
{% dynamic_table_document '' 'containers' 'location' item.pk 'TABLE_COLS' output %}
diff --git a/example_project/settings.py b/example_project/settings.py
index ea50daffb..f631047cb 100644
--- a/example_project/settings.py
+++ b/example_project/settings.py
@@ -32,10 +32,6 @@ BASE_URL = "/"
URL_PATH = ""
EXTRA_VERSION = 'git'
-STATICFILES_DIRS = (
- ROOT_PATH + "../static/",
-)
-
ODT_TEMPLATE = ROOT_PATH + "../ishtar_common/static/template.odt"
LOGIN_REDIRECT_URL = "/" + URL_PATH
@@ -240,6 +236,7 @@ ISHTAR_PERIODS = {}
ISHTAR_PERMIT_TYPES = {}
ISHTAR_DOC_TYPES = {u"undefined": u"Undefined"}
+ISHTAR_SEARCH_LANGUAGE = "french"
ISHTAR_DPTS = []
diff --git a/install/ishtar-install b/install/ishtar-install
index 36b937388..2a9d6fc5b 100755
--- a/install/ishtar-install
+++ b/install/ishtar-install
@@ -118,16 +118,11 @@ do_install() {
;;
debian)
- MAINBACKS=`cat /etc/apt/sources.list | grep jessie-backports |grep -v "^#"`
- ALLBACKS=''
- if [ "$(ls -A /etc/apt/sources.list.d/)" ]; then
- ALLBACKS=`cat /etc/apt/sources.list.d/* | grep jessie-backports |grep -v "^#"`
- fi
- if [ "$ALLBACKS" != '' ] || [ "$MAINBACKS" != '' ]; then
- backports_activated='true';
- fi
dist_version="$(cat /etc/debian_version | sed 's/\/.*//' | sed 's/\..*//')"
case "$dist_version" in
+ 9)
+ dist_version="stretch"
+ ;;
8)
dist_version="jessie"
;;
@@ -135,6 +130,16 @@ do_install() {
dist_version="wheezy"
;;
esac
+ set +e
+ MAINBACKS=`cat /etc/apt/sources.list | grep $dist_version'-backports' |grep -v "^#"`
+ ALLBACKS=''
+ if [ "$(ls -A /etc/apt/sources.list.d/)" ]; then
+ ALLBACKS=`cat /etc/apt/sources.list.d/* | grep $dist_version'-backports' |grep -v "^#"`
+ fi
+ set -e
+ if [ "$ALLBACKS" != '' ] || [ "$MAINBACKS" != '' ]; then
+ backports_activated='true';
+ fi
;;
oracleserver)
@@ -282,7 +287,7 @@ EOF
# Run setup for each distro accordingly
case "$lsb_dist" in
ubuntu|debian)
- if [ "$dist_version" != "jessie" ] && [ "$dist_version" != "wheezy" ]; then
+ if [ "$dist_version" != "stretch" ] && [ "$dist_version" != "jessie" ] && [ "$dist_version" != "wheezy" ]; then
echo ""
cecho r " Sorry this script cannot manage your version of Debian/Ubuntu."
echo ""
@@ -391,6 +396,51 @@ EOF
( set -x; $sh_c 'pip install django-ajax-selects==1.4.3' )
fi
+
+ if [ "$dist_version" == "stretch" ]; then
+ if [ "$backports_activated" != 'true' ]; then
+ echo ""
+ cecho r " In order to install Ishtar you have to activate Debian backports."
+ echo " To do that:"
+ echo ""
+ echo " echo 'deb http://ftp.debian.org/debian stretch-backports main contrib' >> /etc/apt/sources.list"
+ echo ""
+ cecho p " Run again Ishtar installation script after that."
+ exit 1
+ fi
+
+ if [ "$default_db" == '127.0.0.1' ]; then
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing postgresql"
+ echo ""
+ POSTGIS=postgresql-9.6-postgis-2.3
+ ( set -x; $sh_c 'sleep 3; apt-get install -y -q postgresql '$POSTGIS )
+ fi
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing Ishtar dependencies"
+ echo "";
+ ( set -x; $sh_c 'sleep 3; apt-get install -t stretch-backports -y -q python-django' )
+ ( set -x; $sh_c 'sleep 3; apt-get install -y -q \
+ python-django-registration \
+ python-bs4 python-django-formtools \
+ python-tidylib python-lxml python-imaging python-html5lib \
+ python-psycopg2 python-gdal gettext python-unicodecsv memcached \
+ python-django-extra-views python-memcache python-dbf python-markdown \
+ python-reportlab django-ajax-selects python-django-extensions' )
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing django-simple-history"
+ echo "";
+ ( set -x; $sh_c 'pip install git+https://github.com/treyhunner/django-simple-history.git@1.8.2#egg=django-simple-history' )
+ echo "-------------------------------------------------------------------------------";
+ cecho y "Installing python-secretary"
+ echo "";
+ ( set -x; $sh_c 'pip install secretary==0.2.14' )
+
+ cecho y "Installing python-pisa (not available anymore as a debian package)"
+ echo "";
+ ( set -x; $sh_c 'pip install pisa==3.0.33' )
+
+ fi
;;
esac
diff --git a/ishtar_common/admin.py b/ishtar_common/admin.py
index cec61a51e..2df910ffd 100644
--- a/ishtar_common/admin.py
+++ b/ishtar_common/admin.py
@@ -20,11 +20,14 @@
import csv
from ajax_select import make_ajax_form
+from ajax_select.fields import AutoCompleteSelectField, \
+ AutoCompleteSelectMultipleField
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
+from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.admin import SiteAdmin
from django.contrib.sites.models import Site
from django.contrib.gis.forms import PointField, OSMWidget
@@ -114,12 +117,22 @@ def export_as_csv_action(description=_(u"Export selected as CSV file"),
class HistorizedObjectAdmin(admin.ModelAdmin):
- readonly_fields = ['history_creator', 'history_modifier',]
+ readonly_fields = ['history_creator', 'history_modifier', 'search_vector']
def save_model(self, request, obj, form, change):
obj.history_modifier = request.user
obj.save()
+ def get_readonly_fields(self, request, obj=None):
+ if obj: # editing an existing object
+ return tuple(self.readonly_fields or []) + tuple(['imports'])
+ return self.readonly_fields
+
+ def get_exclude(self, request, obj=None):
+ if not obj:
+ return tuple(self.exclude or []) + tuple(['imports'])
+ return self.exclude
+
class MyGroupAdmin(GroupAdmin):
class Media:
@@ -153,7 +166,6 @@ class OrganizationAdmin(HistorizedObjectAdmin):
list_filter = ("organization_type",)
search_fields = ('name',)
exclude = ('merge_key', 'merge_exclusion', 'merge_candidate', )
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
model = models.Organization
admin_site.register(models.Organization, OrganizationAdmin)
@@ -164,31 +176,51 @@ class PersonAdmin(HistorizedObjectAdmin):
list_filter = ("person_types",)
search_fields = ('name', 'surname', 'email', 'raw_name')
exclude = ('merge_key', 'merge_exclusion', 'merge_candidate', )
- readonly_fields = HistorizedObjectAdmin.readonly_fields + ['imports']
form = make_ajax_form(models.Person, {'attached_to': 'organization'})
model = models.Person
admin_site.register(models.Person, PersonAdmin)
+class AdminRelatedTownForm(forms.ModelForm):
+ class Meta:
+ model = models.Town.children.through
+ exclude = []
+ from_town = AutoCompleteSelectField(
+ 'town', required=True, label=_(u"Parent"))
+
+
class AdminTownForm(forms.ModelForm):
class Meta:
model = models.Town
- exclude = []
+ exclude = ['imports']
center = PointField(label=_(u"center"), required=False,
widget=OSMWidget)
+ children = AutoCompleteSelectMultipleField('town', required=False,
+ label=_(u"Town children"))
+
+
+class TownParentInline(admin.TabularInline):
+ model = models.Town.children.through
+ fk_name = 'to_town'
+ form = AdminRelatedTownForm
+ verbose_name = _(u"Parent")
+ verbose_name_plural = _(u"Parents")
+ extra = 1
class TownAdmin(admin.ModelAdmin):
+ model = models.Town
list_display = ['name', ]
search_fields = ['name']
+ readonly_fields = ['cached_label']
if settings.COUNTRY == 'fr':
list_display += ['numero_insee', 'departement', ]
search_fields += ['numero_insee', 'departement__label', ]
list_filter = ("departement",)
- readonly_fields = ['imports']
- model = models.Town
form = AdminTownForm
+ inlines = [TownParentInline]
+
admin_site.register(models.Town, TownAdmin)
@@ -333,6 +365,56 @@ class ItemKeyAdmin(admin.ModelAdmin):
admin_site.register(models.ItemKey, ItemKeyAdmin)
+class JsonContentTypeFormMixin(object):
+ class Meta:
+ model = models.JsonDataSection
+ exclude = []
+
+ def __init__(self, *args, **kwargs):
+ super(JsonContentTypeFormMixin, self).__init__(*args, **kwargs)
+ choices = []
+ for pk, label in self.fields['content_type'].choices:
+ if not pk:
+ choices.append((pk, label))
+ continue
+ ct = ContentType.objects.get(pk=pk)
+ model_class = ct.model_class()
+ if hasattr(model_class, 'data') and \
+ not hasattr(model_class, 'history_type'):
+ choices.append((pk, label))
+ self.fields['content_type'].choices = sorted(choices,
+ key=lambda x: x[1])
+
+
+class JsonDataSectionForm(JsonContentTypeFormMixin, forms.ModelForm):
+ class Meta:
+ model = models.JsonDataSection
+ exclude = []
+
+
+class JsonDataSectionAdmin(admin.ModelAdmin):
+ list_display = ['name', 'content_type', 'order']
+ form = JsonDataSectionForm
+
+
+admin_site.register(models.JsonDataSection, JsonDataSectionAdmin)
+
+
+class JsonDataFieldForm(JsonContentTypeFormMixin, forms.ModelForm):
+ class Meta:
+ model = models.JsonDataField
+ exclude = []
+
+
+class JsonDataFieldAdmin(admin.ModelAdmin):
+ list_display = ['name', 'content_type', 'key', 'display',
+ 'order', 'section']
+ form = JsonDataFieldForm
+
+
+admin_site.register(models.JsonDataField, JsonDataFieldAdmin)
+
+
class AdministrationScriptAdmin(admin.ModelAdmin):
list_display = ['name', 'path']
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 9caebb2dd..e8ec43ab2 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -1486,6 +1486,9 @@ class Importer(object):
# importer trigger
self._set_importer_trigger(cls, attribute, data)
return
+ if attribute == 'data': # json field
+ # no need to do anything
+ return
try:
field_object = cls._meta.get_field(attribute)
except FieldDoesNotExist:
@@ -1570,8 +1573,8 @@ class Importer(object):
create_dict = copy.deepcopy(data)
for k in create_dict.keys():
- # filter unnecessary default values
- if type(create_dict[k]) == dict:
+ # filter unnecessary default values but not the json field
+ if type(create_dict[k]) == dict and k != 'data':
create_dict.pop(k)
# File doesn't like deepcopy
elif type(create_dict[k]) == File:
diff --git a/ishtar_common/management/commands/update_search_vectors.py b/ishtar_common/management/commands/update_search_vectors.py
new file mode 100644
index 000000000..c73a6e88e
--- /dev/null
+++ b/ishtar_common/management/commands/update_search_vectors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+
+from django.core.management.base import BaseCommand
+import django.apps
+
+
+class Command(BaseCommand):
+ help = "./manage.py update_search_vectors\n\n"\
+ "Update full texte search vectors."
+
+ def handle(self, *args, **options):
+ for model in django.apps.apps.get_models():
+ if hasattr(model, "update_search_vector") and \
+ getattr(model, "BASE_SEARCH_VECTORS", None):
+ self.stdout.write("\n* update {}".format(model))
+ total = model.objects.count()
+ for idx, item in enumerate(model.objects.all()):
+ sys.stdout.write("\r{}/{} ".format(idx, total))
+ sys.stdout.flush()
+ item.update_search_vector()
+ self.stdout.write("\n")
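Note: the search_vector columns added by the migrations below start out empty; running this command once, for instance after an upgrade or a large import, fills them for every model that defines BASE_SEARCH_VECTORS:

    ./manage.py update_search_vectors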
diff --git a/ishtar_common/migrations/0015_auto_20171011_1644.py b/ishtar_common/migrations/0015_auto_20171011_1644.py
new file mode 100644
index 000000000..a9f4499c2
--- /dev/null
+++ b/ishtar_common/migrations/0015_auto_20171011_1644.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-11 16:44
+from __future__ import unicode_literals
+
+import django.contrib.postgres.search
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0014_ishtarsiteprofile_preservation'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='historicalorganization',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='historicalperson',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='organization',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ migrations.AddField(
+ model_name='person',
+ name='search_vector',
+ field=django.contrib.postgres.search.SearchVectorField(blank=True, help_text='Auto filled at save', null=True, verbose_name='Search vector'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0016_auto_20171016_1104.py b/ishtar_common/migrations/0016_auto_20171016_1104.py
new file mode 100644
index 000000000..1d9209bdd
--- /dev/null
+++ b/ishtar_common/migrations/0016_auto_20171016_1104.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-16 11:04
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0015_auto_20171011_1644'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='town',
+ name='cached_label',
+ field=models.CharField(blank=True, db_index=True, max_length=500, null=True, verbose_name='Cached name'),
+ ),
+ migrations.AddField(
+ model_name='town',
+ name='children',
+ field=models.ManyToManyField(blank=True, related_name='parents', to='ishtar_common.Town', verbose_name='Town children'),
+ ),
+ migrations.AddField(
+ model_name='town',
+ name='year',
+ field=models.IntegerField(blank=True, help_text='If not filled, considered as the oldest town known.', null=True, verbose_name='Year of creation'),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0017_auto_20171016_1320.py b/ishtar_common/migrations/0017_auto_20171016_1320.py
new file mode 100644
index 000000000..a48b36ce7
--- /dev/null
+++ b/ishtar_common/migrations/0017_auto_20171016_1320.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-16 13:20
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('ishtar_common', '0016_auto_20171016_1104'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='town',
+ name='numero_insee',
+ field=models.CharField(max_length=6, verbose_name='Num\xe9ro INSEE'),
+ ),
+ migrations.AlterField(
+ model_name='town',
+ name='year',
+ field=models.IntegerField(blank=True, help_text='Filling this field is relevant to distinguish old towns from new towns.', null=True, verbose_name='Year of creation'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='town',
+ unique_together=set([('numero_insee', 'year')]),
+ ),
+ ]
diff --git a/ishtar_common/migrations/0018_auto_20171017_1840.py b/ishtar_common/migrations/0018_auto_20171017_1840.py
new file mode 100644
index 000000000..0c617a3d5
--- /dev/null
+++ b/ishtar_common/migrations/0018_auto_20171017_1840.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11 on 2017-10-17 18:40
+from __future__ import unicode_literals
+
+import django.contrib.postgres.fields.jsonb
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('contenttypes', '0002_remove_content_type_name'),
+ ('ishtar_common', '0017_auto_20171016_1320'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='JsonDataField',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=200, verbose_name='Name')),
+ ('key', models.CharField(help_text='Value of the key in the JSON schema. For a hierarchical key, use "__" to separate the levels. For instance the key \'my_subkey\' with data such as {\'my_key\': {\'my_subkey\': \'value\'}} will be reached with my_key__my_subkey.', max_length=200, verbose_name='Key')),
+ ('display', models.BooleanField(default=True, verbose_name='Display')),
+ ('order', models.IntegerField(default=10, verbose_name='Order')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
+ ],
+ options={
+ 'verbose_name': 'Json data - Field',
+ 'verbose_name_plural': 'Json data - Fields',
+ },
+ ),
+ migrations.CreateModel(
+ name='JsonDataSection',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=200, verbose_name='Name')),
+ ('order', models.IntegerField(default=10, verbose_name='Order')),
+ ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
+ ],
+ options={
+ 'ordering': ['name'],
+ 'verbose_name': 'Json data - Menu',
+ 'verbose_name_plural': 'Json data - Menus',
+ },
+ ),
+ migrations.AddField(
+ model_name='historicalorganization',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='historicalperson',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='organization',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='person',
+ name='data',
+ field=django.contrib.postgres.fields.jsonb.JSONField(db_index=True, default={}),
+ ),
+ migrations.AddField(
+ model_name='jsondatafield',
+ name='section',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ishtar_common.JsonDataSection'),
+ ),
+ ]
diff --git a/ishtar_common/models.py b/ishtar_common/models.py
index 28a24115b..c3ba4fdd0 100644
--- a/ishtar_common/models.py
+++ b/ishtar_common/models.py
@@ -35,6 +35,8 @@ import tempfile
import time
from django.conf import settings
+from django.contrib.postgres.fields import JSONField
+from django.contrib.postgres.search import SearchVectorField, SearchVector
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
@@ -58,7 +60,7 @@ from simple_history.models import HistoricalRecords as BaseHistoricalRecords
from ishtar_common.model_merging import merge_model_objects
from ishtar_common.utils import get_cache, disable_for_loaddata, create_slug,\
- get_all_field_names
+ get_all_field_names, merge_tsvectors, cached_label_changed
from ishtar_common.models_imports import ImporterModel, ImporterType, \
ImporterDefault, ImporterDefaultValues, ImporterColumn, \
@@ -908,6 +910,96 @@ class BulkUpdatedItem(object):
return transaction_id, False
+class JsonDataSection(models.Model):
+ content_type = models.ForeignKey(ContentType)
+ name = models.CharField(_(u"Name"), max_length=200)
+ order = models.IntegerField(_(u"Order"), default=10)
+
+ class Meta:
+ verbose_name = _(u"Json data - Menu")
+ verbose_name_plural = _(u"Json data - Menus")
+ ordering = ['order', 'name']
+
+ def __unicode__(self):
+ return u"{} - {}".format(self.content_type, self.name)
+
+
+class JsonDataField(models.Model):
+ name = models.CharField(_(u"Name"), max_length=200)
+ content_type = models.ForeignKey(ContentType)
+ key = models.CharField(
+ _(u"Key"), max_length=200,
+ help_text=_(u"Value of the key in the JSON schema. For hierarchical "
+ u"key use \"__\" to explain it. For instance the key "
+ u"'my_subkey' with data such as {'my_key': {'my_subkey': "
+ u"'value'}} will be reached with my_key__my_subkey."))
+ display = models.BooleanField(_(u"Display"), default=True)
+ order = models.IntegerField(_(u"Order"), default=10)
+ section = models.ForeignKey(JsonDataSection, blank=True, null=True)
+
+ class Meta:
+ verbose_name = _(u"Json data - Field")
+ verbose_name_plural = _(u"Json data - Fields")
+ ordering = ['order', 'name']
+
+ def __unicode__(self):
+ return u"{} - {}".format(self.content_type, self.name)
+
+ def clean(self):
+ if not self.section:
+ return
+ if self.section.content_type != self.content_type:
+ raise ValidationError(
+ _(u"Content type of the field and of the menu do not match"))
+
+
+class JsonData(models.Model):
+ data = JSONField(default={}, db_index=True, blank=True)
+
+ class Meta:
+ abstract = True
+
+ def pre_save(self):
+ if not self.data:
+ self.data = {}
+
+ @property
+ def json_sections(self):
+ sections = []
+ try:
+ content_type = ContentType.objects.get_for_model(self)
+ except ContentType.DoesNotExist:
+ return sections
+ fields = list(JsonDataField.objects.filter(
+ content_type=content_type, display=True, section__isnull=True
+ ).all()) # no section fields
+
+ fields += list(JsonDataField.objects.filter(
+ content_type=content_type, display=True, section__isnull=False
+ ).order_by('section__order', 'order').all())
+
+ for field in fields:
+ value = None
+ data = self.data.copy()
+ for key in field.key.split('__'):
+ if key in data:
+ value = copy.copy(data[key])
+ data = data[key]
+ else:
+ value = None
+ break
+ if not value:
+ continue
+ if type(value) in (list, tuple):
+ value = u" ; ".join([unicode(v) for v in value])
+ section_name = field.section.name if field.section else None
+ if not sections or section_name != sections[-1][0]:
+ # if the section name is identical it is the same section
+ sections.append((section_name, []))
+ sections[-1][1].append((field.name, value))
+ return sections
+
+
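A minimal sketch of how json_sections walks the "data" JSON field, assuming a JsonDataField configured with key 'my_key__my_subkey' as in the help text above (names are illustrative, not part of this patch):

    data = {'my_key': {'my_subkey': 'value'}}
    value = data
    for key in 'my_key__my_subkey'.split('__'):
        value = value[key] if isinstance(value, dict) and key in value else None
        if value is None:
            break
    # value == 'value'; json_sections groups it as (section name, [(field name, value)])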
class Imported(models.Model):
imports = models.ManyToManyField(
Import, blank=True,
@@ -917,9 +1009,85 @@ class Imported(models.Model):
abstract = True
-class BaseHistorizedItem(Imported):
+class FullSearch(models.Model):
+ search_vector = SearchVectorField(_("Search vector"), blank=True, null=True,
+ help_text=_("Auto filled at save"))
+ BASE_SEARCH_VECTORS = []
+ INT_SEARCH_VECTORS = []
+ M2M_SEARCH_VECTORS = []
+ PARENT_SEARCH_VECTORS = []
+
+ class Meta:
+ abstract = True
+
+ def update_search_vector(self, save=True):
+ """
+ Update the search vector
+ :param save: True if you want to save the object immediately
+ :return: True if modified
+ """
+ if not self.BASE_SEARCH_VECTORS and not self.M2M_SEARCH_VECTORS:
+ logger.warning("No search_vectors defined for {}".format(
+ self.__class__))
+ return
+ if getattr(self, '_search_updated', None):
+ return
+ self._search_updated = True
+
+ old_search = ""
+ if self.search_vector:
+ old_search = self.search_vector[:]
+ search_vectors = []
+ base_q = self.__class__.objects.filter(pk=self.pk)
+
+ # many-to-many fields have to be queried one by one, otherwise only one related item is fetched
+ for M2M_SEARCH_VECTOR in self.M2M_SEARCH_VECTORS:
+ key = M2M_SEARCH_VECTOR.split('__')[0]
+ rel_key = getattr(self, key)
+ for item in rel_key.values('pk').all():
+ query_dct = {key + "__pk": item['pk']}
+ q = copy.copy(base_q).filter(**query_dct)
+ q = q.annotate(
+ search=SearchVector(
+ M2M_SEARCH_VECTOR,
+ config=settings.ISHTAR_SEARCH_LANGUAGE)
+ ).values('search')
+ search_vectors.append(q.all()[0]['search'])
+
+ # int/float values are not well handled by SearchVector
+ for INT_SEARCH_VECTOR in self.INT_SEARCH_VECTORS:
+ q = base_q.values(INT_SEARCH_VECTOR)
+ search_vectors.append(
+ "'{}':1".format(q.all()[0][INT_SEARCH_VECTOR]))
+
+ # copy parent vector fields
+ for PARENT_SEARCH_VECTOR in self.PARENT_SEARCH_VECTORS:
+ parent = getattr(self, PARENT_SEARCH_VECTOR)
+ if hasattr(parent, 'all'): # m2m
+ for p in parent.all():
+ search_vectors.append(p.search_vector)
+ else:
+ search_vectors.append(parent.search_vector)
+
+ # query "simple" fields
+ q = base_q.annotate(
+ search=SearchVector(
+ *self.BASE_SEARCH_VECTORS,
+ config=settings.ISHTAR_SEARCH_LANGUAGE
+ )).values('search')
+ search_vectors.append(q.all()[0]['search'])
+ self.search_vector = merge_tsvectors(search_vectors)
+ changed = old_search != self.search_vector
+ if save and changed:
+ self.skip_history_when_saving = True
+ self.save()
+ return changed
+
+
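A minimal sketch, assuming a concrete model inheriting FullSearch, of how the *_SEARCH_VECTORS attributes are meant to be declared (the model and field names are illustrative, not taken from this patch):

    class MyItem(FullSearch):                     # illustrative only
        name = models.CharField(max_length=200)
        year = models.IntegerField(null=True)
        towns = models.ManyToManyField('ishtar_common.Town')

        BASE_SEARCH_VECTORS = ['name']            # plain text fields
        INT_SEARCH_VECTORS = ['year']             # ints/floats, stored as "'<value>':1"
        M2M_SEARCH_VECTORS = ['towns__name']      # m2m lookups, queried one by one
        PARENT_SEARCH_VECTORS = []                # related objects whose vector is copied

update_search_vector() annotates the queryset with SearchVector(...) for each group and merges the partial vectors with merge_tsvectors().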
+class BaseHistorizedItem(FullSearch, Imported, JsonData):
"""
- Historized item with external ID management
+ Historized item with external ID management.
+ All historized items are searchable and have a "data" JSON field.
"""
IS_BASKET = False
EXTERNAL_ID_KEY = ''
@@ -1187,6 +1355,7 @@ class LightHistorizedItem(BaseHistorizedItem):
super(LightHistorizedItem, self).save(*args, **kwargs)
return True
+
PARSE_FORMULA = re.compile("{([^}]*)}")
FORMULA_FILTERS = {
@@ -1409,6 +1578,7 @@ def get_current_profile(force=False):
def cached_site_changed(sender, **kwargs):
get_current_profile(force=True)
+
post_save.connect(cached_site_changed, sender=IshtarSiteProfile)
post_delete.connect(cached_site_changed, sender=IshtarSiteProfile)
@@ -2490,12 +2660,20 @@ class Town(Imported, models.Model):
center = models.PointField(_(u"Localisation"), srid=settings.SRID,
blank=True, null=True)
if settings.COUNTRY == 'fr':
- numero_insee = models.CharField(u"Numéro INSEE", max_length=6,
- unique=True)
+ numero_insee = models.CharField(u"Numéro INSEE", max_length=6)
departement = models.ForeignKey(
Department, verbose_name=u"Département", null=True, blank=True)
canton = models.ForeignKey(Canton, verbose_name=u"Canton", null=True,
blank=True)
+ year = models.IntegerField(
+ _("Year of creation"), null=True, blank=True,
+ help_text=_(u"Filling this field is relevant to distinguish old towns "
+ u"to new towns."))
+ children = models.ManyToManyField(
+ 'Town', verbose_name=_(u"Town children"), blank=True,
+ related_name='parents')
+ cached_label = models.CharField(_(u"Cached name"), max_length=500,
+ null=True, blank=True, db_index=True)
objects = models.GeoManager()
class Meta:
@@ -2503,11 +2681,24 @@ class Town(Imported, models.Model):
verbose_name_plural = _(u"Towns")
if settings.COUNTRY == 'fr':
ordering = ['numero_insee']
+ unique_together = (('numero_insee', 'year'),)
def __unicode__(self):
+ if self.cached_label:
+ return self.cached_label
+ self.save()
+ return self.cached_label
+
+ def _generate_cached_label(self):
+ cached_label = self.name
if settings.COUNTRY == "fr":
- return u"%s (%s)" % (self.name, self.numero_insee[:2])
- return self.name
+ cached_label = u"%s - %s" % (self.name, self.numero_insee[:2])
+ if self.year:
+ cached_label += " ({})".format(self.year)
+ return cached_label
+
+
+post_save.connect(cached_label_changed, sender=Town)
class OperationType(GeneralType):
diff --git a/ishtar_common/static/gentium/GentiumPlus-I.ttf b/ishtar_common/static/gentium/GentiumPlus-I.ttf
new file mode 100644
index 000000000..7bc1b3d8b
--- /dev/null
+++ b/ishtar_common/static/gentium/GentiumPlus-I.ttf
Binary files differ
diff --git a/ishtar_common/static/gentium/GentiumPlus-R.ttf b/ishtar_common/static/gentium/GentiumPlus-R.ttf
new file mode 100644
index 000000000..c1194dd35
--- /dev/null
+++ b/ishtar_common/static/gentium/GentiumPlus-R.ttf
Binary files differ
diff --git a/ishtar_common/static/gentium/OFL.txt b/ishtar_common/static/gentium/OFL.txt
new file mode 100644
index 000000000..4f7540787
--- /dev/null
+++ b/ishtar_common/static/gentium/OFL.txt
@@ -0,0 +1,94 @@
+Copyright (c) 2003-2014 SIL International (http://www.sil.org/),
+with Reserved Font Names "Gentium" and "SIL".
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/ishtar_common/static/gentium/README.txt b/ishtar_common/static/gentium/README.txt
new file mode 100644
index 000000000..bc17a8cb7
--- /dev/null
+++ b/ishtar_common/static/gentium/README.txt
@@ -0,0 +1,88 @@
+README
+Gentium Plus
+========================
+
+Thank you for your interest in the Gentium Plus fonts.
+We hope you find them useful!
+
+Gentium Plus supports a wide range of Latin, Greek and Cyrillic
+characters. Documentation for the fonts is available on Gentium website
+(http://scripts.sil.org/Gentium), including details on what ranges are
+supported.
+
+Gentium Plus is released under the SIL Open Font License.
+
+See the OFL and OFL-FAQ for details of the SIL Open Font License.
+See the FONTLOG for information on this and previous releases.
+See the GENTIUM-FAQ for answers to common questions about the Gentium fonts
+See the website (http://scripts.sil.org/Gentium) for further documentation.
+See the SIL Unicode Roman FAQ (http://scripts.sil.org/ComplexRomanFontFAQ)
+for frequently asked questions and their answers regarding SIL's Roman fonts.
+
+
+TIPS
+====
+
+As this font is distributed at no cost, we are unable to provide a
+commercial level of personal technical support. The font has, however,
+been through some testing on various platforms to be sure it works in most
+situations. In particular, it has been tested and shown to work on Windows
+XP, Windows Vista and Windows 7. Graphite capabilities have been tested
+on Graphite-supported platforms.
+
+If you do find a problem, please do report it to fonts@sil.org.
+We can't guarantee any direct response, but will try to fix reported bugs in
+future versions. Make sure you read through the
+SIL Unicode Roman FAQ (http://scripts.sil.org/ComplexRomanFontFAQ).
+
+Many problems can be solved, or at least explained, through an understanding
+of the encoding and use of the fonts. Here are some basic hints:
+
+Encoding:
+The fonts are encoded according to Unicode, so your application must support
+Unicode text in order to access letters other than the standard alphabet.
+Most Windows applications provide basic Unicode support. You will, however,
+need some way of entering Unicode text into your document.
+
+Keyboarding:
+This font does not include any keyboarding helps or utilities. It uses the
+built-in keyboards of the operating system. You will need to install the
+appropriate keyboard and input method for the characters of the language you
+wish to use. If you want to enter characters that are not supported by any
+system keyboard, the Keyman program (www.tavultesoft.com) can be helpful
+on Windows systems. Also available for Windows is MSKLC
+(http://www.microsoft.com/globaldev/tools/msklc.mspx).
+For Linux systems such as Ubuntu, KMFL (http://kmfl.sourceforge.net/)
+is available. Ukelele (http://scripts.sil.org/ukelele) is available for
+Mac OS X versions 10.2 and later.
+
+For other platforms, KMFL (http://kmfl.sourceforge.net/),
+XKB (http://www.x.org/wiki/XKB) or Ukelele (http://scripts.sil.org/ukelele)
+can be helpful.
+
+If you want to enter characters that are not supported by any system
+keyboard, and to access the full Unicode range, we suggest you use
+gucharmap, kcharselect on Ubuntu or similar software.
+
+Another method of entering some symbols is provided by a few applications such
+as Adobe InDesign or OpenOffice.org. They can display a glyph palette or input
+dialog that shows all the glyphs (symbols) in a font and allow you to enter
+them by clicking on the glyph you want.
+
+Rendering:
+This font is designed to work with Graphite or Opentype advanced font
+technologies. To take advantage of the advanced typographic
+capabilities of this font, you must be using applications that provide an
+adequate level of support for Graphite or OpenType. See "Applications
+that provide an adequate level of support for SIL Unicode Roman fonts"
+(http://scripts.sil.org/Complex_AdLvSup).
+
+
+CONTACT
+========
+For more information please visit the Gentium page on SIL International's
+Computers and Writing systems website:
+http://scripts.sil.org/Gentium
+
+Support through the website: http://scripts.sil.org/Support
+
diff --git a/ishtar_common/templates/ishtar/blocks/sheet_json.html b/ishtar_common/templates/ishtar/blocks/sheet_json.html
new file mode 100644
index 000000000..31e6acb84
--- /dev/null
+++ b/ishtar_common/templates/ishtar/blocks/sheet_json.html
@@ -0,0 +1,11 @@
+{% load i18n window_field %}
+{% for json_section, json_fields in item.json_sections %}
+{% if json_section %}
+<h3>{{json_section}}</h3>
+{% endif %}
+{% for label, value in json_fields %}
+{% if forloop.first %}<ul class='form-flex'>{% endif %}
+ {% field_li label value %}
+{% if forloop.last %}</ul>{% endif %}
+{% endfor %}
+{% endfor %}
diff --git a/ishtar_common/tests.py b/ishtar_common/tests.py
index 349408465..bbb449fe3 100644
--- a/ishtar_common/tests.py
+++ b/ishtar_common/tests.py
@@ -23,6 +23,8 @@ import os
import shutil
from StringIO import StringIO
+from django.apps import apps
+
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
@@ -38,6 +40,7 @@ from django.test.runner import DiscoverRunner
from ishtar_common import models
from ishtar_common import views
+from ishtar_common.apps import admin_site
from ishtar_common.utils import post_save_point
@@ -347,6 +350,13 @@ class AdminGenTypeTest(TestCase):
models_with_data = gen_models + [models.ImporterModel]
models = models_with_data
module_name = 'ishtar_common'
+ ishtar_apps = [
+ 'ishtar_common', 'archaeological_files', 'archaeological_operations',
+ 'archaeological_context_records', 'archaeological_warehouse',
+ 'archaeological_finds'
+ ]
+ readonly_models = ['archaeological_finds.Property',
+ 'archaeological_finds.Treatment']
def setUp(self):
self.password = 'mypassword'
@@ -359,16 +369,34 @@ class AdminGenTypeTest(TestCase):
self.client.login(username=self.username, password=self.password)
def test_listing_and_detail(self):
- for model in self.models:
+ models = []
+ for app in self.ishtar_apps:
+ app_models = apps.get_app_config(app).get_models()
+ for model in app_models:
+ if model in admin_site._registry:
+ models.append((app, model))
+ for app, model in models:
# quick test to verify basic access to listing
- base_url = '/admin/{}/{}/'.format(self.module_name,
- model.__name__.lower())
+ base_url = '/admin/{}/{}/'.format(app, model.__name__.lower())
url = base_url
response = self.client.get(url)
self.assertEqual(
response.status_code, 200,
msg="Can not access admin list for {}.".format(model))
- if model in self.models_with_data:
+ nb = model.objects.count()
+ url = base_url + "add/"
+ response = self.client.get(url)
+ if app + "." + model.__name__ in self.readonly_models:
+ continue
+ self.assertEqual(
+ response.status_code, 200,
+ msg="Can not access admin add page for {}.".format(model))
+ self.assertEqual(
+ nb, model.objects.count(),
+ msg="A ghost object have been created on access to add page "
+ "for {}.".format(model))
+
+ if nb:
url = base_url + "{}/change/".format(model.objects.all()[0].pk)
response = self.client.get(url)
self.assertEqual(
@@ -1046,6 +1074,15 @@ class IshtarBasicTest(TestCase):
self.assertEqual(response.status_code, 200)
self.assertIn('class="sheet"', response.content)
+ def test_town_cache(self):
+ models.Town.objects.create(name="Sin City", numero_insee="99999")
+ town = models.Town.objects.get(numero_insee="99999")
+ self.assertEqual(town.cached_label, "Sin City - 99")
+ town.year = 2050
+ town.save()
+ town = models.Town.objects.get(numero_insee="99999")
+ self.assertEqual(town.cached_label, "Sin City - 99 (2050)")
+
class GeomaticTest(TestCase):
def test_post_save_point(self):
diff --git a/ishtar_common/utils.py b/ishtar_common/utils.py
index c6a4032f0..5d9e85c60 100644
--- a/ishtar_common/utils.py
+++ b/ishtar_common/utils.py
@@ -104,9 +104,12 @@ def cached_label_changed(sender, **kwargs):
setattr(instance, cached_label, lbl)
changed = True
if changed:
+ instance._search_updated = False
if hasattr(instance, '_cascade_change') and instance._cascade_change:
instance.skip_history_when_saving = True
instance.save()
+ if hasattr(instance, 'update_search_vector'):
+ instance.update_search_vector()
updated = False
if hasattr(instance, '_cached_labels_bulk_update'):
updated = instance._cached_labels_bulk_update()
@@ -117,6 +120,7 @@ def cached_label_changed(sender, **kwargs):
item.test_obj = instance.test_obj
cached_label_changed(item.__class__, instance=item)
+
SHORTIFY_STR = ugettext(" (...)")
@@ -289,3 +293,43 @@ def get_all_related_objects(model):
and f.auto_created and not f.concrete
]
+
+def merge_tsvectors(vectors):
+ """
+ Parse tsvectors and merge them into one string
+ :param vectors: list of tsvector strings
+ :return: merged tsvector
+ """
+ result_dict = {}
+ for vector in vectors:
+ if not vector:
+ continue
+
+ current_position = 0
+ if result_dict:
+ for key in result_dict:
+ max_position = max(result_dict[key])
+ if max_position > current_position:
+ current_position = max_position
+
+ for dct_member in vector.split(" "):
+ splitted = dct_member.split(':')
+ key = ":".join(splitted[:-1])
+ positions = splitted[-1]
+ key = key[1:-1] # remove quotes
+ positions = [int(pos) + current_position
+ for pos in positions.split(',')]
+ if key in result_dict:
+ result_dict[key] += positions
+ else:
+ result_dict[key] = positions
+
+ # {'lamelie': [1, 42, 5]} => {'lamelie': "1,42,5"}
+ result_dict = {k: ",".join([str(val) for val in result_dict[k]])
+ for k in result_dict}
+ # {'lamelie': "1,5", "hagarde": "2", "regarde": "4"} =>
+ # "'lamelie':1,5 'hagarde':2 'regarde':4"
+ result = " ".join(["'{}':{}".format(k, result_dict[k])
+ for k in result_dict])
+
+ return result
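A usage sketch of merge_tsvectors, reusing the example values from the comments above (term order in the result may vary):

    merged = merge_tsvectors(["'lamelie':1 'hagarde':2", "'regarde':1,3"])
    # positions of the second vector are shifted past the highest position already
    # recorded, so the merge yields "'lamelie':1 'hagarde':2 'regarde':3,5"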
diff --git a/ishtar_common/views.py b/ishtar_common/views.py
index 997acd7df..b8350c62a 100644
--- a/ishtar_common/views.py
+++ b/ishtar_common/views.py
@@ -41,6 +41,7 @@ from extra_views import ModelFormSetView
from django.conf import settings
from django.contrib.auth import logout
from django.contrib.auth.decorators import login_required
+from django.contrib.postgres.search import SearchQuery
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db.models import Q, ImageField
@@ -814,6 +815,23 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
dct.pop(k)
# manage hierarchic conditions
for req in dct.copy():
+ if req.endswith('town__pk') or req.endswith('towns__pk'):
+ val = dct.pop(req)
+ reqs = Q(**{req: val})
+ base_req = req[:-2] + '__'
+ req = base_req[:]
+ for idx in range(HIERARCHIC_LEVELS):
+ req = req[:-2] + 'parents__pk'
+ q = Q(**{req: val})
+ reqs |= q
+ req = base_req[:]
+ for idx in range(HIERARCHIC_LEVELS):
+ req = req[:-2] + 'children__pk'
+ q = Q(**{req: val})
+ reqs |= q
+ and_reqs.append(reqs)
+ continue
+
for k_hr in HIERARCHIC_FIELDS:
if type(req) in (list, tuple):
val = dct.pop(req)
@@ -829,12 +847,15 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
val = dct.pop(req)
reqs = Q(**{req: val})
req = req[:-2] + '__'
- for idx in xrange(HIERARCHIC_LEVELS):
+ for idx in range(HIERARCHIC_LEVELS):
req = req[:-2] + 'parent__pk'
q = Q(**{req: val})
reqs |= q
and_reqs.append(reqs)
break
+ if 'search_vector' in dct:
+ dct['search_vector'] = SearchQuery(
+ dct['search_vector'], config=settings.ISHTAR_SEARCH_LANGUAGE)
query = Q(**dct)
for k, or_req in or_reqs:
alt_dct = dct.copy()
@@ -908,6 +929,9 @@ def get_item(model, func_name, default_name, extra_request_keys=[],
items = model.objects.filter(query).distinct()
# print(items.query)
+ if 'search_vector' in dct: # for serialization
+ dct['search_vector'] = dct['search_vector'].value
+
# table cols
if own_table_cols:
table_cols = own_table_cols
diff --git a/ishtar_common/wizards.py b/ishtar_common/wizards.py
index 701f6eca3..f86e03df0 100644
--- a/ishtar_common/wizards.py
+++ b/ishtar_common/wizards.py
@@ -737,6 +737,9 @@ class Wizard(NamedUrlWizardView):
if has_problemetic_null:
continue
+ if hasattr(model, 'data') and 'data' not in value:
+ value['data'] = {}
+
if get_or_create:
value, created = model.objects.get_or_create(
**value)
diff --git a/static/gis/js/OLMapWidget.js b/static/gis/js/OLMapWidget.js
deleted file mode 100644
index c101c65fc..000000000
--- a/static/gis/js/OLMapWidget.js
+++ /dev/null
@@ -1,376 +0,0 @@
-(function() {
-/**
- * Transforms an array of features to a single feature with the merged
- * geometry of geom_type
- */
-OpenLayers.Util.properFeatures = function(features, geom_type) {
- if (features.constructor == Array) {
- var geoms = [];
- for (var i=0; i<features.length; i++) {
- geoms.push(features[i].geometry);
- }
- var geom = new geom_type(geoms);
- features = new OpenLayers.Feature.Vector(geom);
- }
- return features;
-}
-
-/**
- * @requires OpenLayers/Format/WKT.js
- */
-
-/**
- * Class: OpenLayers.Format.DjangoWKT
- * Class for reading Well-Known Text, with workarounds to successfully parse
- * geometries and collections as returned by django.contrib.gis.geos.
- *
- * Inherits from:
- * - <OpenLayers.Format.WKT>
- */
-
-OpenLayers.Format.DjangoWKT = OpenLayers.Class(OpenLayers.Format.WKT, {
- initialize: function(options) {
- OpenLayers.Format.WKT.prototype.initialize.apply(this, [options]);
- this.regExes.justComma = /\s*,\s*/;
- },
-
- parse: {
- 'point': function(str) {
- var coords = OpenLayers.String.trim(str).split(this.regExes.spaces);
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.Point(coords[0], coords[1])
- );
- },
-
- 'multipoint': function(str) {
- var point;
- var points = OpenLayers.String.trim(str).split(this.regExes.justComma);
- var components = [];
- for(var i=0, len=points.length; i<len; ++i) {
- point = points[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.point.apply(this, [point]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiPoint(components)
- );
- },
-
- 'linestring': function(str) {
- var points = OpenLayers.String.trim(str).split(',');
- var components = [];
- for(var i=0, len=points.length; i<len; ++i) {
- components.push(this.parse.point.apply(this, [points[i]]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.LineString(components)
- );
- },
-
- 'multilinestring': function(str) {
- var line;
- var lines = OpenLayers.String.trim(str).split(this.regExes.parenComma);
- var components = [];
- for(var i=0, len=lines.length; i<len; ++i) {
- line = lines[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.linestring.apply(this, [line]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiLineString(components)
- );
- },
-
- 'polygon': function(str) {
- var ring, linestring, linearring;
- var rings = OpenLayers.String.trim(str).split(this.regExes.parenComma);
- var components = [];
- for(var i=0, len=rings.length; i<len; ++i) {
- ring = rings[i].replace(this.regExes.trimParens, '$1');
- linestring = this.parse.linestring.apply(this, [ring]).geometry;
- linearring = new OpenLayers.Geometry.LinearRing(linestring.components);
- components.push(linearring);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.Polygon(components)
- );
- },
-
- 'multipolygon': function(str) {
- var polygon;
- var polygons = OpenLayers.String.trim(str).split(this.regExes.doubleParenComma);
- var components = [];
- for(var i=0, len=polygons.length; i<len; ++i) {
- polygon = polygons[i].replace(this.regExes.trimParens, '$1');
- components.push(this.parse.polygon.apply(this, [polygon]).geometry);
- }
- return new OpenLayers.Feature.Vector(
- new OpenLayers.Geometry.MultiPolygon(components)
- );
- },
-
- 'geometrycollection': function(str) {
- // separate components of the collection with |
- str = str.replace(/,\s*([A-Za-z])/g, '|$1');
- var wktArray = OpenLayers.String.trim(str).split('|');
- var components = [];
- for(var i=0, len=wktArray.length; i<len; ++i) {
- components.push(OpenLayers.Format.WKT.prototype.read.apply(this,[wktArray[i]]));
- }
- return components;
- }
- },
-
- extractGeometry: function(geometry) {
- var type = geometry.CLASS_NAME.split('.')[2].toLowerCase();
- if (!this.extract[type]) {
- return null;
- }
- if (this.internalProjection && this.externalProjection) {
- geometry = geometry.clone();
- geometry.transform(this.internalProjection, this.externalProjection);
- }
- var wktType = type == 'collection' ? 'GEOMETRYCOLLECTION' : type.toUpperCase();
- var data = wktType + '(' + this.extract[type].apply(this, [geometry]) + ')';
- return data;
- },
-
- /**
- * Patched write: successfully writes WKT for geometries and
- * geometrycollections.
- */
- write: function(features) {
- var collection, geometry, type, data, isCollection;
- isCollection = features.geometry.CLASS_NAME == "OpenLayers.Geometry.Collection";
- var pieces = [];
- if (isCollection) {
- collection = features.geometry.components;
- pieces.push('GEOMETRYCOLLECTION(');
- for (var i=0, len=collection.length; i<len; ++i) {
- if (i>0) {
- pieces.push(',');
- }
- pieces.push(this.extractGeometry(collection[i]));
- }
- pieces.push(')');
- } else {
- pieces.push(this.extractGeometry(features.geometry));
- }
- return pieces.join('');
- },
-
- CLASS_NAME: "OpenLayers.Format.DjangoWKT"
-});
-
-function MapWidget(options) {
- this.map = null;
- this.controls = null;
- this.panel = null;
- this.layers = {};
- this.wkt_f = new OpenLayers.Format.DjangoWKT();
-
- // Mapping from OGRGeomType name to OpenLayers.Geometry name
- if (options['geom_name'] == 'Unknown') options['geom_type'] = OpenLayers.Geometry;
- else if (options['geom_name'] == 'GeometryCollection') options['geom_type'] = OpenLayers.Geometry.Collection;
- else options['geom_type'] = eval('OpenLayers.Geometry.' + options['geom_name']);
-
- // Default options
- this.options = {
- color: 'ee9900',
- default_lat: 0,
- default_lon: 0,
- default_zoom: 4,
- is_collection: options.geom_name.indexOf('Multi') > -1 || options.geom_name.indexOf('Collection') > -1,
- layerswitcher: false,
- map_options: {},
- map_srid: 4326,
- modifiable: true,
- mouse_position: false,
- opacity: 0.4,
- point_zoom: 12,
- scale_text: false,
- scrollable: true
- };
-
- // Altering using user-provided options
- for (var property in options) {
- if (options.hasOwnProperty(property)) {
- this.options[property] = options[property];
- }
- }
-
- this.map = this.create_map();
-
- var defaults_style = {
- 'fillColor': '#' + this.options.color,
- 'fillOpacity': this.options.opacity,
- 'strokeColor': '#' + this.options.color
- };
- if (this.options.geom_name == 'LineString') {
- defaults_style['strokeWidth'] = 3;
- }
- var styleMap = new OpenLayers.StyleMap({'default': OpenLayers.Util.applyDefaults(defaults_style, OpenLayers.Feature.Vector.style['default'])});
- this.layers.vector = new OpenLayers.Layer.Vector(" " + this.options.name, {styleMap: styleMap});
- this.map.addLayer(this.layers.vector);
- var wkt = document.getElementById(this.options.id).value;
- if (wkt) {
- var feat = OpenLayers.Util.properFeatures(this.read_wkt(wkt), this.options.geom_type);
- this.write_wkt(feat);
- if (this.options.is_collection) {
- for (var i=0; i<this.num_geom; i++) {
- this.layers.vector.addFeatures([new OpenLayers.Feature.Vector(feat.geometry.components[i].clone())]);
- }
- } else {
- this.layers.vector.addFeatures([feat]);
- }
- this.map.zoomToExtent(feat.geometry.getBounds());
- if (this.options.geom_name == 'Point') {
- this.map.zoomTo(this.options.point_zoom);
- }
- } else {
- this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
- }
- this.layers.vector.events.on({'featuremodified': this.modify_wkt, scope: this});
- this.layers.vector.events.on({'featureadded': this.add_wkt, scope: this});
-
- this.getControls(this.layers.vector);
- this.panel.addControls(this.controls);
- this.map.addControl(this.panel);
- this.addSelectControl();
-
- if (this.options.mouse_position) {
- this.map.addControl(new OpenLayers.Control.MousePosition());
- }
- if (this.options.scale_text) {
- this.map.addControl(new OpenLayers.Control.Scale());
- }
- if (this.options.layerswitcher) {
- this.map.addControl(new OpenLayers.Control.LayerSwitcher());
- }
- if (!this.options.scrollable) {
- this.map.getControlsByClass('OpenLayers.Control.Navigation')[0].disableZoomWheel();
- }
- if (wkt) {
- if (this.options.modifiable) {
- this.enableEditing();
- }
- } else {
- this.enableDrawing();
- }
-}
-
-MapWidget.prototype.create_map = function() {
- var map = new OpenLayers.Map(this.options.map_id, this.options.map_options);
- if (this.options.base_layer) this.layers.base = this.options.base_layer;
- else this.layers.base = new OpenLayers.Layer.WMS('OpenLayers WMS', 'http://vmap0.tiles.osgeo.org/wms/vmap0', {layers: 'basic'});
- map.addLayer(this.layers.base);
- return map
-};
-
-MapWidget.prototype.get_ewkt = function(feat) {
- return "SRID=" + this.options.map_srid + ";" + this.wkt_f.write(feat);
-};
-
-MapWidget.prototype.read_wkt = function(wkt) {
- var prefix = 'SRID=' + this.options.map_srid + ';'
- if (wkt.indexOf(prefix) === 0) {
- wkt = wkt.slice(prefix.length);
- }
- return this.wkt_f.read(wkt);
-};
-
-MapWidget.prototype.write_wkt = function(feat) {
- feat = OpenLayers.Util.properFeatures(feat, this.options.geom_type);
- if (this.options.is_collection) {
- this.num_geom = feat.geometry.components.length;
- } else {
- this.num_geom = 1;
- }
- document.getElementById(this.options.id).value = this.get_ewkt(feat);
-};
-
-MapWidget.prototype.add_wkt = function(event) {
- if (this.options.is_collection) {
- var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
- for (var i=0; i<this.layers.vector.features.length; i++) {
- feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
- }
- this.write_wkt(feat);
- } else {
- if (this.layers.vector.features.length > 1) {
- old_feats = [this.layers.vector.features[0]];
- this.layers.vector.removeFeatures(old_feats);
- this.layers.vector.destroyFeatures(old_feats);
- }
- this.write_wkt(event.feature);
- }
-};
-
-MapWidget.prototype.modify_wkt = function(event) {
- if (this.options.is_collection) {
- if (this.options.geom_name == 'MultiPoint') {
- this.add_wkt(event);
- return;
- } else {
- var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
- for (var i=0; i<this.num_geom; i++) {
- feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
- }
- this.write_wkt(feat);
- }
- } else {
- this.write_wkt(event.feature);
- }
-};
-
-MapWidget.prototype.deleteFeatures = function() {
- this.layers.vector.removeFeatures(this.layers.vector.features);
- this.layers.vector.destroyFeatures();
-};
-
-MapWidget.prototype.clearFeatures = function() {
- this.deleteFeatures();
- document.getElementById(this.options.id).value = '';
- this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
-};
-
-MapWidget.prototype.defaultCenter = function() {
- var center = new OpenLayers.LonLat(this.options.default_lon, this.options.default_lat);
- if (this.options.map_srid) {
- return center.transform(new OpenLayers.Projection("EPSG:4326"), this.map.getProjectionObject());
- }
- return center;
-};
-
-MapWidget.prototype.addSelectControl = function() {
- var select = new OpenLayers.Control.SelectFeature(this.layers.vector, {'toggle': true, 'clickout': true});
- this.map.addControl(select);
- select.activate();
-};
-
-MapWidget.prototype.enableDrawing = function () {
- this.map.getControlsByClass('OpenLayers.Control.DrawFeature')[0].activate();
-};
-
-MapWidget.prototype.enableEditing = function () {
- this.map.getControlsByClass('OpenLayers.Control.ModifyFeature')[0].activate();
-};
-
-MapWidget.prototype.getControls = function(layer) {
- this.panel = new OpenLayers.Control.Panel({'displayClass': 'olControlEditingToolbar'});
- this.controls = [new OpenLayers.Control.Navigation()];
- if (!this.options.modifiable && layer.features.length)
- return;
- if (this.options.geom_name.indexOf('LineString') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Path, {'displayClass': 'olControlDrawFeaturePath'}));
- }
- if (this.options.geom_name.indexOf('Polygon') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Polygon, {'displayClass': 'olControlDrawFeaturePolygon'}));
- }
- if (this.options.geom_name.indexOf('Point') >= 0 || this.options.geom_name == 'GeometryCollection' || this.options.geom_name == 'Unknown') {
- this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Point, {'displayClass': 'olControlDrawFeaturePoint'}));
- }
- if (this.options.modifiable) {
- this.controls.push(new OpenLayers.Control.ModifyFeature(layer, {'displayClass': 'olControlModifyFeature'}));
- }
-};
-window.MapWidget = MapWidget;
-})();