Diffstat (limited to 'chimere/utils.py')
-rw-r--r--  chimere/utils.py  48
1 file changed, 40 insertions, 8 deletions
diff --git a/chimere/utils.py b/chimere/utils.py
index 55fc45c..790fd56 100644
--- a/chimere/utils.py
+++ b/chimere/utils.py
@@ -24,10 +24,12 @@ Utilitaries
import csv
import datetime
import feedparser
+import simplejson as json
import os
import re
import StringIO
import tempfile
+from urllib import urlencode
import urllib2
import unicodedata
import zipfile
@@ -510,10 +512,18 @@ class CSVManager(ImportManager):
        if msg:
            return (0, 0, msg)
        reader = csv.reader(source, delimiter=';', quotechar='"')
-        prop_cols = []
-        for pm in Marker.all_properties():
+        prop_cols, nominatim_fields = [], {}
+        reverse_nominatim_dct = dict((v, k)
+            for k, v in settings.CHIMERE_NOMINATIM_FIELDS.iteritems())
+        nominatim_default_query = settings.CHIMERE_NOMINATIM_FIELDS
+        for idx, pm in enumerate(Marker.all_properties()):
            prop_cols.append((pm.name, pm.getAttrName(),
                              pm.getAttrName()+'_set'))
+            if settings.CHIMERE_NOMINATIM_FIELDS and \
+               pm.slug in reverse_nominatim_dct:
+                nominatim_fields[idx+len(self.COLS)] = \
+                    reverse_nominatim_dct[pm.slug]
+                nominatim_default_query.pop(reverse_nominatim_dct[pm.slug])
        cols = list(self.COLS) + prop_cols
        datas = []
        for idx, row in enumerate(reader):
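
A rough sketch of what the column-to-Nominatim mapping built above ends up holding; the CHIMERE_NOMINATIM_FIELDS contents, the property slugs and the COLS length are invented here for illustration, only the dict reversal and the index arithmetic mirror the code:

# Hedged sketch, assuming CHIMERE_NOMINATIM_FIELDS maps Nominatim query
# parameters to Marker property slugs (that is how the code above reads it).
CHIMERE_NOMINATIM_FIELDS = {'street': 'address', 'city': 'town'}  # assumed values
COLS_LEN = 5                                                      # assumed len(self.COLS)
property_slugs = ['address', 'town', 'phone']                     # assumed column order

reverse_dct = dict((v, k) for k, v in CHIMERE_NOMINATIM_FIELDS.items())
nominatim_fields = {}
for idx, slug in enumerate(property_slugs):
    if slug in reverse_dct:
        # CSV column index -> Nominatim query parameter name
        nominatim_fields[idx + COLS_LEN] = reverse_dct[slug]
print(nominatim_fields)  # -> {5: 'street', 6: 'city'}

Entries left in nominatim_default_query after the pop() calls are sent unchanged with every geocoding request, so they behave as fixed query parameters.
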
@@ -521,7 +531,8 @@ class CSVManager(ImportManager):
            try:
                assert(len(row) >= len(cols))
            except AssertionError:
-                return (0, 0, _(u"Invalid CSV format"))
+                return (0, 0, _(u"Invalid CSV format - not enough columns, "
+                                u"check a reference CSV file"))
                continue
            if len(row) < len(cols):
                continue
@@ -542,6 +553,22 @@ class CSVManager(ImportManager):
            elif 'LINE' in geom:
                cls = Route
                dct['route'] = geom
+            elif settings.CHIMERE_NOMINATIM_FIELDS:
+                nominatim_query = settings.NOMINATIM_URL + "?"
+                nominatim_keys = nominatim_default_query.copy()
+                nominatim_keys['format'] = 'json'
+                for idx in nominatim_fields:
+                    nominatim_keys[nominatim_fields[idx]] = row[idx]
+                nominatim_query += urlencode(nominatim_keys)
+                remotehandle = urllib2.urlopen(nominatim_query)
+                result = StringIO.StringIO(remotehandle.read())
+                remotehandle.close()
+                result = json.load(result)
+                if not result:
+                    continue
+                result = result[0]
+                cls = Marker
+                dct['point'] = "POINT(%s %s)" % (result['lon'], result['lat'])
            else:
                continue
            import_key = pk if pk else name.decode('utf-8')
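
The new elif branch boils down to one HTTP GET against the Nominatim search API, keeping only the first hit. Below is a minimal standalone sketch of that round trip, in the same Python 2 style as the imports added by this commit; the endpoint URL and the address values are assumptions, and the json/StringIO handling is simplified:

# Hedged sketch of the Nominatim lookup; URL and parameters are illustrative.
import json
from urllib import urlencode
import urllib2

NOMINATIM_URL = 'http://nominatim.openstreetmap.org/search'  # assumed setting value
params = {'format': 'json', 'street': '1 rue de la Mairie', 'city': 'Rennes'}

handle = urllib2.urlopen(NOMINATIM_URL + '?' + urlencode(params))
results = json.loads(handle.read())
handle.close()

if results:
    best = results[0]  # the import code also keeps only the first result
    print("POINT(%s %s)" % (best['lon'], best['lat']))  # WKT stored in dct['point']

Rows that Nominatim cannot resolve are skipped with continue, so only geocodable rows become markers.
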
@@ -559,12 +586,14 @@ class CSVManager(ImportManager):
        return (new_item, updated_item, msg)

    @classmethod
-    def export(cls, queryset):
+    def export(cls, queryset, cols=[]):
        dct = {'description':unicode(datetime.date.today()), 'data':[]}
        cls_name = queryset.model.__name__.lower()
-        cols = list(cls.COLS)
-        for pm in queryset.model.all_properties():
-            cols.append((pm.name, pm.getAttrName(), pm.getAttrName()+'_set'))
+        if not cols:
+            cols = list(cls.COLS)
+            if hasattr(queryset.model, 'all_properties'):
+                for pm in queryset.model.all_properties():
+                    cols.append((pm.name, pm.getAttrName(), pm.getAttrName()+'_set'))
        header = [col[0] for col in cols]
        dct['data'].append(header)
        for item in queryset.all():
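
With cols now optional, callers can export models that have no all_properties() by passing the (header, attribute, attribute_set) triples themselves. A hypothetical usage sketch; the News model, its fields and these call sites are assumptions, only the triple shape follows cls.COLS:

# Hypothetical call sites for the reworked classmethod.
CSVManager.export(Marker.objects.all())                 # default Marker columns
CSVManager.export(News.objects.all(),                   # assumed model without all_properties()
                  cols=[(u"Title", 'title', 'title_set'),
                        (u"Date", 'available_date', 'available_date_set')])
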
@@ -573,7 +602,10 @@ class CSVManager(ImportManager):
                if callable(attr):
                    data.append(attr(item))
                else:
-                    data.append(getattr(item, attr))
+                    v = getattr(item, attr)
+                    if v == None:
+                        v = ''
+                    data.append(v)
            dct['data'].append(data)
        filename = unicode_normalize(settings.PROJECT_NAME + dct['description']\
                                     + '.csv')