Diffstat (limited to 'ishtar_common/views_item.py')
-rw-r--r--  ishtar_common/views_item.py  85
1 file changed, 42 insertions(+), 43 deletions(-)
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index 1dcb01b34..557058728 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -13,7 +13,7 @@ import subprocess
from tempfile import NamedTemporaryFile
from django.conf import settings
-from django.contrib.gis.geos import GEOSException, GEOSGeometry
+from django.contrib.gis.geos import GEOSException
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
@@ -35,9 +35,9 @@ from ishtar_common.utils import check_model_access_control, CSV_OPTIONS, \
get_all_field_names
from ishtar_common.models import HistoryError, get_current_profile, \
PRIVATE_FIELDS, GeneralType, SearchAltName
-from menus import Menu
+from .menus import Menu
-import models
+from . import models
from archaeological_files.models import File
from archaeological_operations.models import Operation, ArchaeologicalSite, \
AdministrativeAct
@@ -146,13 +146,13 @@ def new_item(model, frm, many=False):
if not check_permission(request, 'add_' + model_name.lower()):
not_permitted_msg = ugettext(u"Operation not permitted.")
return HttpResponse(not_permitted_msg)
- dct = {'title': unicode(_(u'New %s' % model_name.lower())),
+ dct = {'title': str(_(u'New %s' % model_name.lower())),
'many': many}
if request.method == 'POST':
dct['form'] = frm(request.POST, limits=limits)
if dct['form'].is_valid():
new_item = dct['form'].save(request.user)
- dct['new_item_label'] = unicode(new_item)
+ dct['new_item_label'] = str(new_item)
dct['new_item_pk'] = new_item.pk
dct['parent_name'] = parent_name
dct['parent_pk'] = parent_name
@@ -271,7 +271,7 @@ def show_item(model, name, extra_dct=None, model_for_perms=None):
'tidy-mark': 0, 'doctype': 'auto',
'add-xml-decl': 1, 'wrap': 1}
html, errors = tidy(content, options=tidy_options)
- html = html.encode('utf-8').replace(" ", " ")
+ html = html.replace(" ", " ")
html = re.sub('<pre([^>]*)>\n', '<pre\\1>', html)
odt = NamedTemporaryFile()
@@ -290,7 +290,7 @@ def show_item(model, name, extra_dct=None, model_for_perms=None):
content_type='application/vnd.oasis.opendocument.text')
response['Content-Disposition'] = \
'attachment; filename={}.odt'.format(filename)
- with open(odt.name, 'r') as odt_file:
+ with open(odt.name, 'rb') as odt_file:
response.write(odt_file.read())
return response
elif doc_type == 'pdf':
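The switch to 'rb' matters because the generated ODT is a zip archive, not text: under Python 3, opening it with 'r' would try to decode the bytes and fail. A minimal sketch of the pattern, with a fake payload standing in for the real ODT output:

import tempfile

payload = b"PK\x03\x04 fake odt bytes"        # any binary payload stands in for the ODT (zip) file
tmp = tempfile.NamedTemporaryFile()
tmp.write(payload)
tmp.flush()

with open(tmp.name, "rb") as odt_file:        # 'rb': read raw bytes, no text decoding
    data = odt_file.read()
assert data == payload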
@@ -353,9 +353,12 @@ def _get_values(request, val):
for v in vals:
if callable(v):
v = v()
- if hasattr(v, 'url'):
- v = (request.is_secure() and
- 'https' or 'http') + '://' + request.get_host() + v.url
+ try:
+ if hasattr(v, 'url'):
+ v = (request.is_secure() and
+ 'https' or 'http') + '://' + request.get_host() + v.url
+ except ValueError:
+ pass
new_vals.append(v)
return new_vals
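The new try/except covers file attributes with no file attached: in Django, accessing .url on an empty FieldFile raises ValueError. Under Python 2, hasattr() swallowed any exception, so such values silently failed the check; Python 3's hasattr() only catches AttributeError, so the ValueError now propagates and needs an explicit guard. A rough pure-Python sketch of the pattern (FakeFieldFile is a stand-in, not Django's class):

class FakeFieldFile:
    # Mimics Django's FieldFile: .url raises ValueError when no file is attached.
    def __init__(self, name=None):
        self.name = name

    @property
    def url(self):
        if not self.name:
            raise ValueError("The attribute has no file associated with it.")
        return "/media/" + self.name

def absolute_url(value, host="example.org", secure=True):
    # Same guarded pattern as _get_values above: hasattr() itself can raise
    # ValueError here, because the url property exists but fails on an empty file.
    try:
        if hasattr(value, "url"):
            value = ("https" if secure else "http") + "://" + host + value.url
    except ValueError:
        pass
    return value

assert absolute_url(FakeFieldFile("report.pdf")) == "https://example.org/media/report.pdf"
assert isinstance(absolute_url(FakeFieldFile()), FakeFieldFile)   # empty file left untouched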
@@ -371,8 +374,8 @@ def _push_to_list(obj, current_group, depth):
except IndexError:
# tolerant to parentheses mismatch
pass
- if current_group and type(obj) in (unicode, str) and \
- type(current_group[-1]) in (unicode, str):
+ if current_group and type(obj) == str and \
+ type(current_group[-1]) == str:
current_group[-1] += obj
else:
current_group.append(obj)
@@ -381,13 +384,13 @@ def _push_to_list(obj, current_group, depth):
true_strings = [u"1", u"true"]
for language_code, language_lbl in settings.LANGUAGES:
activate(language_code)
- true_strings.append(unicode(_(u"Yes")).lower())
- true_strings.append(unicode(_(u"True")).lower())
+ true_strings.append(str(_(u"Yes")).lower())
+ true_strings.append(str(_(u"True")).lower())
deactivate()
def is_true_string(val):
- val = unicode(val).lower().replace(u'"', u"")
+ val = str(val).lower().replace(u'"', u"")
if val in true_strings:
return True
@@ -630,7 +633,7 @@ def _manage_bool_fields(model, bool_fields, reversed_bool_fields, dct, or_reqs):
dct.pop(k)
continue
dct[k] = dct[k].replace(u'"', u'')
- if dct[k] in [u"2", u"yes", unicode(_(u"Yes")).lower()]:
+ if dct[k] in [u"2", u"yes", str(_(u"Yes")).lower()]:
dct[k] = True
else:
dct[k] = False
@@ -661,7 +664,7 @@ def _manage_many_counted_fields(fields, reversed_fields, dct, excluded_dct):
dct.pop(k)
continue
dct[k] = dct[k].replace(u'"', u'')
- if dct[k] in [u"2", u"yes", unicode(_(u"Yes")).lower()]:
+ if dct[k] in [u"2", u"yes", str(_(u"Yes")).lower()]:
dct[k] = True
else:
dct[k] = None
@@ -679,7 +682,7 @@ TODAYS = ['today']
for language_code, language_lbl in settings.LANGUAGES:
activate(language_code)
- TODAYS.append(unicode(today_lbl))
+ TODAYS.append(str(today_lbl))
deactivate()
@@ -894,7 +897,7 @@ def _manage_hierarchic_fields(dct, and_reqs):
def _manage_clean_search_field(dct):
for k in dct.keys():
# clean quoted search field
- if type(dct[k]) == unicode:
+ if type(dct[k]) == str:
dct[k] = dct[k].replace(u'"', '')
dct[k] = _clean_type_val(dct[k])
if '*' in dct[k] and k.endswith('__iexact'):
@@ -953,7 +956,7 @@ def _construct_query(relation_types, dct, or_reqs, and_reqs):
# manage multi value not already managed
for key in dct.keys():
- if type(dct[key]) == unicode and u";" in dct[key]:
+ if type(dct[key]) == str and u";" in dct[key]:
values = [v for v in dct[key].split(u';') if v]
if not values:
dct.pop(key)
@@ -986,7 +989,7 @@ def _construct_query(relation_types, dct, or_reqs, and_reqs):
done = []
for and_req in and_reqs:
- str_q = unicode(and_req)
+ str_q = str(and_req)
if str_q in done:
continue
done.append(str_q)
@@ -1007,9 +1010,9 @@ def _manage_default_search(dct, request, model, default_name, my_base_request,
value = request.session[default_name]
if 'basket-' in value:
try:
- dct = {"basket__pk":
- request.session[default_name].split('-')[-1]}
- pinned_search = unicode(FindBasket.objects.get(
+ dct = {
+ "basket__pk": request.session[default_name].split('-')[-1]}
+ pinned_search = str(FindBasket.objects.get(
pk=dct["basket__pk"]))
except FindBasket.DoesNotExist:
pass
@@ -1060,12 +1063,10 @@ def _format_val(val):
return u""
if type(val) == bool:
if val:
- return unicode(_(u"True"))
+ return str(_(u"True"))
else:
- return unicode(_(u"False"))
- if type(val) == str:
- val = val.decode('utf-8')
- return unicode(val)
+ return str(_(u"False"))
+ return str(val)
def _format_geojson(rows, link_template):
@@ -1348,8 +1349,9 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
or ''))
for field in fields])
+ # add keys of associated models to available request key
for associated_model, key in my_associated_models:
- if type(associated_model) in (str, unicode):
+ if type(associated_model) == str:
if associated_model not in globals():
continue
associated_model = globals()[associated_model]
@@ -1490,6 +1492,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
relation_types = {}
for rtype_key in my_relation_types_prefix:
relation_types[my_relation_types_prefix[rtype_key]] = set()
+ for rtype_key in my_relation_types_prefix:
for keys in list(dct.keys()):
if type(keys) not in (list, tuple):
keys = [keys]
@@ -1576,7 +1579,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
items_nb = items.values('id').aggregate(Count('id'))['id__count']
if count:
return items_nb
- # print(unicode(items.query).encode('utf-8'))
+ # print(str(items.query).encode('utf-8'))
if search_vector: # for serialization
dct['search_vector'] = search_vector
@@ -1766,7 +1769,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
except NoReverseMatch:
logger.warning(
'**WARN "show-' + default_name + '" args ('
- + unicode(data[0]) + ") url not available")
+ + str(data[0]) + ") url not available")
lnk = ''
res = {
'id': data[0],
@@ -1810,7 +1813,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
})
return HttpResponse(data, content_type='application/json')
elif data_type == "csv":
- response = HttpResponse(content_type='text/csv')
+ response = HttpResponse(content_type='text/csv', charset=ENCODING)
n = datetime.datetime.now()
filename = u'%s_%s.csv' % (
default_name, n.strftime('%Y%m%d-%H%M%S'))
@@ -1818,8 +1821,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
% filename
writer = csv.writer(response, **CSV_OPTIONS)
if col_names:
- col_names = [name.encode(ENCODING, errors='replace')
- for name in col_names]
+ col_names = list(col_names)
else:
col_names = []
for field_name in table_cols:
@@ -1828,21 +1830,20 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
if hasattr(model, 'COL_LABELS') and \
field_name in model.COL_LABELS:
field = model.COL_LABELS[field_name]
- col_names.append(unicode(field).encode(ENCODING))
+ col_names.append(str(field))
continue
else:
try:
field = model._meta.get_field(field_name)
except:
- col_names.append(u"".encode(ENCODING))
+ col_names.append("")
logger.warning(
"**WARN get_item - csv export**: no col name "
"for {}\nadd explicit label to "
"COL_LABELS attribute of "
"{}".format(field_name, model))
continue
- col_names.append(
- unicode(field.verbose_name).encode(ENCODING))
+ col_names.append(str(field.verbose_name))
writer.writerow(col_names)
for data in datas:
row, delta = [], 0
@@ -1850,14 +1851,12 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
for idx, col_name in enumerate(table_cols):
if len(data[1:]) <= idx + delta:
break
- val = data[1:][idx + delta].encode(
- ENCODING, errors='replace')
+ val = data[1:][idx + delta]
if col_name and "|" in col_name[0]:
for delta_idx in range(
len(col_name[0].split('|')) - 1):
delta += 1
- val += data[1:][idx + delta].encode(
- ENCODING, errors='replace')
+ val += data[1:][idx + delta]
row.append(val)
writer.writerow(row)
return response
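The CSV branch applies the same logic at the output boundary: Python 3's csv.writer expects str values on a text stream, so the per-value .encode(ENCODING, errors='replace') calls disappear and the byte encoding is handled once by HttpResponse through its charset argument. A small sketch of that division of labour, with io.StringIO standing in for the response and assumed values for ENCODING and the writer options:

import csv
import io

ENCODING = "windows-1252"                  # assumed value; the real one comes from ishtar settings
rows = [["id", "libellé"], ["1", "fouille préventive"]]

buffer = io.StringIO()                     # text stream, like the HttpResponse body
writer = csv.writer(buffer, delimiter=";", quoting=csv.QUOTE_ALL)   # stand-in for CSV_OPTIONS
for row in rows:
    writer.writerow(row)                   # plain str values, no per-value .encode()

payload = buffer.getvalue().encode(ENCODING, errors="replace")      # encode once at the boundary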