summaryrefslogtreecommitdiff
path: root/ishtar_common/views_item.py
diff options
context:
space:
mode:
author: Étienne Loks <etienne.loks@iggdrasil.net>  2019-02-05 15:31:51 +0100
committer: Étienne Loks <etienne.loks@iggdrasil.net>  2019-04-24 19:38:56 +0200
commit: d3b1234e0ba3dfef4889dd4b1c1f8615231337fe (patch)
tree: 087de46a6b117bbdf588eac7e4f47be515f1e026 /ishtar_common/views_item.py
parent: 18860dd2c4ed4c8a57e6a6eb9b66f9e949f69ee1 (diff)
downloadIshtar-d3b1234e0ba3dfef4889dd4b1c1f8615231337fe.tar.bz2
Ishtar-d3b1234e0ba3dfef4889dd4b1c1f8615231337fe.zip
Map: performance optimization to manage large dataset
Diffstat (limited to 'ishtar_common/views_item.py')
-rw-r--r--  ishtar_common/views_item.py  182
1 file changed, 115 insertions(+), 67 deletions(-)
diff --git a/ishtar_common/views_item.py b/ishtar_common/views_item.py
index 275414d0e..4a6337fe7 100644
--- a/ishtar_common/views_item.py
+++ b/ishtar_common/views_item.py
@@ -1030,6 +1030,110 @@ def _format_geojson(rows):
return data
+def _get_data_from_query(items, query_table_cols, request, extra_request_keys,
+ do_not_deduplicate=False):
+ for query_keys in query_table_cols:
+ if not isinstance(query_keys, (tuple, list)):
+ query_keys = [query_keys]
+ for query_key in query_keys:
+ if query_key in extra_request_keys: # translate query term
+ query_key = extra_request_keys[query_key]
+ if isinstance(query_key, (list, tuple)):
+ # only manage one level for display
+ query_key = query_key[0]
+ # clean
+ for filtr in ('__icontains', '__contains', '__iexact',
+ '__exact'):
+ if query_key.endswith(filtr):
+ query_key = query_key[:len(query_key) - len(filtr)]
+ query_key.replace(".", "__") # class style to query
+
+ values = ['id'] + query_table_cols
+
+ c_ids, data_list = [], []
+ for item in items.values(*values):
+ # manual deduplicate when distinct is not enough
+ if not do_not_deduplicate and item['id'] in c_ids:
+ continue
+ c_ids.append(item['id'])
+ data = [item['id']]
+ for key in query_table_cols:
+ data.append(item[key])
+ data_list.append(data)
+ return data_list
+
+
+def _get_data_from_query_old(items, query_table_cols, request,
+ extra_request_keys, do_not_deduplicate=False):
+ c_ids, datas = [], []
+
+ for item in items:
+ # manual deduplicate when distinct is not enough
+ if not do_not_deduplicate and item.pk in c_ids:
+ continue
+ c_ids.append(item.pk)
+ data = [item.pk]
+ for keys in query_table_cols:
+ if type(keys) not in (list, tuple):
+ keys = [keys]
+ my_vals = []
+ for k in keys:
+ if k in extra_request_keys:
+ k = extra_request_keys[k]
+ if type(k) in (list, tuple):
+ k = k[0]
+ for filtr in ('__icontains', '__contains', '__iexact',
+ '__exact'):
+ if k.endswith(filtr):
+ k = k[:len(k) - len(filtr)]
+ vals = [item]
+ # foreign key may be divided by "." or "__"
+ splitted_k = []
+ for ky in k.split('.'):
+ if '__' in ky:
+ splitted_k += ky.split('__')
+ else:
+ splitted_k.append(ky)
+ for ky in splitted_k:
+ new_vals = []
+ for val in vals:
+ if hasattr(val, 'all'): # manage related objects
+ val = list(val.all())
+ for v in val:
+ v = getattr(v, ky)
+ new_vals += _get_values(request, v)
+ elif val:
+ try:
+ val = getattr(val, ky)
+ new_vals += _get_values(request, val)
+ except (AttributeError, GEOSException):
+ # must be a query key such as "contains"
+ pass
+ vals = new_vals
+ # manage last related objects
+ if vals and hasattr(vals[0], 'all'):
+ new_vals = []
+ for val in vals:
+ new_vals += list(val.all())
+ vals = new_vals
+ if not my_vals:
+ my_vals = [_format_val(va) for va in vals]
+ else:
+ new_vals = []
+ if not vals:
+ for idx, my_v in enumerate(my_vals):
+ new_vals.append(u"{}{}{}".format(
+ my_v, u' - ', ''))
+ else:
+ for idx, v in enumerate(vals):
+ new_vals.append(u"{}{}{}".format(
+ vals[idx], u' - ', _format_val(v)))
+ my_vals = new_vals[:]
+ data.append(u" & ".join(my_vals) or u"")
+ datas.append(data)
+ return datas
+
+
DEFAULT_ROW_NUMBER = 10
# length is used by ajax DataTables requests
EXCLUDED_FIELDS = ['length']
@@ -1210,7 +1314,7 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
except (ValueError, TypeError):
row_nb = DEFAULT_ROW_NUMBER
- if data_type == 'jso # no limit for mapn-map': # no limit for map
+ if data_type == 'json-map': # no limit for map
row_nb = None
dct_request_items = {}
@@ -1524,74 +1628,18 @@ def get_item(model, func_name, default_name, extra_request_keys=None,
else:
items = items[start:end]
- datas = []
if old:
items = [item.get_previous(old) for item in items]
- c_ids = []
- for item in items:
- # manual deduplicate when distinct is not enough
- if not do_not_deduplicate and item.pk in c_ids:
- continue
- c_ids.append(item.pk)
- data = [item.pk]
- for keys in query_table_cols:
- if type(keys) not in (list, tuple):
- keys = [keys]
- my_vals = []
- for k in keys:
- if k in my_extra_request_keys:
- k = my_extra_request_keys[k]
- if type(k) in (list, tuple):
- k = k[0]
- for filtr in ('__icontains', '__contains', '__iexact',
- '__exact'):
- if k.endswith(filtr):
- k = k[:len(k) - len(filtr)]
- vals = [item]
- # foreign key may be divided by "." or "__"
- splitted_k = []
- for ky in k.split('.'):
- if '__' in ky:
- splitted_k += ky.split('__')
- else:
- splitted_k.append(ky)
- for ky in splitted_k:
- new_vals = []
- for val in vals:
- if hasattr(val, 'all'): # manage related objects
- val = list(val.all())
- for v in val:
- v = getattr(v, ky)
- new_vals += _get_values(request, v)
- elif val:
- try:
- val = getattr(val, ky)
- new_vals += _get_values(request, val)
- except (AttributeError, GEOSException):
- # must be a query key such as "contains"
- pass
- vals = new_vals
- # manage last related objects
- if vals and hasattr(vals[0], 'all'):
- new_vals = []
- for val in vals:
- new_vals += list(val.all())
- vals = new_vals
- if not my_vals:
- my_vals = [_format_val(va) for va in vals]
- else:
- new_vals = []
- if not vals:
- for idx, my_v in enumerate(my_vals):
- new_vals.append(u"{}{}{}".format(
- my_v, u' - ', ''))
- else:
- for idx, v in enumerate(vals):
- new_vals.append(u"{}{}{}".format(
- vals[idx], u' - ', _format_val(v)))
- my_vals = new_vals[:]
- data.append(u" & ".join(my_vals) or u"")
- datas.append(data)
+
+ if data_type == 'json-map':
+ datas = _get_data_from_query(
+ items, query_table_cols, request, my_extra_request_keys,
+ do_not_deduplicate)
+ else:
+ datas = _get_data_from_query_old(
+ items, query_table_cols, request, my_extra_request_keys,
+ do_not_deduplicate)
+
if manual_sort_key:
# +1 because the id is added as a first col
idx_col = None