author | Étienne Loks <etienne.loks@proxience.com> | 2015-02-23 15:32:40 +0100
committer | Étienne Loks <etienne.loks@proxience.com> | 2015-05-06 16:04:02 +0200
commit | 35e6edb19711261764a416d78568637c8f9b5caa (patch)
tree | 14c34492ed0534f0e77169700c3c336bc60f27d7
parent | bab45393e2e05a9f589b81b13f3af0125621133f (diff)
download | Ishtar-35e6edb19711261764a416d78568637c8f9b5caa.tar.bz2, Ishtar-35e6edb19711261764a416d78568637c8f9b5caa.zip
Data importer: multiple formats for DateFormater - pre-treatment to cut too-long strings
-rw-r--r-- | archaeological_context_records/data_importer.py | 4
-rw-r--r-- | archaeological_files/data_importer.py | 5
-rw-r--r-- | archaeological_finds/data_importer.py | 2
-rw-r--r-- | archaeological_operations/data_importer.py | 6
-rw-r--r-- | ishtar_common/data_importer.py | 24
5 files changed, 27 insertions, 14 deletions
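The substantive change sits in ishtar_common/data_importer.py (full diff below): DateFormater now takes a list of formats and tries each one in turn instead of a single format string, and the importer classes pass one-element lists accordingly. The following is a minimal standalone sketch of that behaviour using only the standard library; class, argument and method names follow the diff, while the translation wrapper and the db_target handling are left out:

    import datetime


    class DateFormater(object):
        """Parse a date string by trying several strptime formats in turn."""

        def __init__(self, date_formats=("%d/%m/%Y",), db_target=None):
            # Accept a single format string as well as a list/tuple of formats.
            if not isinstance(date_formats, (list, tuple)):
                date_formats = [date_formats]
            self.date_formats = list(date_formats)
            self.db_target = db_target

        def format(self, value):
            value = value.strip()
            if not value:
                return None
            for date_format in self.date_formats:
                try:
                    return datetime.datetime.strptime(value, date_format).date()
                except ValueError:
                    continue
            raise ValueError(u'"%s" is not a valid date' % value)


    # Example: the importers in the diff now pass a list of accepted formats.
    formater = DateFormater(["%Y/%m/%d", "%d/%m/%Y"])
    print(formater.format("2015/02/23"))   # -> 2015-02-23
    print(formater.format("23/02/2015"))   # -> 2015-02-23
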
diff --git a/archaeological_context_records/data_importer.py b/archaeological_context_records/data_importer.py
index 0bf6f83a1..8c5df2ddf 100644
--- a/archaeological_context_records/data_importer.py
+++ b/archaeological_context_records/data_importer.py
@@ -43,9 +43,9 @@ class ContextRecordsImporterBibracte(Importer):
         # interprétation
         ImportFormater('interpretation', UnicodeFormater(1000),
                        required=False,),
         # date ouverture
-        ImportFormater('opening_date', DateFormater('%Y/%m/%d'), required=False,),
+        ImportFormater('opening_date', DateFormater(['%Y/%m/%d']), required=False,),
         # date fermeture
-        ImportFormater('closing_date', DateFormater('%Y/%m/%d'), required=False,),
+        ImportFormater('closing_date', DateFormater(['%Y/%m/%d']), required=False,),
         # lien vers parcelle
         ImportFormater('parcel__external_id', UnicodeFormater(12), required=False,),
diff --git a/archaeological_files/data_importer.py b/archaeological_files/data_importer.py
index bb817f82c..4d2962d98 100644
--- a/archaeological_files/data_importer.py
+++ b/archaeological_files/data_importer.py
@@ -71,6 +71,7 @@ class FileImporterSraPdL(FilePostProcessing, Importer):
         },
         ('in_charge',):{'attached_to':None},  # initialized in __init__
     }
+    STR_CUT = {tuple():{'comment':2000}}
 
     def _init_line_format(self):
         tf = TownFormater()
@@ -148,7 +149,7 @@ class FileImporterSraPdL(FilePostProcessing, Importer):
                            comment=u"Commentaire", concat=True,
                            required=False),
             ImportYearFormater('reception_date',  # T, 20
-                           DateFormater(),
+                           DateFormater(['%d/%m/%Y', '%d/%m/%Y']),
                            comment=u"Date de création",
                            required=False,
                            duplicate_fields=['creation_date']),
@@ -167,7 +168,7 @@ class FileImporterSraPdL(FilePostProcessing, Importer):
             None,  # AG, 33
             None,  # AH, 34
             ImportFormater('creation_date',  # AI, 35
-                           DateFormater(),
+                           DateFormater(['%d/%m/%Y', '%d/%m/%Y']),
                            force_value=True,
                            comment=u"Date de création",
                            required=False,),
diff --git a/archaeological_finds/data_importer.py b/archaeological_finds/data_importer.py
index 197bf20ec..40808cbcd 100644
--- a/archaeological_finds/data_importer.py
+++ b/archaeological_finds/data_importer.py
@@ -64,7 +64,7 @@ class FindsImporterBibracte(Importer):
         # lien UE
         ImportFormater('context_record__external_id', UnicodeFormater(120),),
         # date decouverte
-        ImportFormater('discovery_date', DateFormater('%Y/%m/%d'), required=False,),
+        ImportFormater('discovery_date', DateFormater(['%Y/%m/%d']), required=False,),
         # lien parcelle (unique)
         None,
         # etat conservation
diff --git a/archaeological_operations/data_importer.py b/archaeological_operations/data_importer.py
index 94bbdff7c..252fdcca0 100644
--- a/archaeological_operations/data_importer.py
+++ b/archaeological_operations/data_importer.py
@@ -170,9 +170,9 @@ class OperationImporterBibracte(Importer):
         # resp. lien IMPORT avec personne
         ImportFormater('in_charge__raw_name', UnicodeFormater(300),),
         # début
-        ImportFormater('start_date', DateFormater('%Y/%m/%d'),),
+        ImportFormater('start_date', DateFormater(['%Y/%m/%d']),),
         # fin
-        ImportFormater('excavation_end_date', DateFormater('%Y/%m/%d'),),
+        ImportFormater('excavation_end_date', DateFormater(['%Y/%m/%d']),),
         # Chronos
         ImportFormater('periods', TypeFormater(models.Period, many_split="&"),
                        required=False),
@@ -242,7 +242,7 @@ class DocImporterBibracte(Importer):
         # auteur
         ImportFormater('authors__person__raw_name', UnicodeFormater(300), required=False),
         # annee
-        ImportFormater('creation_date', DateFormater('%Y'),),
+        ImportFormater('creation_date', DateFormater(['%Y']),),
         # format
         ImportFormater('format_type', TypeFormater(Format), required=False),
         # description legende
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index c6019652c..3a321c7fd 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -351,18 +351,22 @@ class TypeFormater(StrChoiceFormater):
         return self.model.objects.create(**values)
 
 class DateFormater(Formater):
-    def __init__(self, date_format="%d/%m/%Y", db_target=None):
-        self.date_format = date_format
+    def __init__(self, date_formats=["%d/%m/%Y"], db_target=None):
+        self.date_formats = date_formats
+        if type(date_formats) not in (list, tuple):
+            self.date_formats = [self.date_formats]
         self.db_target = db_target
 
     def format(self, value):
         value = value.strip()
         if not value:
             return
-        try:
-            return datetime.datetime.strptime(value, self.date_format).date()
-        except:
-            raise ValueError(_(u"\"%(value)s\" is not a valid date") % {
+        for date_format in self.date_formats:
+            try:
+                return datetime.datetime.strptime(value, date_format).date()
+            except:
+                continue
+        raise ValueError(_(u"\"%(value)s\" is not a valid date") % {
             'value':value})
 
 class StrToBoolean(Formater, ChoiceChecker):
@@ -453,6 +457,7 @@ class Importer(object):
     UNICITY_KEYS = []
     EXTRA_DEFAULTS = {}
     DEFAULTS = {}
+    STR_CUT = {}
     ERRORS = {
         'header_check':_(u"The given file is not correct. Check the file "
             u"format. If you use a CSV file: check that column separator "
@@ -866,12 +871,19 @@ class Importer(object):
                 if self.import_instance and hasattr(val, 'imports'):
                     val.imports.add(self.import_instance)
                 m2ms.append((attribute, val))
+            # default values
             path = tuple(path)
             if path in self._defaults:
                 for k in self._defaults[path]:
                     if k not in data or not data[k]:
                         data[k] = self._defaults[path][k]
+            # pre treatment
+            if path in self.STR_CUT:
+                for k in self.STR_CUT[path]:
+                    if k in data and data[k]:
+                        data[k] = unicode(data[k])[:self.STR_CUT[k]]
+            # filter default values
             create_dict = copy.deepcopy(data)
             for k in create_dict.keys():
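The string-cutting pre-treatment works the other way round: an Importer subclass declares, per field path, a maximum length for each text field (FileImporterSraPdL caps comment at 2000 characters), and values are truncated just before the record is built. Below is a small Python 2 sketch of that step, factored into a hypothetical _apply_str_cut() helper; it assumes the per-path lookup self.STR_CUT[path][k] is the intended behaviour, whereas the hunk above indexes self.STR_CUT[k]:

    class Importer(object):
        # Field path -> {field name: maximum length}, e.g. the declaration
        # FileImporterSraPdL makes in the diff: {tuple(): {'comment': 2000}}.
        STR_CUT = {}

        def _apply_str_cut(self, path, data):
            """Truncate over-long string values in place (pre-treatment step)."""
            path = tuple(path)
            for field, max_length in self.STR_CUT.get(path, {}).items():
                if data.get(field):
                    # unicode() matches the Python 2 codebase shown in the diff.
                    data[field] = unicode(data[field])[:max_length]
            return data


    # Usage example mirroring FileImporterSraPdL's declaration.
    class FileImporterSketch(Importer):
        STR_CUT = {tuple(): {'comment': 2000}}


    importer = FileImporterSketch()
    data = {'comment': u'x' * 5000}
    importer._apply_str_cut((), data)
    print(len(data['comment']))  # -> 2000

On the committed code path the same truncation runs inline in Importer, right after the default values for the path are filled in and before the create dictionary is built.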