Diffstat (limited to 'ishtar_common/data_importer.py')
-rw-r--r--   ishtar_common/data_importer.py   128
1 file changed, 87 insertions, 41 deletions
diff --git a/ishtar_common/data_importer.py b/ishtar_common/data_importer.py
index 6239bf4c0..3d623e7a6 100644
--- a/ishtar_common/data_importer.py
+++ b/ishtar_common/data_importer.py
@@ -177,7 +177,8 @@ class UnicodeFormater(Formater):
                                      'length': self.max_length})
         if self.notnull and not value:
             return
-        value = self.prefix + value
+        if value:
+            value = self.prefix + value
         return value
@@ -603,7 +604,6 @@ class Importer(object):
     UNICITY_KEYS = []
     EXTRA_DEFAULTS = {}
     DEFAULTS = {}
-    STR_CUT = {}
     ERRORS = {
         'header_check': _(
             u"The given file is not correct. Check the file "
@@ -630,9 +630,13 @@
         model_name = self.OBJECT_CLS.__module__ + '.' + \
             self.OBJECT_CLS.__name__
+        unicity_keys = ''
+        if self.UNICITY_KEYS:
+            unicity_keys = ";".join(self.UNICITY_KEYS)
+
         importer = models.ImporterType.objects.create(
             slug=self.SLUG, name=name, description=self.DESC,
-            associated_models=model_name)
+            associated_models=model_name, unicity_keys=unicity_keys)
         for default in self.DEFAULTS:
             values = self.DEFAULTS[default]
@@ -691,13 +695,15 @@
                 models.ImportTarget.objects.get_or_create(
                     column=column, target=target, formater_type=formater_model,
                     force_new=getattr(formater, 'force_new', False),
+                    concat=getattr(formater, 'concat', False),
                     regexp_filter=regexp_filter,
                     comment=line.comment)
         return True
     def __init__(self, skip_lines=0, reference_header=None,
                  check_col_num=False, test=False, history_modifier=None,
-                 output='silent', import_instance=None):
+                 output='silent', import_instance=None,
+                 conservative_import=False):
         """
         * skip_line must be set if the data provided has got headers lines.
         * a reference_header can be provided to perform a data compliance
@@ -715,8 +721,12 @@
         self.line_format = copy.copy(self.LINE_FORMAT)
         self.import_instance = import_instance
         self.archive = None
+        self.conservative_import = conservative_import
+        # for a conservative_import UNICITY_KEYS should be defined
+        assert not self.conservative_import or bool(self.UNICITY_KEYS)
         self.DB_TARGETS = {}
         self.match_table = {}
+        self.concats = set()
         if import_instance and import_instance.imported_images:
             self.archive = import_instance.imported_images
         self._defaults = self.DEFAULTS.copy()
@@ -883,6 +893,7 @@
                 self.errors.append((idx_line, None, msg))
     def _line_processing(self, idx_line, line):
+        self.idx_line = idx_line
         if self.skip_lines > idx_line:
             self.validity.append(line)
             return
@@ -927,17 +938,11 @@
         if self.test:
             return
         # manage unicity of items (mainly for updates)
-        if self.UNICITY_KEYS:
-            data['defaults'] = {}
-            for k in data.keys():
-                if k not in self.UNICITY_KEYS \
-                   and k != 'defaults':
-                    data['defaults'][k] = data.pop(k)
-
         if 'history_modifier' in \
                 self.OBJECT_CLS._meta.get_all_field_names():
             data['history_modifier'] = self.history_modifier
+        self.plouf = 0
         obj, created = self.get_object(self.OBJECT_CLS, data)
         if self.import_instance and hasattr(obj, 'imports') \
            and created:
@@ -1014,10 +1019,10 @@
                     self.errors.append(
                         (idx_line + 1, idx_col + 1,
                          self.ERRORS['value_required']))
+                    self.c_errors = True
                 elif not val.strip():
                     c_row.append("")
                     return
-                self.c_errors = True
                 val = val.replace(NEW_LINE_BREAK, '\n')
                 self.errors.append(
                     (idx_line + 1, idx_col + 1,
@@ -1049,6 +1054,8 @@
                 field_name = field_name[idx_v]
             if type(force_new) in (list, tuple):
                 force_new = force_new[idx_v]
+            if formater.concat:
+                self.concats.add(field_name)
             if self.DB_TARGETS:
                 formater.reinit_db_target(
@@ -1192,14 +1199,18 @@
                         m2ms.append((attribute, v))
         elif hasattr(field_object, 'rel') and field_object.rel:
             if type(data[attribute]) == dict:
-                c_path.append(attribute)
                 # put history_modifier for every created item
                 if 'history_modifier' in \
                    field_object.rel.to._meta.get_all_field_names():
                     data[attribute]['history_modifier'] = \
                         self.history_modifier
-                data[attribute], created = self.get_object(
-                    field_object.rel.to, data[attribute], c_path)
+                try:
+                    c_path.append(attribute)
+                    data[attribute], created = self.get_object(
+                        field_object.rel.to, data[attribute].copy(), c_path)
+                except ImporterError, msg:
+                    self.errors.append((self.idx_line, None, msg))
+                    data[attribute] = None
             elif type(data[attribute]) == list:
                 data[attribute] = data[attribute][0]
@@ -1207,37 +1218,37 @@
         m2ms = []
         if data and type(data) == dict:
             c_path = path[:]
-            for attribute in data.keys():
+
+            # get all related fields
+            for attribute in list(data.keys()):
+                c_c_path = c_path[:]
                 if not attribute:
                     data.pop(attribute)
                     continue
                 if not data[attribute]:
                     continue
                 if attribute != '__force_new':
-                    self.get_field(cls, attribute, data, m2ms, c_path)
-            # default values
-            path = tuple(path)
-            if path in self._defaults:
-                for k in self._defaults[path]:
-                    if k not in data or not data[k]:
-                        data[k] = self._defaults[path][k]
-
-            # pre treatment
-            if path in self.STR_CUT:
-                for k in self.STR_CUT[path]:
-                    if k in data and data[k]:
-                        data[k] = unicode(data[k])[:self.STR_CUT[path][k]]
+                    self.get_field(cls, attribute, data, m2ms, c_c_path)

-            # filter default values
+            # filter uncessary default values
             create_dict = copy.deepcopy(data)
             for k in create_dict.keys():
                 if type(create_dict[k]) == dict:
                     create_dict.pop(k)
+
+            # default values
+            path = tuple(path)
             defaults = {}
+            if path in self._defaults:
+                for k in self._defaults[path]:
+                    if (k not in data or not data[k]):
+                        defaults[k] = self._defaults[path][k]
+
             if 'history_modifier' in create_dict:
-                defaults = {
+                defaults.update({
                     'history_modifier': create_dict.pop('history_modifier')
-                }
+                })
+
             try:
                 try:
                     dct = create_dict.copy()
@@ -1248,10 +1259,44 @@
                         created = dct.pop('__force_new')
                         if not [k for k in dct if dct[k] is not None]:
                             return None, created
-                        obj = cls.objects.create(**dct)
+                        new_dct = defaults.copy()
+                        new_dct.update(dct)
+                        obj = cls.objects.create(**new_dct)
                     else:
-                        dct['defaults'] = defaults
+                        # manage UNICITY_KEYS - only level 1
+                        if not path and self.UNICITY_KEYS:
+                            for k in dct.keys():
+                                if k not in self.UNICITY_KEYS \
+                                   and k != 'defaults':
+                                    defaults[k] = dct.pop(k)
+
+                        dct['defaults'] = defaults.copy()
                         obj, created = cls.objects.get_or_create(**dct)
+
+                        if not created and not path and self.UNICITY_KEYS:
+                            changed = False
+                            if self.conservative_import:
+                                for k in dct['defaults']:
+                                    new_val = dct['defaults'][k]
+                                    if new_val is None or new_val == '':
+                                        continue
+                                    val = getattr(obj, k)
+                                    if val is None or val == '':
+                                        changed = True
+                                        setattr(obj, k, new_val)
+                                    elif k in self.concats \
+                                            and type(val) == unicode \
+                                            and type(new_val) == unicode:
+                                        setattr(obj, k, val + u" - " + new_val)
+                            else:
+                                for k in dct['defaults']:
+                                    new_val = dct['defaults'][k]
+                                    if new_val is None or new_val == '':
+                                        continue
+                                    changed = True
+                                    setattr(obj, k, new_val)
+                            if changed:
+                                obj.save()
                     if self.import_instance and hasattr(obj, 'imports') \
                        and created:
                         obj.imports.add(self.import_instance)
@@ -1286,24 +1331,25 @@
                 except UnicodeDecodeError:
                     data = ''
                 raise ImporterError(
-                    "Erreur d'import %s, contexte : %s, erreur : %s"
-                    % (unicode(cls), unicode(data), message))
+                    "Erreur d'import %s %s, contexte : %s, erreur : %s"
+                    % (unicode(cls), unicode("__".join(path)),
+                       unicode(data), message))
             return obj, created
         return data
-    def _format_csv_line(self, values):
+    def _format_csv_line(self, values, empty=u"-"):
         return u'"' + u'","'.join(
-            [(v and unicode(v).replace('"', '""')) or u'-'
+            [(v and unicode(v).replace('"', '""')) or empty
              for v in values]) + u'"'
-    def _get_csv(self, rows, header=[]):
+    def _get_csv(self, rows, header=[], empty=u"-"):
         if not rows:
             return ""
         csv_v = []
         if header:
-            csv_v.append(self._format_csv_line(header))
+            csv_v.append(self._format_csv_line(header, empty=empty))
         for values in rows:
-            csv_v.append(self._format_csv_line(values))
+            csv_v.append(self._format_csv_line(values, empty=empty))
         return u"\n".join(csv_v)
     def get_csv_errors(self):
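
The main behavioural change above is the conservative_import merge applied when get_or_create matches an existing object on UNICITY_KEYS: fields that are empty on the existing object are filled from the import, fields that already hold data are left alone, and fields registered through a "concat" formater are appended to with a " - " separator. The following standalone Python 3 sketch illustrates that rule outside of Django; the Record class and the conservative_merge helper are hypothetical names introduced only for this example, not part of data_importer.py.

    # Illustrative sketch of the conservative_import merge rule (assumption:
    # plain attributes stand in for Django model fields).
    class Record(object):
        def __init__(self, name=None, comment=None):
            self.name = name
            self.comment = comment

    def conservative_merge(obj, defaults, concats):
        """Apply imported values without overwriting existing data."""
        changed = False
        for key, new_val in defaults.items():
            if new_val is None or new_val == '':
                continue                    # nothing to import for this field
            val = getattr(obj, key)
            if val is None or val == '':
                setattr(obj, key, new_val)  # only fill fields that are empty
                changed = True
            elif key in concats and isinstance(val, str) \
                    and isinstance(new_val, str):
                # "concat" fields are appended to instead of overwritten
                setattr(obj, key, val + u" - " + new_val)
                changed = True
        return changed

    existing = Record(name=u"Site A", comment=None)
    conservative_merge(existing,
                       {'name': u"Site A bis", 'comment': u"imported"},
                       {'comment'})
    # name is kept ("Site A"), comment is filled ("imported")

Note that in the committed code the concatenation branch does not set changed, so an update that only concatenates is saved only if some other field was also filled; the sketch flags both cases so the change is persisted.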
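For context, the top-level update path follows Django's get_or_create convention: fields listed in UNICITY_KEYS stay in the lookup, everything else is moved into the "defaults" dict, so those values are only written on creation (or merged afterwards as shown above). A minimal sketch of that split, using a hypothetical helper name:

    # Hypothetical helper mirroring the UNICITY_KEYS split done at the top
    # level (path == []) before calling cls.objects.get_or_create(**dct).
    def split_unicity(data, unicity_keys):
        """Keep unicity keys as lookup fields, move the rest to 'defaults'."""
        lookup, defaults = {}, {}
        for key, value in data.items():
            if key in unicity_keys:
                lookup[key] = value
            else:
                defaults[key] = value
        lookup['defaults'] = defaults
        return lookup

    print(split_unicity({'code': 'A1', 'label': 'Site A'}, ['code']))
    # {'code': 'A1', 'defaults': {'label': 'Site A'}}
    # Model.objects.get_or_create(**lookup) would then match on 'code' only.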
