-rw-r--r--  CHANGES.md                                                |  4
-rw-r--r--  ishtar_common/management/commands/ishtar_maintenance.py  | 80
2 files changed, 67 insertions, 17 deletions
diff --git a/CHANGES.md b/CHANGES.md
index 00d70235d..434a94855 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -6,6 +6,10 @@ date: 2022-12-05
 
 Ishtar changelog
 ================
+### Features/improvements ###
+- Commands: manage update search vector with maintenance script
+
+
 
 v4.0.32 - 2022-12-12
 --------------------
diff --git a/ishtar_common/management/commands/ishtar_maintenance.py b/ishtar_common/management/commands/ishtar_maintenance.py
index 1eeab179a..c4ad52ab6 100644
--- a/ishtar_common/management/commands/ishtar_maintenance.py
+++ b/ishtar_common/management/commands/ishtar_maintenance.py
@@ -28,6 +28,42 @@ if not os.path.exists(log_path):
     os.mkdir(log_path, mode=0o770)
 
 
+def task_update_search_vectors(quiet=False, log=False):
+    for model in apps.get_models():
+        if not hasattr(model, "update_search_vector") or \
+                not (getattr(model, "BASE_SEARCH_VECTORS", None) or
+                     getattr(model, "INT_SEARCH_VECTORS", None) or
+                     getattr(model, "M2M_SEARCH_VECTORS", None) or
+                     getattr(model, "PARENT_SEARCH_VECTORS", None)):
+            continue
+        store_results = []
+        msg = "-> processing {}: ".format(model._meta.verbose_name)
+        q = model.objects
+        ln = q.count()
+
+        changed_nb = 0
+        for idx, obj_id in enumerate(q.values("pk").all()):
+            obj = model.objects.get(pk=obj_id["pk"])
+            if not quiet:
+                cmsg = "\r{} {}/{}".format(msg, idx + 1, ln)
+                sys.stdout.write(cmsg)
+                sys.stdout.flush()
+            changed = obj.update_search_vector()
+            if changed:
+                if log:
+                    store_results.append(
+                        (
+                            obj.pk,
+                            str(obj),
+                        )
+                    )
+                changed_nb += 1
+
+        log_name = "update-search-vector-" + slugify(model.__name__)
+        csv_cols = ["id", "name"]
+        _end_task(changed_nb, msg, quiet, store_results, log, log_name, csv_cols)
+
+
 def task_check_cached_label(quiet=False, log=False):
     for model in apps.get_models():
         if model.__name__.startswith("Historical"):
@@ -75,21 +111,27 @@ def task_check_cached_label(quiet=False, log=False):
                 obj._no_move = True
                 obj._no_geo_check = True
                 obj.save()
+        log_name = "update-cached-label-" + slugify(model.__name__)
+        csv_cols = ["id", "name", "attribute", "old", "new"]
+        _end_task(changed_nb, msg, quiet, store_results, log, log_name, csv_cols)
+
+
+def _end_task(changed_nb, msg, quiet, store_results, log, log_name, csv_cols):
+    if not quiet:
+        if changed_nb:
+            cmsg = f"\r{msg} {changed_nb} updated" + 20 * " " + "\n"
+        else:
+            cmsg = " " * 80
+        sys.stdout.write(cmsg)
+    if log and changed_nb:
+        filename = f"{get_time().replace(':', '')}-{log_name}.csv"
+        path = os.sep.join([log_path, filename])
+        with open(path, 'w+') as fle:
+            writer = csv.writer(fle)
+            writer.writerow(csv_cols)
+            writer.writerows(store_results)
         if not quiet:
-            if changed_nb:
-                cmsg = f"\r{msg} {changed_nb} updated" + 20 * " " + "\n"
-            else:
-                cmsg = " " * 80
-            sys.stdout.write(cmsg)
-        if log and changed_nb:
-            filename = f"{slugify(model.__name__)}-{get_time().replace(':', '')}.csv"
-            path = os.sep.join([log_path, filename])
-            with open(path, 'w+') as fle:
-                writer = csv.writer(fle)
-                writer.writerow(["id", "name", "attribute", "old", "new"])
-                writer.writerows(store_results)
-            if not quiet:
-                sys.stdout.write(f"log: {path} written.")
+            sys.stdout.write(f"log: {path} written.")
 
 
 def task_main_image(quiet=False, log=False):
@@ -113,7 +155,7 @@ def task_main_image(quiet=False, log=False):
             sys.stdout.write(f"{nb} main image fixed for {model.__name__}\n")
 
 
-def task_missing_parcels(quiet=False):
+def task_missing_parcels(quiet=False, log=False):
     Parcel = apps.get_model("archaeological_operations", "Parcel")
     q = Parcel.objects.filter(context_record__isnull=False, operation=None)
     nb = q.count()
@@ -145,13 +187,17 @@ def get_time():
 
 TASKS = {
     "main_image": {
-        "help": "for items with images and no main image, put the first one created as a main image.",
+        "help": "for items with images and no main image, put the first one created as a main image",
         "action": task_main_image,
     },
     "cached_label": {
-        "help": "regenerate cached label on all tables if necessary",
+        "help": "regenerate cached label on all tables",
         "action": task_check_cached_label,
     },
+    "update_search_vector": {
+        "help": "regenerate search vectors on all tables",
+        "action": task_update_search_vectors,
+    },
     "operation_missing_parcels": {
         "help": "fix lost parcel association on operation from context records.",
         "action": task_missing_parcels,
