From 9086b4d26d1a91d2e661d302cf97f4dbdb237f6a Mon Sep 17 00:00:00 2001
From: KaySar12
Date: Wed, 19 Mar 2025 18:03:29 +0700
Subject: [PATCH] update delete records

---
 setup/delete_records.py | 66 +++++++++++++++++++++++++++++------------
 1 file changed, 47 insertions(+), 19 deletions(-)

diff --git a/setup/delete_records.py b/setup/delete_records.py
index 2754d7bd1..6b5f79bd0 100755
--- a/setup/delete_records.py
+++ b/setup/delete_records.py
@@ -34,6 +34,7 @@ import argparse
 from dotenv import load_dotenv
 import color_log
 from collections import deque
+
 # Load environment variables
 load_dotenv()
 HOST = os.getenv("HOST", "localhost")
@@ -96,10 +97,26 @@ except Exception as e:
     exit(1)


+def fetch_related_fields_for_model(model, base_model):
+    """Fetch related fields for a single model."""
+    related_fields = {}
+    try:
+        fields = odoo.env[model].fields_get()
+        for field_name, field_info in fields.items():
+            if (
+                field_info.get("type") in ["many2one", "many2many", "one2many"]
+                and field_info.get("relation") == base_model
+            ):
+                related_fields.setdefault(model, []).append(field_name)
+    except Exception as e:
+        color_log.Show(3, f"Skipping model {model}: {e}")
+    return related_fields
+
+
 # Function to fetch related models
 def get_related_fields(db_name, base_model):
-    """Fetch related fields for a given model, using a cached file to reduce API calls."""
-    cache_file = f"cache/{db_name}-{base_model}.cache.json"
+    """Fetch related fields for a given model using multiprocessing."""
+    cache_file = f"cache/{db_name}/{base_model}.cache.json"

     if os.path.exists(cache_file):
         with open(cache_file, "r") as f:
@@ -112,29 +129,34 @@ def get_related_fields(db_name, base_model):
     try:
         all_model_ids = odoo.env["ir.model"].search([])
         all_models = odoo.env["ir.model"].read(all_model_ids, ["model"])
+        model_names = [model["model"] for model in all_models]

-        for model in all_models:
-            model_name = model["model"]
-            try:
-                fields = odoo.env[model_name].fields_get()
-                for field_name, field_info in fields.items():
-                    if (
-                        field_info.get("type") in ["many2one", "many2many", "one2many"]
-                        and field_info.get("relation") == base_model
-                    ):
-                        related_models.setdefault(model_name, []).append(field_name)
-            except Exception as e:
-                color_log.Show(3, f"Skipping model {model_name}: {e}")
+        # Use multiprocessing to fetch related fields
+        with mp.Pool(processes=process_size) as pool:
+            results = pool.starmap(
+                fetch_related_fields_for_model,
+                [(model_name, base_model) for model_name in model_names],
+            )

+        # Combine results from all processes
+        for result in results:
+            for model, fields in result.items():
+                related_models.setdefault(model, []).extend(fields)
+
+        # Save to cache
+        os.makedirs(os.path.dirname(cache_file), exist_ok=True)
         with open(cache_file, "w") as f:
             json.dump(related_models, f, indent=4)
+
         return related_models
     except Exception as e:
         color_log.Show(3, f"Error fetching related models: {e}")
         return {}


-def delete_records_cascade(record_ids, model_name, process_count, related_models, db_name):
+def delete_records_cascade(
+    record_ids, model_name, process_count, related_models, db_name
+):
     """Iteratively delete related records before deleting the base records"""
     # Stack for traversal: list of (model_name, record_ids) tuples
     stack = [(model_name, record_ids)]
@@ -152,15 +174,21 @@ def delete_records_cascade(record_ids, model_name, process_count, related_models
         visited.update(current_pairs)

         has_dependents = False
-        for related_model, fields in get_related_fields(db_name, current_model).items():
+        for related_model, fields in get_related_fields(
+            db_name, current_model
+        ).items():
             related_model_obj = odoo.env[related_model]
             for field in fields:
                 try:
-                    related_ids = related_model_obj.search([(field, "in", current_ids)])
+                    related_ids = related_model_obj.search(
+                        [(field, "in", current_ids)]
+                    )
                     if related_ids:
                         stack.append((related_model, related_ids))
                         has_dependents = True
-                        print(f"{process_count}: Found {len(related_ids)} related records in {related_model} ({field})")
+                        print(
+                            f"{process_count}: Found {len(related_ids)} related records in {related_model} ({field})"
+                        )
                         process_count += 1
                 except odoorpc.error.RPCError as e:
                     print(f"Access denied for model {related_model}: {e}")
@@ -250,7 +278,7 @@ def main():
     ]
     processes = []
     if args.force:
-        delete_records_cascade(record_ids, base_model, 0, related_models,db_name)
+        delete_records_cascade(record_ids, base_model, 0, related_models, db_name)
         # for i, batch in enumerate(batch_list, start=1):
        #     process = mp.Process(
        #         target=delete_records_cascade,
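
Note: the sketch below mirrors the fan-out/merge pattern this patch introduces in get_related_fields(): one pool task per model, a merge of the per-model dicts, and a per-database cache file under cache/<db_name>/. It is only an illustration, not the patched script itself. The hard-coded MODEL_FIELDS sample data and the extra model_names/process_size parameters are assumptions made so the sketch runs standalone; the real script reads these from the shared odoorpc connection and module-level configuration instead.

import json
import multiprocessing as mp
import os

# Sample data standing in for odoo.env[model].fields_get(); assumption for illustration only.
MODEL_FIELDS = {
    "sale.order.line": {"order_id": {"type": "many2one", "relation": "sale.order"}},
    "account.move": {"name": {"type": "char"}},
}


def fetch_related_fields_for_model(model, base_model):
    """Return {model: [field, ...]} for fields that point at base_model."""
    related = {}
    for field_name, field_info in MODEL_FIELDS.get(model, {}).items():
        if (
            field_info.get("type") in ["many2one", "many2many", "one2many"]
            and field_info.get("relation") == base_model
        ):
            related.setdefault(model, []).append(field_name)
    return related


def get_related_fields(db_name, base_model, model_names, process_size=2):
    """Fan out one task per model, merge the partial dicts, then cache the result."""
    cache_file = f"cache/{db_name}/{base_model}.cache.json"
    with mp.Pool(processes=process_size) as pool:
        results = pool.starmap(
            fetch_related_fields_for_model,
            [(model_name, base_model) for model_name in model_names],
        )
    # Combine results from all worker processes into one mapping.
    related_models = {}
    for result in results:
        for model, fields in result.items():
            related_models.setdefault(model, []).extend(fields)
    # Create the per-database cache directory before writing, as the patch does.
    os.makedirs(os.path.dirname(cache_file), exist_ok=True)
    with open(cache_file, "w") as f:
        json.dump(related_models, f, indent=4)
    return related_models


if __name__ == "__main__":
    print(get_related_fields("demo_db", "sale.order", list(MODEL_FIELDS)))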