# Source: Odoo18-Base/setup/delete_records.py
# Commit: KaySar12 2631c549a5 ("update", 2025-03-19 18:23:21 +07:00)
# CI at capture time: Setup Native Action / native (3.12.7) (push) waiting to run
#                     Setup Native Action / docker (3.12.7) (push) waiting to run
# 338 lines, 11 KiB, Python, executable file
#!/usr/bin/env python
"""
Version : 0.1.0
This script deletes records from an Odoo database based on a given model and domain filter.
Usage:
delete_records.py <db_name> <base_model>
Configuration:
Create a .env file with the following variables to configure the Odoo connection:
HOST = localhost
PORT = 8069
DOMAIN = '[('employee', '=', False), ('customer_rank', '>', 0)]'
USERNAME = admin
PASSWORD = admin
PROCESS_SIZE = 30
Example:
delete_records.py enterprise-ambio res.partner
delete_records.py enterprise-ambio res.partner --domain "[('active', '=', False)]" --process_size 10
"""
__copyright__ = "Copyright 2025, NextZenOS"
__email__ = "techgroup@nextzenos.com"
__license__ = "GPLv3"
__maintainer__ = "hoangvv"
__status__ = "Development"
__version__ = "0.1.0"
import argparse
import ast
import json
import multiprocessing as mp
import os
import sys
from collections import deque

import odoorpc
from dotenv import load_dotenv

import color_log
# Load environment variables from a .env file in the working directory.
load_dotenv()

# Odoo connection settings, with local-development defaults.
HOST = os.getenv("HOST", "localhost")
PORT = int(os.getenv("PORT", "8069"))
# NOTE(review): many OSes pre-set USERNAME to the login user, which would
# silently override the "admin" default — confirm the .env always sets it.
USERNAME = os.getenv("USERNAME", "admin")
PASSWORD = os.getenv("PASSWORD", "admin")
# Domain is kept as a string here and parsed with ast.literal_eval later.
DEFAULT_DOMAIN = os.getenv("DOMAIN", "[]")
# Worker-process count for the parallel batch/introspection phases.
DEFAULT_PROCESS_SIZE = int(os.getenv("PROCESS_SIZE", "30"))
# Parse command-line arguments; .env values serve as defaults for the options.
parser = argparse.ArgumentParser(
    description="Delete records from an Odoo model based on a domain filter."
)
parser.add_argument("db_name", help="Database name")
parser.add_argument("base_model", help="Odoo model to delete records from")
parser.add_argument(
    "--domain",
    type=str,
    default=DEFAULT_DOMAIN,
    help="Domain filter (default from .env)",
)
parser.add_argument(
    "--process_size",
    type=int,
    default=DEFAULT_PROCESS_SIZE,
    help="Number of parallel processes (default from .env)",
)
parser.add_argument(
    "--force",
    action="store_true",
    help="Force delete cascade all records instead of archiving",
)
args = parser.parse_args()
db_name = args.db_name
base_model = args.base_model
domain_str = args.domain
process_size = args.process_size

# Severity codes understood by color_log.Show (first argument of every call).
OK = 0
FAIL = 1
INFO = 2
NOTICE = 3
# Odoo connection setup
odoo = odoorpc.ODOO(HOST, port=PORT)
# Pass a formatted string, not a tuple, for consistency with every other
# color_log.Show call in this script.
color_log.Show(INFO, f"Available databases: {odoo.db.list()}")

# Login to Odoo; abort with a non-zero exit code on failure.
try:
    odoo.login(db_name, USERNAME, PASSWORD)
    color_log.Show(
        OK,  # named constant instead of the magic literal 0
        f"Connected to Odoo at {HOST}:{PORT}, Database: {db_name}, Model: {base_model}",
    )
except Exception as e:
    color_log.Show(FAIL, f"Fail to Connect to Odoo Server {e}")
    sys.exit(1)  # sys.exit is always available; the exit() builtin is not guaranteed

# Convert DOMAIN from string to list. literal_eval only accepts Python
# literals and never executes code, so it is safe on CLI input.
try:
    domain_filter = ast.literal_eval(domain_str)
except Exception as e:
    color_log.Show(FAIL, f"Invalid DOMAIN format: {e}")
    sys.exit(1)
skipped_models_cache = set()  # models that already failed introspection; skip on sight


def fetch_related_fields_for_model(model, base_model):
    """Return ``{model: [field, ...]}`` for relational fields of *model* that
    point at *base_model*.

    Models that previously failed are remembered in ``skipped_models_cache``
    and short-circuited with an empty result on later calls.
    """
    if model in skipped_models_cache:
        color_log.Show(NOTICE, f"Skipping cached model {model}")
        return {}

    collected = {}
    try:
        field_defs = odoo.env[model].fields_get()
        for name, info in field_defs.items():
            is_relational = info.get("type") in ["many2one", "many2many", "one2many"]
            if is_relational and info.get("relation") == base_model:
                collected.setdefault(model, []).append(name)
    except odoorpc.error.RPCError as e:
        # No read access on this model: remember it and report nothing.
        color_log.Show(FAIL, f"Access denied for model {model}: {e}")
        skipped_models_cache.add(model)
        return {}
    except Exception as e:
        # Any other failure: cache the model but keep whatever was collected.
        color_log.Show(NOTICE, f"Skipping model {model}: {e}")
        skipped_models_cache.add(model)
    return collected
# Function to fetch related models
def get_related_fields(db_name, base_model):
    """Return a mapping ``{model: [field, ...]}`` of fields referencing base_model.

    Results are cached on disk at ``cache/<db_name>/<base_model>.cache.json``.
    A corrupt or unreadable cache file is ignored and rebuilt instead of
    crashing the script (previously json.load raised uncaught).  Fresh data is
    gathered by introspecting every model in parallel with a pool sized by the
    module-level ``process_size``.
    """
    cache_file = f"cache/{db_name}/{base_model}.cache.json"
    if os.path.exists(cache_file):
        try:
            with open(cache_file, "r") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError) as e:
            # Fall through and rebuild the cache from Odoo.
            color_log.Show(NOTICE, f"Ignoring unreadable cache {cache_file}: {e}")
    color_log.Show(INFO, f"Fetching related models for {base_model} from Odoo...")
    related_models = {}
    try:
        all_model_ids = odoo.env["ir.model"].search([])
        all_models = odoo.env["ir.model"].read(all_model_ids, ["model"])
        model_names = [model["model"] for model in all_models]
        # Use multiprocessing to fetch related fields; each worker returns a
        # small {model: [fields]} dict (or {} for inaccessible models).
        with mp.Pool(processes=process_size) as pool:
            results = pool.starmap(
                fetch_related_fields_for_model,
                [(model_name, base_model) for model_name in model_names],
            )
        # Combine results from all processes.
        for result in results:
            for model, fields in result.items():
                related_models.setdefault(model, []).extend(fields)
        # Save to cache for subsequent runs.
        os.makedirs(os.path.dirname(cache_file), exist_ok=True)
        with open(cache_file, "w") as f:
            json.dump(related_models, f, indent=4)
        return related_models
    except Exception as e:
        color_log.Show(FAIL, f"Error fetching related models: {e}")
        return {}
def delete_records_cascade(
    record_ids, model_name, process_count, related_models, db_name
):
    """Iteratively delete records referencing *record_ids*, then the records.

    Args:
        record_ids: ids in ``model_name`` to delete.
        model_name: technical name of the base model.
        process_count: running integer used only as a log prefix.
        related_models: unused here; kept for signature compatibility with
            the ``process_batch``-style call site in ``main``.
        db_name: database name, used to locate the related-field cache.
    """
    # DFS over the reverse-dependency graph. Each stack entry is (model, ids);
    # `visited` breaks reference cycles.
    stack = [(model_name, record_ids)]
    visited = set()
    # Records are recorded parent-first during traversal, so deletion below
    # iterates in REVERSE: dependents are removed before the records they
    # reference. (The previous version deleted in forward order, which tries
    # to delete parents while children still point at them.)
    discovery_order = []
    while stack:
        current_model, current_ids = stack.pop()
        current_pairs = {(current_model, rid) for rid in current_ids}
        if current_pairs.issubset(visited):
            continue  # cycle or duplicate batch — already scheduled
        visited.update(current_pairs)
        discovery_order.append((current_model, current_ids))
        for related_model, fields in get_related_fields(
            db_name, current_model
        ).items():
            if related_model in skipped_models_cache:
                color_log.Show(NOTICE, f"Skipping cached model {related_model}")
                continue
            related_model_obj = odoo.env[related_model]
            for field in fields:
                try:
                    related_ids = related_model_obj.search(
                        [(field, "in", current_ids)]
                    )
                    if related_ids:
                        stack.append((related_model, related_ids))
                        color_log.Show(
                            NOTICE,
                            f"{process_count}: Found {len(related_ids)} related records in {related_model} ({field})",
                        )
                        process_count += 1
                except odoorpc.error.RPCError as e:
                    color_log.Show(
                        FAIL, f"Access denied for model {related_model}: {e}"
                    )
                    skipped_models_cache.add(related_model)  # Add to cache
                    break  # Skip further processing for this model
                except Exception as e:
                    color_log.Show(
                        NOTICE, f"Skipping model {related_model} due to error: {e}"
                    )
                    skipped_models_cache.add(related_model)  # Add to cache
                    break  # Skip further processing for this model
    # Step 2: delete leaves first, base records last.
    for del_model, del_ids in reversed(discovery_order):
        try:
            if del_ids:
                odoo.env[del_model].browse(del_ids).unlink()
                color_log.Show(OK, f"Deleted {len(del_ids)} records from {del_model}")
        except Exception as e:
            color_log.Show(FAIL, f"Error deleting {del_model} records: {e}")
    # Safety net: retry the base records in case an earlier pass failed;
    # "already deleted" errors are logged, not raised.
    try:
        if record_ids:
            odoo.env[model_name].browse(record_ids).unlink()
            color_log.Show(OK, f"Deleted {len(record_ids)} records from {model_name}")
    except Exception as e:
        color_log.Show(FAIL, f"Error deleting {model_name} records: {e}")
# Function to delete records in parallel
def process_batch(batch, model_name, process_count, related_models):
    """Process a batch of records — archive referenced ones, delete the rest.

    Args:
        batch: record ids to process.
        model_name: model the ids belong to.
        process_count: label (e.g. ``"Process-3"``) used as the log prefix.
        related_models: ``{model: [field, ...]}`` mapping of fields that may
            reference records of ``model_name``.
    """
    model = odoo.env[model_name]
    archived_count = 0
    deleted_count = 0
    skipped_count = 0
    for record_id in batch:
        try:
            # A still-referenced record is archived instead of deleted so the
            # references pointing at it stay resolvable.  The probing RPCs now
            # live inside the try: a single failing search_count previously
            # raised out of the function and killed the whole worker process.
            is_referenced = any(
                odoo.env[related_model].search_count([(field, "=", record_id)]) > 0
                for related_model, fields in related_models.items()
                for field in fields
            )
            if is_referenced:
                model.browse(record_id).write({"active": False})
                archived_count += 1
                color_log.Show(
                    INFO, f"{process_count}: Archived {model_name} ID {record_id}"
                )
            else:
                model.unlink([record_id])
                deleted_count += 1
                color_log.Show(
                    INFO, f"{process_count}: Deleted {model_name} ID {record_id}"
                )
        except Exception as e:
            skipped_count += 1
            color_log.Show(
                INFO, f"{process_count}: Skipped {model_name} ID {record_id}: {e}"
            )
    color_log.Show(
        OK,
        f"{process_count}: {model_name} - Deleted: {deleted_count}, Archived: {archived_count}, Skipped: {skipped_count}.",
    )
# Main function to execute deletion
def main():
    """Fetch matching records and delete (or archive) them.

    With ``--force`` every record plus anything referencing it is removed via
    ``delete_records_cascade``.  Otherwise the ids are split into batches and
    processed in parallel by ``process_batch``, which archives records that
    are still referenced and deletes the rest.
    """
    model = odoo.env[base_model]
    color_log.Show(INFO, f"{domain_filter}")
    record_ids = model.search(domain_filter)
    if not record_ids:
        color_log.Show(
            FAIL, f"No records found for model {base_model} with the given domain."
        )
        return
    related_models = get_related_fields(db_name, base_model)
    if args.force:
        # Cascade delete runs single-process: traversal order matters and the
        # skipped-model cache is mutated as it goes.
        delete_records_cascade(record_ids, base_model, 0, related_models, db_name)
    else:
        # Batching is only needed on this path; it was previously computed
        # unconditionally and wasted on --force runs.
        batch_list = [
            record_ids[i : i + process_size]
            for i in range(0, len(record_ids), process_size)
        ]
        processes = []
        for i, batch in enumerate(batch_list, start=1):
            process = mp.Process(
                target=process_batch,
                args=(batch, base_model, f"Process-{i}", related_models),
            )
            processes.append(process)
            process.start()
        for process in processes:
            process.join()
    color_log.Show(OK, "Record deletion process completed.")


if __name__ == "__main__":
    main()