update
Some checks are pending
Setup Native Action / native (3.12.7) (push) Waiting to run
Setup Native Action / docker (3.12.7) (push) Waiting to run

This commit is contained in:
KaySar12 2025-03-20 11:14:56 +07:00
parent 4692b037a0
commit 699bb65076

View File

@ -1,386 +1,246 @@
#!/usr/bin/env python
"""
Delete records from an Odoo database based on a model and domain filter.

Usage:
    delete_records.py <db_name> <base_model>

Configuration:
    Create a .env file with the following variables to configure the Odoo
    connection (all of them can also be overridden with CLI flags):
        HOST = localhost
        PORT = 8069
        DOMAIN = '[('employee', '=', False), ('customer_rank', '>', 0)]'
        USERNAME = admin
        PASSWORD = admin
        PROCESS_SIZE = 30

Example:
    delete_records.py enterprise-ambio res.partner
    delete_records.py enterprise-ambio res.partner --domain "[('active', '=', False)]" --process_size 10
    delete_records.py mydb res.partner --domain "[('active', '=', False)]" --force
"""
__copyright__ = "Copyright 2025, NextZenOS"
__email__ = "techgroup@nextzenos.com"
__license__ = "GPLv3"
__maintainer__ = "hoangvv"
__status__ = "Development"
__version__ = "0.1.0"
import argparse
import ast
import json
import multiprocessing as mp
import os
from collections import deque
from multiprocessing.pool import ThreadPool

import odoorpc
from dotenv import load_dotenv

import color_log
# Load environment variables from an optional .env file (see module docstring).
load_dotenv()

# Default configuration: taken from the environment when present,
# otherwise from hard-coded fallbacks. CLI flags override these.
DEFAULT_HOST = os.getenv("HOST", "localhost")
DEFAULT_PORT = int(os.getenv("PORT", "8069"))
DEFAULT_USERNAME = os.getenv("USERNAME", "admin")
DEFAULT_PASSWORD = os.getenv("PASSWORD", "admin")
DEFAULT_DOMAIN = os.getenv("DOMAIN", "[]")
DEFAULT_PROCESS_SIZE = int(os.getenv("PROCESS_SIZE", "30"))
# Severity levels understood by color_log.Show.
OK, FAIL, INFO, NOTICE = 0, 1, 2, 3
def parse_arguments():
    """Parse command-line arguments.

    Returns:
        argparse.Namespace with connection options (host, port, username,
        password), the target db_name/base_model, the domain filter string,
        the parallelism level, and the force/soft/refresh_cache mode flags.
    """
    parser = argparse.ArgumentParser(description="Delete records from an Odoo model.")
    parser.add_argument("db_name", help="Database name")
    parser.add_argument("base_model", help="Model to delete records from")
    parser.add_argument("--host", default=DEFAULT_HOST, help="Odoo server host")
    parser.add_argument("--port", type=int, default=DEFAULT_PORT, help="Odoo server port")
    parser.add_argument("--username", default=DEFAULT_USERNAME, help="Odoo username")
    parser.add_argument("--password", default=DEFAULT_PASSWORD, help="Odoo password")
    parser.add_argument("--domain", default=DEFAULT_DOMAIN, help="Domain filter")
    parser.add_argument(
        "--process_size",
        type=int,
        default=DEFAULT_PROCESS_SIZE,
        help="Number of parallel processes",
    )
    parser.add_argument(
        "--force", action="store_true", help="Force delete instead of archiving"
    )
    parser.add_argument(
        "--soft", action="store_true", help="Archive instead of deleting"
    )
    # argparse maps --refresh-cache onto args.refresh_cache automatically.
    parser.add_argument(
        "--refresh-cache", action="store_true", help="Refresh related models cache"
    )
    return parser.parse_args()
def connect_to_odoo(host, port, db_name, username, password):
    """Establish and return a logged-in odoorpc connection.

    Exits the process (status 1) on login or connection failure instead of
    raising, since nothing useful can happen without a session.
    """
    odoo = odoorpc.ODOO(host, port=port)
    color_log.Show(INFO, f"Available databases: {odoo.db.list()}")
    try:
        odoo.login(db_name, username, password)
        color_log.Show(OK, f"Connected to {host}:{port}, DB: {db_name}")
        return odoo
    except odoorpc.error.RPCError as e:
        # Bad credentials or unknown database.
        color_log.Show(FAIL, f"Login failed: {e}")
        exit(1)
    except Exception as e:
        # Anything else (DNS, refused connection, ...).
        color_log.Show(FAIL, f"Connection error: {e}")
        exit(1)
def parse_domain(domain_str):
    """Convert a domain string into a validated Odoo domain list.

    Args:
        domain_str: Python-literal text such as "[('active', '=', False)]".

    Returns:
        The parsed domain: a list of 3-element tuples/lists (an empty list
        is valid and matches all records).

    Exits the process (status 1) when the string is not a valid domain.
    """
    try:
        domain = ast.literal_eval(domain_str)
        # Every clause must be a (field, operator, value) triple.
        if not isinstance(domain, list) or not all(
            isinstance(t, (tuple, list)) and len(t) == 3 for t in domain
        ):
            raise ValueError("Domain must be a list of 3-tuples")
        return domain
    except (ValueError, SyntaxError) as e:
        color_log.Show(FAIL, f"Invalid domain format: {e}")
        exit(1)
def get_related_fields(odoo, db_name, base_model, process_size, refresh_cache=False):
    """Fetch all fields across all models that relate to *base_model*.

    Results are cached as JSON under cache/<db_name>/<base_model>.cache.json;
    pass refresh_cache=True to force a re-scan.

    Returns:
        dict mapping model name -> list of field names whose relation is
        base_model (many2one / many2many / one2many).
    """
    cache_file = f"cache/{db_name}/{base_model}.cache.json"
    if not refresh_cache and os.path.exists(cache_file):
        with open(cache_file, "r") as f:
            color_log.Show(INFO, f"Loaded related models from cache: {base_model}")
            return json.load(f)

    color_log.Show(INFO, f"Fetching related models for {base_model}...")
    related_models = {}
    skipped_models = set()

    def fetch_fields(model_name):
        # Returns {model_name: [field, ...]} or {} when the model has no
        # relation to base_model or is inaccessible.
        if model_name in skipped_models:
            return {}
        try:
            fields = odoo.env[model_name].fields_get()
            related = {}
            for name, info in fields.items():
                if (
                    info.get("type") in ["many2one", "many2many", "one2many"]
                    and info.get("relation") == base_model
                ):
                    related.setdefault(model_name, []).append(name)
            return related
        except Exception as e:
            # Typically an access-rights error; remember so we don't retry.
            color_log.Show(NOTICE, f"Skipping {model_name}: {e}")
            skipped_models.add(model_name)
            return {}

    model_ids = odoo.env["ir.model"].search([])
    models = [m["model"] for m in odoo.env["ir.model"].read(model_ids, ["model"])]
    # ThreadPool, not mp.Pool: a process pool must pickle the callable, and
    # the local closure fetch_fields (plus the captured odoorpc session) is
    # not picklable. Threads share memory and suit these I/O-bound RPC calls.
    with ThreadPool(processes=process_size) as pool:
        results = pool.map(fetch_fields, models)
    for result in results:
        related_models.update(result)

    os.makedirs(os.path.dirname(cache_file), exist_ok=True)
    with open(cache_file, "w") as f:
        json.dump(related_models, f, indent=4)
    return related_models
def unreference_record(odoo, record_id, model_name, related_models, process_id):
    """Clear every reference to *record_id* held by related models.

    For each (model, field) pair in related_models, records pointing at
    record_id get that field written to False so the record can be deleted.
    Errors on individual fields are logged and skipped, never raised.

    Args:
        odoo: logged-in odoorpc session.
        record_id: id of the record being unreferenced.
        model_name: model of record_id (kept for interface symmetry; the
            search keys on related_models, not on this name).
        related_models: mapping model -> list of referencing field names.
        process_id: label used to tag log lines.
    """
    for related_model, fields in related_models.items():
        related_obj = odoo.env[related_model]
        for field in fields:
            try:
                related_ids = related_obj.search([(field, "=", record_id)])
                if related_ids:
                    related_obj.browse(related_ids).write({field: False})
                    color_log.Show(
                        OK,
                        f"{process_id}: Unreferenced {record_id} from {related_model} ({field})",
                    )
            except Exception as e:
                color_log.Show(
                    NOTICE,
                    f"{process_id}: Error unreferencing {record_id} in {related_model}: {e}",
                )
def delete_batch(odoo, batch, model_name, related_models, process_id, force, soft):
    """Delete or archive a batch of records of *model_name*.

    Per record:
      * referenced elsewhere and not force -> archive (active=False);
      * otherwise, unless soft-only -> unlink;
      * on unlink failure with force -> strip references then unlink again;
      * anything still failing is counted as skipped.

    Args:
        odoo: logged-in odoorpc session.
        batch: list of record ids to process.
        model_name: target Odoo model.
        related_models: mapping model -> referencing fields (see
            get_related_fields), used for the reference check.
        process_id: label used to tag log lines.
        force: delete even when referenced, unreferencing as needed.
        soft: archive-only mode; never unlink (force overrides this).
    """
    model = odoo.env[model_name]
    deleted, archived, skipped = 0, 0, 0
    for record_id in batch:
        # True when any related model still points at this record.
        is_referenced = any(
            odoo.env[m].search_count([(f, "=", record_id)]) > 0
            for m, fields in related_models.items()
            for f in fields
        )
        try:
            if is_referenced and not force:
                model.browse(record_id).write({"active": False})
                archived += 1
                color_log.Show(
                    INFO, f"{process_id}: Archived {model_name} ID {record_id}"
                )
            elif not soft or force:
                model.browse(record_id).unlink()
                deleted += 1
                color_log.Show(
                    INFO, f"{process_id}: Deleted {model_name} ID {record_id}"
                )
        except Exception as e:
            if force:
                # Last resort: drop incoming references, then retry the unlink.
                try:
                    unreference_record(
                        odoo, record_id, model_name, related_models, process_id
                    )
                    model.browse(record_id).unlink()
                    deleted += 1
                    color_log.Show(
                        INFO, f"{process_id}: Force deleted {model_name} ID {record_id}"
                    )
                except Exception as force_err:
                    skipped += 1
                    color_log.Show(
                        NOTICE, f"{process_id}: Skipped {record_id}: {force_err}"
                    )
            else:
                skipped += 1
                color_log.Show(INFO, f"{process_id}: Skipped {record_id}: {e}")
    color_log.Show(
        OK,
        f"{process_id}: {model_name} - Deleted: {deleted}, Archived: {archived}, Skipped: {skipped}",
    )
def main():
    """Orchestrate the deletion process.

    Parses CLI options, connects to Odoo, resolves records matching the
    domain, then fans the ids out to worker processes that archive/delete
    them (see delete_batch).
    """
    args = parse_arguments()
    odoo = connect_to_odoo(
        args.host, args.port, args.db_name, args.username, args.password
    )
    domain = parse_domain(args.domain)

    model = odoo.env[args.base_model]
    record_ids = model.search(domain)
    if not record_ids:
        color_log.Show(
            FAIL, f"No records found in {args.base_model} with domain {args.domain}"
        )
        return

    related_models = get_related_fields(
        odoo, args.db_name, args.base_model, args.process_size, args.refresh_cache
    )

    # NOTE(review): process_size is used as the BATCH size here, so the
    # number of spawned processes is ceil(len(record_ids) / process_size),
    # not process_size — confirm this is the intended semantics.
    batches = [
        record_ids[i : i + args.process_size]
        for i in range(0, len(record_ids), args.process_size)
    ]
    processes = []
    for i, batch in enumerate(batches, 1):
        # NOTE(review): passing the odoorpc session to a child process relies
        # on the fork start method; under spawn the session must be pickled.
        p = mp.Process(
            target=delete_batch,
            args=(
                odoo,
                batch,
                args.base_model,
                related_models,
                f"Process-{i}",
                args.force,
                args.soft,
            ),
        )
        processes.append(p)
        p.start()
    for p in processes:
        p.join()
    color_log.Show(OK, "Deletion process completed.")


if __name__ == "__main__":
    main()