update delete_records script: add recursive cascade delete function (not tested)

This commit is contained in:
KaySar12 2025-03-18 19:07:58 +07:00
parent 7c009f350c
commit b3018eedfc
2 changed files with 91 additions and 21 deletions

View File

@@ -89,7 +89,7 @@ install_modules:
upgrade_modules:
${PYTHON} odoo-bin upgrade_code --script ${SCRIPTS} --addons-path=${UPGRADE_DIR} || true
delete_records:
${PYTHON} ${PWD}/setup/delete_records.py ${DATABASE} ${BASE_MODEL}
${PYTHON} ${PWD}/setup/delete_records.py ${DATABASE} ${BASE_MODEL} --force
##### Docker Deployment #########
run_test_docker:
sudo ${DOCKER_EXEC} ${CONTAINER_ID} odoo --test-tags :TestAccountMove.test_out_invoice_auto_post_monthly,TestAccountMove.test_included_tax --log-level=test --test-enable -d testdb-${TAG} --stop-after-init --config=/etc/odoo/${CONFIG} --xmlrpc-port=8071 && \

View File

@@ -20,11 +20,11 @@ Example:
delete_records.py enterprise-ambio res.partner --domain "[('active', '=', False)]" --process_size 10
"""
__copyright__ = "Copyright 2025, NextZenOS"
__email__ = "techgroup@nextzenos.com"
__email__ = "techgroup@nextzenos.com"
__license__ = "GPLv3"
__maintainer__ = "hoangvv"
__status__ = "Development"
__version__ = "0.0.1"
__version__ = "0.1.0"
import odoorpc
import json
import os
@@ -33,16 +33,14 @@ import ast
import argparse
from dotenv import load_dotenv
import color_log
from collections import deque
# Load environment variables
load_dotenv()
HOST = os.getenv("HOST", "localhost")
PORT = int(os.getenv("PORT", "8069"))
USERNAME = os.getenv("USERNAME", "admin")
PASSWORD = os.getenv("PASSWORD", "admin")
DEFAULT_DOMAIN = os.getenv(
"DOMAIN", "[]"
)
DEFAULT_DOMAIN = os.getenv("DOMAIN", "[]")
DEFAULT_PROCESS_SIZE = int(os.getenv("PROCESS_SIZE", "30"))
# Parse command-line arguments
@@ -63,6 +61,11 @@ parser.add_argument(
default=DEFAULT_PROCESS_SIZE,
help="Number of parallel processes (default from .env)",
)
parser.add_argument(
"--force",
action="store_true",
help="Force delete cascade all records instead of archiving",
)
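The flag uses action="store_true", so args.force defaults to False and flips to True only when --force is passed, which is how the Makefile target above opts into the cascade path. A minimal standalone sketch of that behaviour; the two positional arguments are taken from the module docstring example and are an assumption about the full parser:

import argparse

# Sketch only: the positional arguments (database, model) mirror the docstring
# example and are an assumption about the real parser in delete_records.py.
sketch = argparse.ArgumentParser()
sketch.add_argument("db_name")
sketch.add_argument("base_model")
sketch.add_argument(
    "--force",
    action="store_true",
    help="Force delete cascade all records instead of archiving",
)

print(sketch.parse_args(["mydb", "res.partner"]).force)             # False -> archive path
print(sketch.parse_args(["mydb", "res.partner", "--force"]).force)  # True  -> cascade delete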
args = parser.parse_args()
db_name = args.db_name
@@ -78,11 +81,12 @@ color_log.Show(2, ("Available databases:", odoo.db.list()))
try:
odoo.login(db_name, USERNAME, PASSWORD)
color_log.Show(
0, f"Connected to Odoo at {HOST}:{PORT}, Database: {db_name}, Model: {base_model}"
0,
f"Connected to Odoo at {HOST}:{PORT}, Database: {db_name}, Model: {base_model}",
)
except Exception as e:
color_log.Show(1, f"Fail to Connect to Odoo Server {e}")
exit(1)
exit(1)
# Convert DOMAIN from string to list
try:
@@ -95,7 +99,7 @@ except Exception as e:
# Function to fetch related models
def get_related_fields(db_name, base_model):
"""Fetch related fields for a given model, using a cached file to reduce API calls."""
cache_file = f"{db_name}-{base_model}.cache.json"
cache_file = f"cache/{db_name}-{base_model}.cache.json"
if os.path.exists(cache_file):
with open(cache_file, "r") as f:
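The cache file now lives under a cache/ subdirectory. The write side of get_related_fields is not shown in this hunk; if it opens the same path for writing, the directory has to exist on a fresh checkout or the open call fails. A minimal guard, assuming the cache is written with json.dump as the read path suggests (the helper name is hypothetical):

import json
import os

def save_related_fields_cache(cache_file, related_fields):
    # Hypothetical helper: create the cache/ directory before writing so that
    # open(cache_file, "w") does not raise FileNotFoundError.
    os.makedirs(os.path.dirname(cache_file) or ".", exist_ok=True)
    with open(cache_file, "w") as f:
        json.dump(related_fields, f)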
@@ -130,6 +134,61 @@ def get_related_fields(db_name, base_model):
return {}
def delete_records_cascade(record_ids, model_name, process_count, related_models, db_name):
"""Iteratively delete related records before deleting the base records"""
# Stack for traversal: list of (model_name, record_ids) tuples
stack = [(model_name, record_ids)]
# Track visited records to avoid cycles: set of (model_name, record_id) tuples
visited = set()
# Deletion order: list of (model_name, record_ids) in post-order
deletion_order = []
# Step 1: Collect all records to delete in post-order
while stack:
current_model, current_ids = stack.pop()
current_pairs = {(current_model, rid) for rid in current_ids}
if not current_pairs.issubset(visited):
visited.update(current_pairs)
has_dependents = False
for related_model, fields in get_related_fields(db_name, current_model).items():
related_model_obj = odoo.env[related_model]
for field in fields:
try:
related_ids = related_model_obj.search([(field, "in", current_ids)])
if related_ids:
stack.append((related_model, related_ids))
has_dependents = True
print(f"{process_count}: Found {len(related_ids)} related records in {related_model} ({field})")
process_count += 1
except odoorpc.error.RPCError as e:
print(f"Access denied for model {related_model}: {e}")
continue
if not has_dependents:
deletion_order.append((current_model, current_ids))
else:
deletion_order.append((current_model, current_ids))
# Step 2: Delete records in the collected order
for del_model, del_ids in deletion_order:
try:
if del_ids:
odoo.env[del_model].browse(del_ids).unlink()
color_log.Show(0, f"Deleted {len(del_ids)} records from {del_model}")
except Exception as e:
color_log.Show(3, f"Error deleting {del_model} records: {e}")
# Now delete the main records after all dependencies are gone
try:
if record_ids:
odoo.env[model_name].browse(record_ids).unlink()
color_log.Show(0, f"Deleted {len(record_ids)} records from {model_name}")
except Exception as e:
color_log.Show(3, f"Error deleting {model_name} records: {e}")
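As committed, the collection loop only appends a batch to deletion_order when it has no dependents or when it is revisited, so a related batch that itself has children is never scheduled, and only the base records are cleaned up by the fallback block above. One way to schedule every batch in true post-order is to re-push each batch with an "expanded" marker before pushing its dependents. The sketch below assumes the module-level odoo connection, color_log, and get_related_fields from this script; it is a sketch of the approach, not a drop-in replacement:

import odoorpc  # only for odoorpc.error.RPCError; odoo, color_log, get_related_fields come from this script

def delete_records_cascade_sketch(record_ids, model_name, db_name):
    """Post-order cascade delete: children are scheduled and removed before their parents."""
    stack = [(model_name, list(record_ids), False)]  # (model, ids, already_expanded)
    visited = set()
    deletion_order = []
    while stack:
        model, ids, expanded = stack.pop()
        if expanded:
            # Every dependent was pushed above this marker, so it is already scheduled.
            deletion_order.append((model, ids))
            continue
        ids = [rid for rid in ids if (model, rid) not in visited]
        if not ids:
            continue
        visited.update((model, rid) for rid in ids)
        # Re-push this batch as expanded, then its dependents on top of it,
        # so the dependents end up earlier in deletion_order.
        stack.append((model, ids, True))
        for related_model, fields in get_related_fields(db_name, model).items():
            for field in fields:
                try:
                    related_ids = odoo.env[related_model].search([(field, "in", ids)])
                    if related_ids:
                        stack.append((related_model, related_ids, False))
                except odoorpc.error.RPCError as e:
                    color_log.Show(3, f"Skipping {related_model}.{field}: {e}")
    for del_model, del_ids in deletion_order:
        try:
            odoo.env[del_model].browse(del_ids).unlink()
            color_log.Show(0, f"Deleted {len(del_ids)} records from {del_model}")
        except Exception as e:
            color_log.Show(3, f"Error deleting {del_model} records: {e}")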
# Function to delete records in parallel
def process_batch(batch, model_name, process_count, related_models):
"""Process a batch of records - archive or delete based on references."""
@@ -174,9 +233,7 @@ def process_batch(batch, model_name, process_count, related_models):
def main():
"""Main function to fetch records and process them in parallel."""
model = odoo.env[base_model]
color_log.Show(
2, f"{domain_filter}"
)
color_log.Show(2, f"{domain_filter}")
record_ids = model.search(domain_filter)
if not record_ids:
@ -192,16 +249,29 @@ def main():
for i in range(0, len(record_ids), process_size)
]
processes = []
if args.force:
delete_records_cascade(record_ids, base_model, 0, related_models, db_name)
# for i, batch in enumerate(batch_list, start=1):
# process = mp.Process(
# target=delete_records_cascade,
# args=(batch, base_model, f"Process-{i}", related_models,db_name),
# )
# processes.append(process)
# process.start()
for i, batch in enumerate(batch_list, start=1):
process = mp.Process(
target=process_batch, args=(batch, base_model, f'Process-{i}', related_models)
)
processes.append(process)
process.start()
# for process in processes:
# process.join()
else:
for i, batch in enumerate(batch_list, start=1):
process = mp.Process(
target=process_batch,
args=(batch, base_model, f"Process-{i}", related_models),
)
processes.append(process)
process.start()
for process in processes:
process.join()
for process in processes:
process.join()
color_log.Show(0, "Record deletion process completed.")
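Given the "(not tested)" note in the commit message, a quick smoke test against a scratch database could exercise the cascade on a throwaway pair of records before --force is pointed at real data. A sketch that reuses this script's odoo connection, db_name, and delete_records_cascade, with res.partner from the docstring example; the record values are hypothetical:

# Hypothetical smoke test: create a disposable parent/child pair, cascade-delete
# the parent, and confirm both records are gone. Run only against a scratch database.
partner_model = odoo.env["res.partner"]
parent_id = partner_model.create({"name": "cascade smoke test"})
child_id = partner_model.create({"name": "cascade smoke test child", "parent_id": parent_id})

delete_records_cascade([parent_id], "res.partner", 0, {}, db_name)

remaining = partner_model.search([("id", "in", [parent_id, child_id])])
assert not remaining, f"records still present: {remaining}"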