update delete_records
Some checks are pending
Setup Native Action / native (3.12.7) (push) Waiting to run
Setup Native Action / docker (3.12.7) (push) Waiting to run

This commit is contained in:
KaySar12 2025-03-18 17:39:42 +07:00
parent e2aebd10d2
commit 8b112faa27

View File

@ -1,64 +1,136 @@
#!/usr/bin/env python
"""
Delete records from an Odoo database based on a given model and domain filter.

Records that are still referenced by other models are archived
(``active = False``) instead of being deleted.

Usage:
    delete_records.py <db_name> <base_model> [--domain DOMAIN] [--process_size N]

Configuration:
    Create a .env file with the following variables to configure the Odoo
    connection (all of them have defaults):
        HOST = localhost
        PORT = 8069
        DOMAIN = [('employee', '=', False), ('customer_rank', '>', 0)]
        USERNAME = admin
        PASSWORD = admin
        PROCESS_SIZE = 30

Example:
    delete_records.py enterprise-ambio res.partner
    delete_records.py enterprise-ambio res.partner --domain "[('active', '=', False)]" --process_size 10
"""
__copyright__ = "Copyright 2025, NextZenOS"
__email__ = "techgroup@nextzenos.com"
__license__ = "GPLv3"
__maintainer__ = "hoangvv"
__status__ = "Development"
__version__ = "0.0.1"
import odoorpc
import json
import os
import multiprocessing as mp
import ast
import argparse
from dotenv import load_dotenv
import color_log
# Pull connection settings from the environment; a .env file is honoured
# via python-dotenv.
load_dotenv()

HOST = os.getenv("HOST", "localhost")
PORT = int(os.getenv("PORT", "8069"))
USERNAME = os.getenv("USERNAME", "admin")
PASSWORD = os.getenv("PASSWORD", "admin")
# Fallbacks used when the corresponding CLI option is not supplied.
DEFAULT_DOMAIN = os.getenv("DOMAIN", "[]")
DEFAULT_PROCESS_SIZE = int(os.getenv("PROCESS_SIZE", "30"))

# Command line: two positionals (database, model) plus optional overrides
# for the domain filter and the degree of parallelism.
parser = argparse.ArgumentParser(
    description="Delete records from an Odoo model based on a domain filter."
)
parser.add_argument("db_name", help="Database name")
parser.add_argument("base_model", help="Odoo model to delete records from")
parser.add_argument(
    "--domain",
    type=str,
    default=DEFAULT_DOMAIN,
    help="Domain filter (default from .env)",
)
parser.add_argument(
    "--process_size",
    type=int,
    default=DEFAULT_PROCESS_SIZE,
    help="Number of parallel processes (default from .env)",
)
args = parser.parse_args()

# Convenient module-level aliases used by the rest of the script.
db_name = args.db_name
base_model = args.base_model
domain_str = args.domain
process_size = args.process_size
# Odoo connection setup. Host/port come from the environment; the stale
# hard-coded connection (workspace.local / fixed db, model and credentials)
# that previously clobbered the CLI arguments has been removed.
odoo = odoorpc.ODOO(HOST, port=PORT)
color_log.Show(2, ("Available databases:", odoo.db.list()))

# Login to Odoo; abort with a non-zero exit code on failure.
try:
    odoo.login(db_name, USERNAME, PASSWORD)
    color_log.Show(
        0, f"Connected to Odoo at {HOST}:{PORT}, Database: {db_name}, Model: {base_model}"
    )
except Exception as e:
    color_log.Show(1, f"Fail to Connect to Odoo Server {e}")
    raise SystemExit(1)

# Convert the DOMAIN string into a Python list. literal_eval only accepts
# literals, so an arbitrary expression in .env/CLI cannot execute code.
try:
    domain_filter = ast.literal_eval(domain_str)
except Exception as e:
    color_log.Show(3, f"Invalid DOMAIN format: {e}")
    raise SystemExit(1)
# Function to fetch related models
def get_related_fields(db_name, base_model):
    """Fetch related fields for a given model, using a cached file to reduce API calls.

    Scans every model registered in ``ir.model`` and collects the relational
    fields (many2one / many2many / one2many) whose target is *base_model*.

    :param db_name: database name, used only to namespace the cache file.
    :param base_model: technical model name (e.g. ``res.partner``).
    :return: dict mapping model name -> list of field names referencing
        *base_model*; ``{}`` if the Odoo query fails entirely.
    """
    cache_file = f"{db_name}-{base_model}.cache.json"

    # Load from cache if it exists — fields_get() over every model is slow.
    if os.path.exists(cache_file):
        with open(cache_file, "r") as f:
            related_models = json.load(f)
        color_log.Show(2, f"Loaded related models from cache ({cache_file}).")
        return related_models

    color_log.Show(2, f"Fetching related models for {base_model} from Odoo...")
    related_models = {}
    try:
        all_model_ids = odoo.env["ir.model"].search([])
        all_models = odoo.env["ir.model"].read(all_model_ids, ["model"])
        for model in all_models:
            model_name = model["model"]
            try:
                fields = odoo.env[model_name].fields_get()  # field metadata
                for field_name, field_info in fields.items():
                    if (
                        field_info.get("type") in ["many2one", "many2many", "one2many"]
                        and field_info.get("relation") == base_model
                    ):
                        related_models.setdefault(model_name, []).append(field_name)
            except Exception as e:
                # Some models (abstract/transient) reject fields_get; skip them.
                color_log.Show(3, f"Skipping model {model_name}: {e}")

        # Save to cache for subsequent runs.
        with open(cache_file, "w") as f:
            json.dump(related_models, f, indent=4)
        return related_models
    except Exception as e:
        color_log.Show(3, f"Error fetching related models: {e}")
        return {}


def split_array(array, size):
    """Split a list into chunks of at most *size* elements (order preserved)."""
    return [array[i : i + size] for i in range(0, len(array), size)]
# Function to delete records in parallel
def process_batch(batch, model_name, process_count, related_models):
    """Process a batch of records - archive or delete based on references.

    For each record id in *batch*: if any related model still references it,
    the record is archived (``active = False``); otherwise it is unlinked.
    Failures on individual records are counted as skipped, never fatal.

    :param batch: list of record ids to process.
    :param model_name: technical model name the ids belong to.
    :param process_count: label identifying the worker (used in log lines).
    :param related_models: dict model -> list of field names, as returned by
        get_related_fields().
    """
    model = odoo.env[model_name]
    # NOTE(review): counter initialisation was hidden between diff hunks in
    # the original view; zero-init matches the summary line below.
    deleted_count = 0
    archived_count = 0
    skipped_count = 0
    for record_id in batch:
        # Referenced anywhere? any() short-circuits on the first hit.
        is_referenced = any(
            odoo.env[related_model].search_count([(field, "=", record_id)]) > 0
            for related_model, fields in related_models.items()
            for field in fields
        )
        try:
            if is_referenced:
                model.browse(record_id).write({"active": False})
                archived_count += 1
                color_log.Show(
                    2, f"{process_count}: Archived {model_name} ID {record_id}"
                )
            else:
                model.unlink([record_id])
                deleted_count += 1
                color_log.Show(
                    2, f"{process_count}: Deleted {model_name} ID {record_id}"
                )
        except Exception as e:
            skipped_count += 1
            color_log.Show(
                3, f"{process_count}: Skipped {model_name} ID {record_id}: {e}"
            )
    color_log.Show(
        0,
        f"{process_count}: {model_name} - Deleted: {deleted_count}, Archived: {archived_count}, Skipped: {skipped_count}.",
    )
# Main function to execute deletion
def main():
    """Fetch the matching records and process them in parallel batches.

    Searches *base_model* with the parsed domain filter, splits the matching
    ids into chunks of *process_size*, and hands each chunk to a separate
    process running process_batch().
    """
    model = odoo.env[base_model]
    color_log.Show(2, f"{domain_filter}")
    record_ids = model.search(domain_filter)
    if not record_ids:
        color_log.Show(
            1, f"No records found for model {base_model} with the given domain."
        )
        return

    related_models = get_related_fields(db_name, base_model)

    # Split record ids into batches of process_size each.
    batch_list = [
        record_ids[i : i + process_size]
        for i in range(0, len(record_ids), process_size)
    ]

    processes = []
    for i, batch in enumerate(batch_list, start=1):
        process = mp.Process(
            name=f"Process-{i}",
            target=process_batch,
            args=(batch, base_model, f"Process-{i}", related_models),
        )
        processes.append(process)
        process.start()

    # Ensure all workers complete before reporting success.
    for process in processes:
        process.join()

    color_log.Show(0, "Record deletion process completed.")


if __name__ == "__main__":
    main()