update delete records script

KaySar12 2025-03-18 16:57:40 +07:00
parent 297b846280
commit e2aebd10d2
3 changed files with 132 additions and 1 deletion

.gitignore vendored (1 change)

@@ -56,3 +56,4 @@ package.json
 *.pyc
 /pragtech_hotel_management_addons/
 pragtech_hotel_management_addons
+*cache.json

@@ -1 +1 @@
-Subproject commit 1388a2c5e3f4b90d908c8ca7be5d7f654049ced7
+Subproject commit 13ba4761ec37b117293b5cc2d96948e02726e5f6

setup/delete_records.py Executable file (130 lines added)

@@ -0,0 +1,130 @@
#!/usr/bin/env python
import odoorpc
import json
import os
import multiprocessing as mp
import ast

# Odoo connection setup
odoo = odoorpc.ODOO("workspace.local", port=8069)
print("Available databases:", odoo.db.list())
db_name = "enterprise-ambio"
base_model = "res.partner"  # Change this to any model
domain = "[('employee', '=', False), ('customer_rank', '>', 0)]"
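# Number of record IDs handled by each worker process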
process_size = 30
odoo.login(db_name, "admin", "admin")


def get_related_fields(db_name, base_model):
    """Fetch related fields for a given model, using a cached file to reduce API calls."""
    cache_file = f"{db_name}-{base_model}.cache.json"

    # Load from cache if it exists
    if os.path.exists(cache_file):
        with open(cache_file, "r") as f:
            related_models = json.load(f)
        print(f"Loaded related models from cache ({cache_file}).")
        return related_models

    print(f"Fetching related models for {base_model} from Odoo...")
    related_models = {}

    # Get all models
    all_model_ids = odoo.env["ir.model"].search([])
    all_models = odoo.env["ir.model"].read(all_model_ids, ["model"])

    for model in all_models:
        model_name = model["model"]
        try:
            fields = odoo.env[model_name].fields_get()  # Get field metadata
            for field_name, field_info in fields.items():
                if (
                    field_info.get("type") in ["many2one", "many2many", "one2many"]
                    and field_info.get("relation") == base_model
                ):
                    related_models.setdefault(model_name, []).append(field_name)
        except Exception as e:
            print(f"Skipping model {model_name}: {e}")

    # Save to cache
    with open(cache_file, "w") as f:
        json.dump(related_models, f, indent=4)
    print(f"Saved related models to cache ({cache_file}).")
    return related_models


def split_array(array, size):
    """Split a list into chunks of specified size."""
    return [array[i : i + size] for i in range(0, len(array), size)]


def process_batch(batch, model_name, process_count, related_models):
    """Process a batch of records - archive or delete based on references."""
    model = odoo.env[model_name]
    archived_count = 0
    deleted_count = 0
    skipped_count = 0

    for record_id in batch:
        is_referenced = False

        # Check references in related models
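        # NOTE: this issues one search_count RPC per field until a match is found,
        # which can be slow when the base model has many related fields.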
        for related_model, fields in related_models.items():
            for field in fields:
                if odoo.env[related_model].search_count([(field, "=", record_id)]) > 0:
                    is_referenced = True
                    break
            if is_referenced:
                break  # Exit loop early if reference is found

        try:
            if is_referenced:
                model.browse(record_id).write({"active": False})  # Archive
                archived_count += 1
                print(f"{process_count}: Archived {model_name} ID {record_id}")
            else:
                model.unlink([record_id])  # Delete if no references
                deleted_count += 1
                print(f"{process_count}: Deleted {model_name} ID {record_id}")
        except Exception as e:
            skipped_count += 1
            print(f"{process_count}: Skipped {model_name} ID {record_id}: {e}")

    print(
        f"{process_count}: {model_name} - Deleted: {deleted_count}, Archived: {archived_count}, Skipped: {skipped_count}."
    )


def main():
    """Main function to fetch records and process them in parallel."""
    model_name = base_model
    model = odoo.env[model_name]
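    # ast.literal_eval safely turns the domain string into an Odoo domain (a list of tuples)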
    domain_filter = ast.literal_eval(domain) if domain else []
    record_ids = model.search(domain_filter)
    related_models = get_related_fields(db_name, model_name)

    # Split records into batches
    batch_list = split_array(record_ids, process_size)

    processes = []
    process_count = 0
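    # Workers inherit the module-level odoorpc connection; with the default fork start
    # method on Linux they reuse the already-authenticated session.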
    for batch in batch_list:
        process_count += 1
        process = mp.Process(
            name=f"Process-{process_count}",
            target=process_batch,
            args=(batch, model_name, process_count, related_models),
        )
        print(f"Process-{process_count} Created")
        process.start()
        print(f"Process-{process_count} Started")
        processes.append(process)

    # Ensure all processes complete
    for process in processes:
        process.join()


if __name__ == "__main__":
    main()