diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..b4c6611b7 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,30 @@ +FROM odoo:16.0 +# Overwrite odoo with addons +COPY ./odoo /usr/lib/python3/dist-packages/ +COPY ./addons /usr/lib/python3/dist-packages/odoo +COPY ./deployment/entrypoint.sh / +# Copy entrypoint script and Odoo configuration file +# COPY deployment/entrypoint.sh / +# COPY deployment/odoo.conf /etc/odoo/ + +# Set permissions and Mount /var/lib/odoo to allow restoring filestore and /mnt/extra-addons for users addons and /mnt/backup for backup +RUN chown odoo /etc/odoo/odoo.conf \ + && mkdir -p /mnt/extra-addons \ + && chown -R odoo /mnt/extra-addons \ + && mkdir -p /var/lib/odoo/backup \ + && chown -R odoo /var/lib/odoo/backup +VOLUME ["/var/lib/odoo", "/mnt/extra-addons","/var/lib/odoo/backup"] + +# Expose Odoo services +EXPOSE 8069 8071 8072 + +# Set the default config file +ENV ODOO_RC /etc/odoo/odoo.conf + +# COPY deployment/wait-for-psql.py /usr/local/bin/wait-for-psql.py + +# Set default user when running the container +USER odoo + +ENTRYPOINT ["/entrypoint.sh"] +CMD ["odoo"] \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..6387ed643 --- /dev/null +++ b/Makefile @@ -0,0 +1,137 @@ +ifneq ("$(wildcard .env)", "") + include .env +endif +ifneq ("$(wildcard automation/automation.mk)", "") + include automation/automation.mk +endif +.SHELLFLAGS += ${SHELLFLAGS} -e +PWD = $(shell pwd) +UID = $(shell id -u) +GID = $(shell id -g) +PIP=${PWD}/venv/bin/pip +PYTHON=${PWD}/venv/bin/python +VENV := $(shell [ -d "${PWD}/venv" ] && echo "exists" || echo "missing") +DOCKERCMD=docker +DOCKER_COMPOSE_CMD=docker-compose +DOCKER_BUILD=$(DOCKERCMD) build +DOCKER_PUSH=$(DOCKERCMD) push +DOCKER_IMAGE=$(DOCKERCMD) image +DOCKER_EXEC=$(DOCKERCMD) exec +DEPLOY_PATH=${PWD}/deployment +SETUP_PATH=${PWD}/setup +CONFIG=odoo.conf +HUB=hub.nextzenos.com +CDN=https://cdn.nextzenos.com/CDN/NextERP 
+ORGANIZATION=nexterp +PROJECT := odoo18 +ODOO_IMAGE=${HUB}/${ORGANIZATION}/$(PROJECT) +TAG := $(shell git rev-parse --abbrev-ref HEAD) +CONTAINER_ID=${PROJECT}-${TAG} +VERSION := community +ADDONS=${PWD}/addons,${PWD}/odoo/addons,${PWD}/extra-addons +BACKUP=${CDN}/raw/branch/main/backup/${VERSION}/odoo18-main_2025-01-15_08-05-47.zip + +AUTOMATION_PATH=${PWD}/automation + +########### Automation ######### +deploy-native: + @echo "Not implemented" +##### Virtual Environment ##### + +check-virtualenv: + @if [ "$(VENV)" = "missing" ]; then \ + echo "Creating virtual environment..."; \ + python3 -m venv venv; \ + echo "Installing pip dependencies..."; \ + $(PIP) install --upgrade pip; \ + else \ + echo "Skipping Creating virtual environment..."; \ + fi; +##### Native Deployment######### +install: check-virtualenv + sudo apt update -y && \ + sudo apt install -y python3-pip libldap2-dev libpq-dev libsasl2-dev postgresql-client libreoffice wkhtmltopdf ansible terraform&& \ + ${PIP} install --no-input -r requirements.txt --break-system-packages +download_backup: + @if [ -z "${LINKDB}" ]; then \ + LinkDB=${BACKUP}; \ + read -p "LinkDownload [default: ${BACKUP}]: " InputLinkDB; \ + LinkDB=$${InputLinkDB:-${BACKUP}}; \ + else \ + LinkDB=${LINKDB}; \ + fi; \ + Filename=$$(basename $$LinkDB); \ + echo "Downloading $$Filename from: $$LinkDB"; \ + ${PWD}/setup/download_backup.sh $$LinkDB backup.zip +gen_config: + $(MAKE) download_backup LINKDB=${BACKUP} + ${PWD}/setup/init_config.sh --native ${ADDONS} ${DB_USER} ${DB_PASSWORD} ${DB_SERVER} ${DB_PORT} +run_server: + ${PYTHON} odoo-bin --config=${CONFIG} +run_test: + ${PYTHON} odoo-bin --test-tags :TestAccountMove.test_out_invoice_auto_post_monthly,TestAccountMove.test_included_tax --log-level=test --test-enable -d testdb-${TAG} --stop-after-init --config=${CONFIG} + ${PYTHON} odoo-bin db --config=${CONFIG} drop testdb-${TAG} +restore_database: + @read -p "Backup Path [default: ${PWD}/deployment/backup/backup.zip]: " InputbackupDB; 
\ + BackupDB=$${InputbackupDB:-${PWD}/deployment/backup/backup.zip}; \ + read -p "Enter the database name: " DBNAME; \ + echo "Restoring database: $$DBNAME"; \ + ${PYTHON} odoo-bin db --config=${CONFIG} load "$$DBNAME" "$$BackupDB" +scaffold_module: + read -p "Module Name: " ModuleName; \ + read -p "addons Path[eg:addons, extra-addons, exercise]: " ModulePath; \ + ${PYTHON} odoo-bin scaffold $$ModuleName && \ + mv $$ModuleName ${PWD}/$$ModulePath; +cleanup_addons: + @bash ${PWD}/setup/clean_up_addons.sh $(shell echo $(ADDONS) | tr ',' ' ') +install_modules: + ${PYTHON} odoo-bin --config=${CONFIG} -d ${DATABASE} -i ${MODULES} --xmlrpc-port=${PORT} + +##### Docker Deployment ######### +run_test_docker: + sudo ${DOCKER_EXEC} ${CONTAINER_ID} odoo --test-tags :TestAccountMove.test_out_invoice_auto_post_monthly,TestAccountMove.test_included_tax --log-level=test --test-enable -d testdb-${TAG} --stop-after-init --config=/etc/odoo/${CONFIG} --xmlrpc-port=8071 && \ + sudo ${DOCKER_EXEC} ${CONTAINER_ID} odoo db --config=/etc/odoo/${CONFIG} drop testdb-${TAG} +gen_config_docker: + $(MAKE) download_backup LINKDB=${BACKUP} + ${PWD}/setup/init_config.sh --docker ${ODOO_IMAGE} ${TAG} ${CONTAINER_ID} +build_image: + DOCKER_BUILDKIT=1 ${DOCKER_BUILD} . --progress plain --tag ${ODOO_IMAGE}:${TAG} +push_image: + $(DOCKER_PUSH) ${ODOO_IMAGE}:${TAG} +run_server_docker: + @if docker ps | grep -q "${CONTAINER_ID}"; then \ + echo "Container already running. Skipping docker-compose up -d."; \ + else \ + echo "Container not found. Running docker-compose up -d"; \ + cd ${DEPLOY_PATH} && \ + ${DOCKER_COMPOSE_CMD} up -d; \ + fi +update_tag: + ${SETUP_PATH}/update_tag.sh $(CURR_BRANCH) +restore_database_docker: + sudo ${DOCKER_EXEC} ${CONTAINER_ID} odoo db -c /etc/odoo/odoo.conf load backup /var/lib/odoo/backup/backup.zip +stop_server_docker: + @if ! docker ps | grep -q "${CONTAINER_ID}"; then \ + echo "Container not found. 
Skipping"; \ + else \ + cd ${DEPLOY_PATH} &&\ + ${DOCKER_COMPOSE_CMD} down; \ + fi +clean_up: + @if ! docker ps | grep -q "${CONTAINER_ID}"; then \ + echo "Container not found. Skipping"; \ + else \ + cd ${DEPLOY_PATH} &&\ + ${DOCKER_COMPOSE_CMD} down; \ + fi + find "${DEPLOY_PATH}" -mindepth 1 -maxdepth 1 \ + ! -name "etc" \ + ! -name "addons" \ + ! -name "backup" \ + ! -name "*.sh" \ + ! -name "*.template" \ + ! -name "*.py" \ + ! -name "*.yml" \ + -print0 | sudo xargs -0 rm -rf {} && \ + sudo rm -rf ${DEPLOY_PATH}/etc/* + diff --git a/deployment/backup/backup.zip b/deployment/backup/backup.zip new file mode 100644 index 000000000..36a5b1089 Binary files /dev/null and b/deployment/backup/backup.zip differ diff --git a/deployment/docker-compose.yml b/deployment/docker-compose.yml new file mode 100755 index 000000000..33064bcb4 --- /dev/null +++ b/deployment/docker-compose.yml @@ -0,0 +1,35 @@ +version: '3.8' +services: + app: + image: ${ODOO_IMAGE:-odoo}:${ODOO_TAG:-16} + container_name: ${ODOO_CONTAINER:-odoo} + # user: root + depends_on: + - db + ports: + - ${ODOO_PORT:-8069}:8069 + # - "${ODOO_CHAT_PORT:-8072}:8072" + tty: true + command: -- + environment: + - HOST=db + - USER=${PG_USER:-changeme} + - PASSWORD=${PG_PASS:-password} + volumes: + - ${ODOO_ADDONS:-./addons}:/mnt/extra-addons + - ${ODOO_CONFIG:-./etc}:/etc/odoo + - ${ODOO_BACKUP:-./backup}:/var/lib/odoo/backup + restart: always + db: + image: postgres:15 + # user: root + environment: + - POSTGRES_USER=${PG_USER:-changeme} + - POSTGRES_PASSWORD=${PG_PASS:-password} + - POSTGRES_DB=postgres + ports: + - ${PG_PORT:-5432}:5432 + restart: always + volumes: + - ${PG_DATA:-./postgresql}:/var/lib/postgresql/data + diff --git a/deployment/entrypoint.sh b/deployment/entrypoint.sh new file mode 100755 index 000000000..939544337 --- /dev/null +++ b/deployment/entrypoint.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +set -e + +if [ -v PASSWORD_FILE ]; then + PASSWORD="$(< $PASSWORD_FILE)" +fi + +# set the postgres database 
host, port, user and password according to the environment +# and pass them as arguments to the odoo process if not present in the config file +: ${HOST:=${DB_PORT_5432_TCP_ADDR:='db'}} +: ${PORT:=${DB_PORT_5432_TCP_PORT:=5432}} +: ${USER:=${DB_ENV_POSTGRES_USER:=${POSTGRES_USER:='odoo'}}} +: ${PASSWORD:=${DB_ENV_POSTGRES_PASSWORD:=${POSTGRES_PASSWORD:='odoo'}}} + +DB_ARGS=() +function check_config() { + param="$1" + value="$2" + if grep -q -E "^\s*\b${param}\b\s*=" "$ODOO_RC" ; then + value=$(grep -E "^\s*\b${param}\b\s*=" "$ODOO_RC" |cut -d " " -f3|sed 's/["\n\r]//g') + fi; + DB_ARGS+=("--${param}") + DB_ARGS+=("${value}") +} +check_config "db_host" "$HOST" +check_config "db_port" "$PORT" +check_config "db_user" "$USER" +check_config "db_password" "$PASSWORD" + +case "$1" in + -- | odoo) + shift + if [[ "$1" == "scaffold" ]] ; then + exec odoo "$@" + else + # shellcheck disable=SC2068 + wait-for-psql.py ${DB_ARGS[@]} --timeout=30 + exec odoo "$@" "${DB_ARGS[@]}" + fi + ;; + -*) + # shellcheck disable=SC2068 + wait-for-psql.py ${DB_ARGS[@]} --timeout=30 + exec odoo "$@" "${DB_ARGS[@]}" + ;; + *) + exec "$@" +esac + +exit 1 \ No newline at end of file diff --git a/deployment/env.template b/deployment/env.template new file mode 100755 index 000000000..009edac12 --- /dev/null +++ b/deployment/env.template @@ -0,0 +1,15 @@ +COMPOSE_PROJECT_NAME= +#Odoo +ODOO_IMAGE= +ODOO_TAG= +ODOO_PORT=10017 +ODOO_CONTAINER= +# ODOO_CHAT_PORT= +ODOO_ADDONS=./addons +ODOO_CONFIG=./etc +ODOO_BACKUP=./backup +#Database +PG_PORT= +PG_USER= +PG_PASS= +ENTRYPOINT=./entrypoint.sh diff --git a/deployment/etc/odoo.conf b/deployment/etc/odoo.conf new file mode 100644 index 000000000..ca1198737 --- /dev/null +++ b/deployment/etc/odoo.conf @@ -0,0 +1,9 @@ +[options] +addons_path = /mnt/extra-addons +data_dir = /var/lib/odoo +db_host = db +db_port = 5432 +db_user = default_repo +db_password = smartyourlife +proxy_mode = True + diff --git a/deployment/odoo.conf.template 
b/deployment/odoo.conf.template new file mode 100755 index 000000000..a295cb26b --- /dev/null +++ b/deployment/odoo.conf.template @@ -0,0 +1,7 @@ +[options] +addons_path = /mnt/extra-addons +data_dir = /var/lib/odoo +db_host = +db_port = +db_user = +db_password = \ No newline at end of file diff --git a/deployment/wait-for-psql.py b/deployment/wait-for-psql.py new file mode 100755 index 000000000..799639741 --- /dev/null +++ b/deployment/wait-for-psql.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +import argparse +import psycopg2 +import sys +import time + + +if __name__ == '__main__': + arg_parser = argparse.ArgumentParser() + arg_parser.add_argument('--db_host', required=True) + arg_parser.add_argument('--db_port', required=True) + arg_parser.add_argument('--db_user', required=True) + arg_parser.add_argument('--db_password', required=True) + arg_parser.add_argument('--timeout', type=int, default=5) + + args = arg_parser.parse_args() + + start_time = time.time() + while (time.time() - start_time) < args.timeout: + try: + conn = psycopg2.connect(user=args.db_user, host=args.db_host, port=args.db_port, password=args.db_password, dbname='postgres') + error = '' + break + except psycopg2.OperationalError as e: + error = e + else: + conn.close() + time.sleep(1) + + if error: + print("Database connection failure: %s" % error, file=sys.stderr) + sys.exit(1) \ No newline at end of file diff --git a/odoo.conf.template b/odoo.conf.template new file mode 100644 index 000000000..9d287c8ee --- /dev/null +++ b/odoo.conf.template @@ -0,0 +1,12 @@ +[options] +db_host = +db_port = 5432 +db_user = +db_password = +xmlrpc_port = +addons_path = +dbfilter = +proxy_mode = +logfile = /var/log/odoo/odoo.log +db_maxconn = 1000 +list_db = \ No newline at end of file diff --git a/setup/base-addons.txt b/setup/base-addons.txt new file mode 100644 index 000000000..b4792a77a --- /dev/null +++ b/setup/base-addons.txt @@ -0,0 +1,1195 @@ +account +account_check_printing +account_debit_note 
+account_edi +account_edi_proxy_client +account_edi_ubl_cii +account_edi_ubl_cii_tax_extension +account_fleet +account_payment +account_peppol +account_qr_code_emv +account_qr_code_sepa +account_tax_python +account_test +account_update_tax_tags +analytic +attachment_indexation +auth_ldap +auth_oauth +auth_passkey +auth_password_policy +auth_password_policy_portal +auth_password_policy_signup +auth_signup +auth_totp +auth_totp_mail +auth_totp_mail_enforce +auth_totp_portal +barcodes +barcodes_gs1_nomenclature +base_address_extended +base_automation +base_geolocalize +base_iban +base_import +base_import_module +base_install_request +base_setup +base_sparse_field +base_vat +board +bus +calendar +calendar_sms +certificate +cloud_storage +cloud_storage_azure +cloud_storage_google +contacts +crm +crm_iap_enrich +crm_iap_mine +crm_livechat +crm_mail_plugin +crm_sms +data_recycle +delivery +delivery_mondialrelay +delivery_stock_picking_batch +digest +event +event_booth +event_booth_sale +event_crm +event_crm_sale +event_product +event_sale +event_sms +fleet +gamification +gamification_sale_crm +google_account +google_calendar +google_gmail +google_recaptcha +hr +hr_attendance +hr_calendar +hr_contract +hr_expense +hr_fleet +hr_gamification +hr_holidays +hr_holidays_attendance +hr_holidays_contract +hr_homeworking +hr_homeworking_calendar +hr_hourly_cost +hr_livechat +hr_maintenance +hr_org_chart +hr_presence +hr_recruitment +hr_recruitment_skills +hr_recruitment_survey +hr_skills +hr_skills_slides +hr_skills_survey +hr_timesheet +hr_timesheet_attendance +hr_work_entry +hr_work_entry_contract +hr_work_entry_holidays +html_editor +http_routing +hw_drivers +hw_escpos +hw_posbox_homepage +iap +iap_crm +iap_mail +im_livechat +l10n_account_edi_ubl_cii_tests +l10n_ae +l10n_anz_ubl_pint +l10n_ar +l10n_ar_pos +l10n_ar_website_sale +l10n_ar_withholding +l10n_at +l10n_au +l10n_bd +l10n_be +l10n_be_pos_restaurant +l10n_be_pos_sale +l10n_bf +l10n_bg +l10n_bh +l10n_bj +l10n_bo +l10n_br 
+l10n_br_sales +l10n_br_website_sale +l10n_ca +l10n_cd +l10n_cf +l10n_cg +l10n_ch +l10n_ch_pos +l10n_ci +l10n_cl +l10n_cm +l10n_cn +l10n_cn_city +l10n_co +l10n_co_pos +l10n_cr +l10n_cy +l10n_cz +l10n_de +l10n_din5008 +l10n_din5008_purchase +l10n_din5008_repair +l10n_din5008_sale +l10n_din5008_stock +l10n_dk +l10n_dk_oioubl +l10n_do +l10n_dz +l10n_ec +l10n_ec_stock +l10n_ec_website_sale +l10n_ee +l10n_eg +l10n_eg_edi_eta +l10n_es +l10n_es_edi_facturae +l10n_es_edi_sii +l10n_es_edi_tbai +l10n_es_edi_tbai_pos +l10n_es_modelo130 +l10n_es_pos +l10n_et +l10n_eu_oss +l10n_fi +l10n_fi_sale +l10n_fr +l10n_fr_account +l10n_fr_facturx_chorus_pro +l10n_fr_hr_holidays +l10n_fr_hr_work_entry_holidays +l10n_fr_pos_cert +l10n_ga +l10n_gcc_invoice +l10n_gcc_invoice_stock_account +l10n_gcc_pos +l10n_gn +l10n_gq +l10n_gr +l10n_gt +l10n_gw +l10n_hk +l10n_hn +l10n_hr +l10n_hr_kuna +l10n_hu +l10n_hu_edi +l10n_id +l10n_id_efaktur +l10n_id_pos +l10n_ie +l10n_il +l10n_in +l10n_in_edi +l10n_in_edi_ewaybill +l10n_in_ewaybill_stock +l10n_in_gstin_status +l10n_in_hr_holidays +l10n_in_pos +l10n_in_purchase +l10n_in_purchase_stock +l10n_in_sale +l10n_in_sale_stock +l10n_in_stock +l10n_in_withholding +l10n_in_withholding_payment +l10n_iq +l10n_it +l10n_it_edi +l10n_it_edi_doi +l10n_it_edi_ndd +l10n_it_edi_website_sale +l10n_it_edi_withholding +l10n_it_stock_ddt +l10n_jo +l10n_jo_edi +l10n_jp +l10n_jp_ubl_pint +l10n_ke +l10n_ke_edi_tremol +l10n_km +l10n_kr +l10n_kw +l10n_kz +l10n_latam_base +l10n_latam_check +l10n_latam_invoice_document +l10n_lb_account +l10n_lt +l10n_lu +l10n_lv +l10n_ma +l10n_mc +l10n_ml +l10n_mn +l10n_mt +l10n_mt_pos +l10n_mu_account +l10n_mx +l10n_mx_hr +l10n_my +l10n_my_edi +l10n_my_ubl_pint +l10n_mz +l10n_ne +l10n_ng +l10n_nl +l10n_no +l10n_nz +l10n_pa +l10n_pe +l10n_pe_pos +l10n_pe_website_sale +l10n_ph +l10n_pk +l10n_pl +l10n_pt +l10n_qa +l10n_ro +l10n_ro_edi +l10n_rs +l10n_rw +l10n_sa +l10n_sa_edi +l10n_sa_edi_pos +l10n_sa_pos +l10n_se +l10n_sg +l10n_sg_ubl_pint +l10n_si 
+l10n_sk +l10n_sn +l10n_syscohada +l10n_td +l10n_test_pos_qr_payment +l10n_tg +l10n_th +l10n_tn +l10n_tr +l10n_tw +l10n_tz_account +l10n_ua +l10n_ug +l10n_uk +l10n_us +l10n_us_account +l10n_uy +l10n_ve +l10n_vn +l10n_vn_edi_viettel +l10n_za +l10n_zm_account +link_tracker +loyalty +lunch +mail +mail_bot +mail_bot_hr +mail_group +mail_plugin +maintenance +marketing_card +mass_mailing +mass_mailing_crm +mass_mailing_crm_sms +mass_mailing_event +mass_mailing_event_sms +mass_mailing_event_track +mass_mailing_event_track_sms +mass_mailing_sale +mass_mailing_sale_sms +mass_mailing_slides +mass_mailing_sms +mass_mailing_themes +membership +microsoft_account +microsoft_calendar +microsoft_outlook +mrp +mrp_account +mrp_landed_costs +mrp_product_expiry +mrp_repair +mrp_subcontracting +mrp_subcontracting_account +mrp_subcontracting_dropshipping +mrp_subcontracting_landed_costs +mrp_subcontracting_purchase +mrp_subcontracting_repair +onboarding +partner_autocomplete +payment +payment_adyen +payment_aps +payment_asiapay +payment_authorize +payment_buckaroo +payment_custom +payment_demo +payment_flutterwave +payment_mercado_pago +payment_mollie +payment_paypal +payment_razorpay +payment_razorpay_oauth +payment_stripe +payment_worldline +payment_xendit +phone_validation +point_of_sale +portal +portal_rating +pos_account_tax_python +pos_adyen +pos_discount +pos_epson_printer +pos_event +pos_hr +pos_hr_restaurant +pos_loyalty +pos_mercado_pago +pos_mrp +pos_online_payment +pos_online_payment_self_order +pos_paytm +pos_razorpay +pos_restaurant +pos_restaurant_adyen +pos_restaurant_loyalty +pos_restaurant_stripe +pos_sale +pos_sale_loyalty +pos_sale_margin +pos_self_order +pos_self_order_adyen +pos_self_order_epson_printer +pos_self_order_razorpay +pos_self_order_sale +pos_self_order_stripe +pos_six +pos_sms +pos_stripe +pos_viva_wallet +privacy_lookup +product +product_email_template +product_expiry +product_images +product_margin +product_matrix +project +project_account 
+project_hr_expense +project_hr_skills +project_mail_plugin +project_mrp +project_mrp_account +project_mrp_sale +project_mrp_stock_landed_costs +project_purchase +project_purchase_stock +project_sale_expense +project_sms +project_stock +project_stock_account +project_stock_landed_costs +project_timesheet_holidays +project_todo +purchase +purchase_edi_ubl_bis3 +purchase_mrp +purchase_product_matrix +purchase_repair +purchase_requisition +purchase_requisition_sale +purchase_requisition_stock +purchase_stock +rating +repair +resource +resource_mail +sale +sale_async_emails +sale_crm +sale_edi_ubl +sale_expense +sale_expense_margin +sale_loyalty +sale_loyalty_delivery +sale_management +sale_margin +sale_mrp +sale_mrp_margin +sale_pdf_quote_builder +sale_product_matrix +sale_project +sale_project_stock +sale_project_stock_account +sale_purchase +sale_purchase_project +sale_purchase_stock +sale_service +sale_sms +sale_stock +sale_stock_margin +sale_timesheet +sale_timesheet_margin +sales_team +sms +snailmail +snailmail_account +social_media +spreadsheet +spreadsheet_account +spreadsheet_dashboard +spreadsheet_dashboard_account +spreadsheet_dashboard_event_sale +spreadsheet_dashboard_hr_expense +spreadsheet_dashboard_hr_timesheet +spreadsheet_dashboard_im_livechat +spreadsheet_dashboard_pos_hr +spreadsheet_dashboard_pos_restaurant +spreadsheet_dashboard_sale +spreadsheet_dashboard_sale_timesheet +spreadsheet_dashboard_stock_account +spreadsheet_dashboard_website_sale +spreadsheet_dashboard_website_sale_slides +stock +stock_account +stock_delivery +stock_dropshipping +stock_fleet +stock_landed_costs +stock_picking_batch +stock_sms +survey +test_base_automation +test_crm_full +test_discuss_full +test_event_full +test_hr_contract_calendar +test_html_field_history +test_import_export +test_mail +test_mail_full +test_mail_sms +test_mass_mailing +test_resource +test_sale_product_configurators +test_spreadsheet +test_website +test_website_modules +test_website_slides_full 
+theme_default +transifex +uom +utm +web +web_editor +web_hierarchy +web_tour +web_unsplash +website +website_blog +website_cf_turnstile +website_crm +website_crm_iap_reveal +website_crm_livechat +website_crm_partner_assign +website_crm_sms +website_customer +website_event +website_event_booth +website_event_booth_exhibitor +website_event_booth_sale +website_event_booth_sale_exhibitor +website_event_crm +website_event_exhibitor +website_event_jitsi +website_event_meet +website_event_meet_quiz +website_event_sale +website_event_track +website_event_track_live +website_event_track_live_quiz +website_event_track_quiz +website_forum +website_google_map +website_hr_recruitment +website_jitsi +website_links +website_livechat +website_mail +website_mail_group +website_mass_mailing +website_mass_mailing_sms +website_membership +website_partner +website_payment +website_payment_authorize +website_profile +website_project +website_sale +website_sale_autocomplete +website_sale_collect +website_sale_comparison +website_sale_comparison_wishlist +website_sale_loyalty +website_sale_mass_mailing +website_sale_mondialrelay +website_sale_slides +website_sale_stock +website_sale_stock_wishlist +website_sale_wishlist +website_slides +website_slides_forum +website_slides_survey +website_sms +website_twitter +.tx +account_3way_match +account_accountant +account_accountant_batch_payment +account_accountant_check_printing +account_accountant_fleet +account_asset +account_asset_fleet +account_auto_transfer +account_avatax +account_avatax_geolocalize +account_avatax_sale +account_avatax_stock +account_bank_statement_extract +account_bank_statement_import +account_bank_statement_import_camt +account_bank_statement_import_csv +account_bank_statement_import_ofx +account_bank_statement_import_qif +account_base_import +account_batch_payment +account_budget +account_disallowed_expenses +account_disallowed_expenses_fleet +account_external_tax +account_extract +account_followup 
+account_inter_company_rules +account_intrastat +account_invoice_extract +account_invoice_extract_purchase +account_iso20022 +account_loans +account_online_payment +account_online_synchronization +account_reports +account_reports_cash_basis +account_saft +account_saft_import +account_sepa_direct_debit +account_winbooks_import +accountant +analytic_enterprise +appointment +appointment_account_payment +appointment_crm +appointment_google_calendar +appointment_hr +appointment_hr_recruitment +appointment_microsoft_calendar +appointment_sms +approvals +approvals_purchase +approvals_purchase_stock +base_automation_hr_contract +contacts_enterprise +crm_enterprise +crm_enterprise_partner_assign +crm_helpdesk +crm_sale_subscription +currency_rate_live +data_cleaning +data_merge_crm +data_merge_helpdesk +data_merge_project +data_merge_stock_account +data_merge_utm +delivery_bpost +delivery_dhl +delivery_easypost +delivery_fedex +delivery_fedex_rest +delivery_iot +delivery_sendcloud +delivery_shiprocket +delivery_starshipit +delivery_ups +delivery_ups_rest +delivery_usps +digest_enterprise +documents +documents_account +documents_account_peppol +documents_approvals +documents_fleet +documents_fsm +documents_hr +documents_hr_contract +documents_hr_expense +documents_hr_holidays +documents_hr_payroll +documents_hr_recruitment +documents_l10n_be_hr_payroll +documents_l10n_ch_hr_payroll +documents_l10n_hk_hr_payroll +documents_l10n_ke_hr_payroll +documents_product +documents_project +documents_project_sale +documents_project_sign +documents_sign +documents_spreadsheet +event_enterprise +event_social +frontdesk +helpdesk +helpdesk_account +helpdesk_fsm +helpdesk_fsm_report +helpdesk_fsm_sale +helpdesk_holidays +helpdesk_mail_plugin +helpdesk_repair +helpdesk_sale +helpdesk_sale_loyalty +helpdesk_sale_timesheet +helpdesk_sms +helpdesk_stock +helpdesk_stock_account +helpdesk_timesheet +hr_appraisal +hr_appraisal_contract +hr_appraisal_skills +hr_appraisal_survey +hr_attendance_gantt 
+hr_contract_reports +hr_contract_salary +hr_contract_salary_holidays +hr_contract_salary_payroll +hr_contract_sign +hr_expense_extract +hr_expense_predict_product +hr_gantt +hr_holidays_contract_gantt +hr_holidays_gantt +hr_mobile +hr_payroll +hr_payroll_account +hr_payroll_account_iso20022 +hr_payroll_attendance +hr_payroll_expense +hr_payroll_fleet +hr_payroll_holidays +hr_payroll_planning +hr_recruitment_extract +hr_recruitment_integration_base +hr_recruitment_integration_monster +hr_recruitment_integration_skills_monster +hr_recruitment_integration_website +hr_recruitment_integration_website_monster +hr_recruitment_reports +hr_recruitment_sign +hr_referral +hr_work_entry_contract_attendance +hr_work_entry_contract_enterprise +hr_work_entry_contract_planning +hr_work_entry_contract_planning_attendance +hr_work_entry_holidays_enterprise +iap_extract +industry_fsm +industry_fsm_repair +industry_fsm_report +industry_fsm_sale +industry_fsm_sale_report +industry_fsm_sale_subscription +industry_fsm_sms +industry_fsm_stock +iot +knowledge +l10n_ae_corporate_tax_report +l10n_ae_hr_payroll +l10n_ae_hr_payroll_account +l10n_ae_reports +l10n_ar_edi +l10n_ar_reports +l10n_at_reports +l10n_at_saft +l10n_au_aba +l10n_au_hr_payroll +l10n_au_hr_payroll_account +l10n_au_reports +l10n_bd_hr_payroll +l10n_bd_hr_payroll_account +l10n_bd_reports +l10n_be_account_disallowed_expenses_fleet +l10n_be_coda +l10n_be_codabox +l10n_be_disallowed_expenses +l10n_be_hr_contract_salary +l10n_be_hr_payroll +l10n_be_hr_payroll_account +l10n_be_hr_payroll_attendance +l10n_be_hr_payroll_dimona +l10n_be_hr_payroll_fleet +l10n_be_hr_payroll_group_s +l10n_be_hr_payroll_partena +l10n_be_hr_payroll_sd_worx +l10n_be_hr_payroll_ucm +l10n_be_intrastat +l10n_be_reports +l10n_be_reports_post_wizard +l10n_be_reports_prorata +l10n_be_reports_sms +l10n_be_soda +l10n_bg_reports +l10n_bo_reports +l10n_br_avatax +l10n_br_avatax_sale +l10n_br_edi +l10n_br_edi_sale +l10n_br_edi_website_sale +l10n_br_reports 
+l10n_br_sale_subscription +l10n_br_test_avatax_sale +l10n_ca_check_printing +l10n_ca_payment_cpa005 +l10n_ca_reports +l10n_ch_hr_payroll +l10n_ch_hr_payroll_account +l10n_ch_reports +l10n_cl_edi +l10n_cl_edi_exports +l10n_cl_edi_factoring +l10n_cl_edi_pos +l10n_cl_edi_stock +l10n_cl_edi_website_sale +l10n_cl_reports +l10n_co_dian +l10n_co_edi +l10n_co_edi_website_sale +l10n_co_reports +l10n_cy_reports +l10n_cz_reports +l10n_de_intrastat +l10n_de_pos_cert +l10n_de_pos_res_cert +l10n_de_reports +l10n_din5008_account_followup +l10n_din5008_industry_fsm +l10n_din5008_sale_renting +l10n_dk_intrastat +l10n_dk_reports +l10n_dk_saft_import +l10n_do_reports +l10n_dz_reports +l10n_ec_edi +l10n_ec_edi_pos +l10n_ec_edi_stock +l10n_ec_reports +l10n_ec_reports_ats +l10n_ee_reports +l10n_ee_rounding +l10n_eg_hr_payroll +l10n_eg_hr_payroll_account +l10n_employment_hero +l10n_es_real_estates +l10n_es_reports +l10n_es_reports_2024 +l10n_es_reports_modelo130 +l10n_es_sale_amazon +l10n_eu_oss_reports +l10n_fi_reports +l10n_fr_fec_import +l10n_fr_hr_payroll +l10n_fr_hr_payroll_account +l10n_fr_intrastat +l10n_fr_reports +l10n_gr_reports +l10n_hk_hr_payroll +l10n_hk_hr_payroll_account +l10n_hk_reports +l10n_hr_reports +l10n_hu_reports +l10n_id_hr_payroll +l10n_id_hr_payroll_account +l10n_ie_reports +l10n_in_asset +l10n_in_documents +l10n_in_enet_batch_payment +l10n_in_hr_payroll +l10n_in_hr_payroll_account +l10n_in_qr_code_bill_scan +l10n_in_reports +l10n_in_reports_gstr +l10n_in_reports_gstr_pos +l10n_in_reports_gstr_spreadsheet +l10n_in_reports_tds_tcs +l10n_it_pos +l10n_it_reports +l10n_it_riba +l10n_jo_hr_payroll +l10n_jo_hr_payroll_account +l10n_jp_zengin +l10n_ke_edi_oscu +l10n_ke_edi_oscu_mrp +l10n_ke_edi_oscu_pos +l10n_ke_edi_oscu_stock +l10n_ke_hr_payroll +l10n_ke_hr_payroll_account +l10n_ke_hr_payroll_shif +l10n_ke_reports +l10n_kr_reports +l10n_kz_reports +l10n_lt_hr_payroll +l10n_lt_hr_payroll_account +l10n_lt_intrastat +l10n_lt_reports +l10n_lt_saft +l10n_lt_saft_import 
+l10n_lu_hr_payroll +l10n_lu_hr_payroll_account +l10n_lu_reports +l10n_lv_reports +l10n_ma_hr_payroll +l10n_ma_hr_payroll_account +l10n_ma_reports +l10n_mn_reports +l10n_mt_reports +l10n_mx_edi +l10n_mx_edi_extended +l10n_mx_edi_landing +l10n_mx_edi_pos +l10n_mx_edi_sale +l10n_mx_edi_stock +l10n_mx_edi_website_sale +l10n_mx_hr_payroll +l10n_mx_hr_payroll_account +l10n_mx_reports +l10n_mx_reports_closing +l10n_mx_xml_polizas +l10n_my_reports +l10n_mz_reports +l10n_ng_reports +l10n_nl_hr_payroll +l10n_nl_hr_payroll_account +l10n_nl_intrastat +l10n_nl_reports +l10n_nl_reports_sbr +l10n_nl_reports_sbr_icp +l10n_nl_reports_sbr_ob_nummer +l10n_nl_reports_sbr_status_info +l10n_no_reports +l10n_no_saft +l10n_nz_eft +l10n_nz_reports +l10n_pe_edi +l10n_pe_edi_pos +l10n_pe_edi_stock +l10n_pe_reports +l10n_pe_reports_stock +l10n_ph_check_printing +l10n_ph_reports +l10n_pk_hr_payroll +l10n_pk_hr_payroll_account +l10n_pk_reports +l10n_pl_hr_payroll +l10n_pl_hr_payroll_account +l10n_pl_reports +l10n_pl_reports_pos_jpk +l10n_pt_reports +l10n_ro_hr_payroll +l10n_ro_hr_payroll_account +l10n_ro_reports +l10n_ro_saft +l10n_ro_saft_import +l10n_rs_reports +l10n_rw_reports +l10n_sa_hr_payroll +l10n_sa_hr_payroll_account +l10n_se_reports +l10n_se_sie_import +l10n_sg_reports +l10n_si_reports +l10n_sk_hr_payroll +l10n_sk_hr_payroll_account +l10n_syscohada_reports +l10n_th_reports +l10n_tn_reports +l10n_tr_hr_payroll +l10n_tr_hr_payroll_account +l10n_tr_reports +l10n_tw_reports +l10n_tz_reports +l10n_uk_bacs +l10n_uk_hmrc +l10n_uk_reports +l10n_uk_reports_cis +l10n_us_1099 +l10n_us_check_printing +l10n_us_hr_payroll +l10n_us_hr_payroll_account +l10n_us_hr_payroll_adp +l10n_us_hr_payroll_state_calculation +l10n_us_payment_nacha +l10n_us_reports +l10n_uy_edi +l10n_vn_reports +l10n_zm_reports +mail_enterprise +mail_mobile +maintenance_worksheet +marketing_automation +marketing_automation_crm +marketing_automation_sms +marketing_automation_website_sale +marketing_automation_whatsapp 
+mass_mailing_sale_subscription +mrp_account_enterprise +mrp_accountant +mrp_maintenance +mrp_mps +mrp_plm +mrp_subcontracting_account_enterprise +mrp_subcontracting_enterprise +mrp_subcontracting_quality +mrp_workorder +mrp_workorder_expiry +mrp_workorder_hr_account +mrp_workorder_iot +mrp_workorder_plm +partner_commission +payment_sepa_direct_debit +planning +planning_contract +planning_holidays +planning_hr_skills +pos_account_reports +pos_avatax +pos_barcodelookup +pos_blackbox_be +pos_enterprise +pos_hr_mobile +pos_hr_preparation_display +pos_iot +pos_iot_six +pos_l10n_se +pos_online_payment_self_order_preparation_display +pos_order_tracking_display +pos_preparation_display +pos_pricer +pos_restaurant_appointment +pos_restaurant_preparation_display +pos_restaurant_urban_piper +pos_sale_stock_renting +pos_sale_subscription +pos_self_order_iot +pos_self_order_preparation_display +pos_settle_due +pos_urban_piper +pos_urban_piper_enhancements +pos_urban_piper_swiggy +pos_urban_piper_ubereats +pos_urban_piper_zomato +product_barcodelookup +product_unspsc +project_account_asset +project_account_budget +project_enterprise +project_enterprise_hr +project_enterprise_hr_contract +project_enterprise_hr_skills +project_forecast +project_helpdesk +project_holidays +project_hr_payroll_account +project_mrp_workorder_account +project_sale_subscription +project_timesheet_forecast +project_timesheet_forecast_sale +purchase_intrastat +purchase_mrp_workorder_quality +quality +quality_control +quality_control_iot +quality_control_picking_batch +quality_control_worksheet +quality_iot +quality_mrp +quality_mrp_workorder +quality_mrp_workorder_iot +quality_mrp_workorder_worksheet +quality_repair +room +sale_account_accountant +sale_amazon +sale_amazon_avatax +sale_commission +sale_commission_margin +sale_commission_subscription +sale_external_tax +sale_intrastat +sale_management_renting +sale_mrp_renting +sale_planning +sale_project_forecast +sale_purchase_inter_company_rules 
+sale_purchase_stock_inter_company_rules +sale_renting +sale_renting_crm +sale_renting_planning +sale_renting_project +sale_renting_sign +sale_stock_renting +sale_subscription +sale_subscription_external_tax +sale_subscription_stock +sale_subscription_timesheet +sale_timesheet_enterprise +sale_timesheet_enterprise_holidays +sign +sign_itsme +snailmail_account_followup +social +social_crm +social_demo +social_facebook +social_instagram +social_linkedin +social_push_notifications +social_sale +social_test_full +social_twitter +social_youtube +spreadsheet_dashboard_account_accountant +spreadsheet_dashboard_crm +spreadsheet_dashboard_documents +spreadsheet_dashboard_edition +spreadsheet_dashboard_helpdesk +spreadsheet_dashboard_hr_contract +spreadsheet_dashboard_hr_payroll +spreadsheet_dashboard_hr_referral +spreadsheet_dashboard_marketing_automation +spreadsheet_dashboard_mrp_account +spreadsheet_dashboard_purchase_stock +spreadsheet_dashboard_sale_renting +spreadsheet_dashboard_sale_subscription +spreadsheet_dashboard_stock +spreadsheet_edition +spreadsheet_sale_management +stock_accountant +stock_barcode +stock_barcode_barcodelookup +stock_barcode_mrp +stock_barcode_mrp_subcontracting +stock_barcode_picking_batch +stock_barcode_product_expiry +stock_barcode_quality_control +stock_barcode_quality_control_picking_batch +stock_barcode_quality_mrp +stock_enterprise +stock_fleet_enterprise +stock_intrastat +test_appointment_full +test_data_cleaning +test_discuss_full_enterprise +test_event_full_enterprise +test_l10n_be_hr_payroll_account +test_l10n_ch_hr_payroll_account +test_l10n_hk_hr_payroll_account +test_l10n_us_hr_payroll_account +test_mail_enterprise +test_marketing_automation +test_rental_product_configurators +test_sale_subscription +test_spreadsheet_edition +test_timer +test_web_cohort +test_web_gantt +test_web_studio +test_website_sale_full +test_whatsapp +timer +timesheet_grid +timesheet_grid_holidays +voip +voip_crm +voip_onsip +web_cohort +web_enterprise 
+web_gantt +web_grid +web_map +web_mobile +web_studio +website_appointment +website_appointment_account_payment +website_appointment_crm +website_appointment_sale +website_appointment_sale_project +website_crm_iap_reveal_enterprise +website_delivery_sendcloud +website_documents +website_enterprise +website_event_social +website_event_track_gantt +website_event_track_social +website_generator +website_generator_sale +website_helpdesk +website_helpdesk_forum +website_helpdesk_knowledge +website_helpdesk_livechat +website_helpdesk_sale_loyalty +website_helpdesk_slides +website_helpdesk_slides_forum +website_knowledge +website_product_barcodelookup +website_sale_dashboard +website_sale_external_tax +website_sale_fedex +website_sale_renting +website_sale_shiprocket +website_sale_stock_renting +website_sale_subscription +website_sale_ups +website_studio +whatsapp +whatsapp_account +whatsapp_delivery +whatsapp_event +whatsapp_hr_referral +whatsapp_payment +whatsapp_pos +whatsapp_sale +whatsapp_website_sale +worksheet diff --git a/setup/clean_up_addons.sh b/setup/clean_up_addons.sh new file mode 100755 index 000000000..8ac385db3 --- /dev/null +++ b/setup/clean_up_addons.sh @@ -0,0 +1,101 @@ +#!/bin/bash + +# Check if at least one root folder is provided as an argument +if [ $# -eq 0 ]; then + echo "Usage: $0 [...]" + echo "Please provide at least one root folder path." + exit 1 +fi + +# Define the protected items list file +PROTECTED_LIST="protected.txt" +if [ ! -f "$PROTECTED_LIST" ]; then + echo "Error: '$PROTECTED_LIST' not found." + echo "Please create 'protected.txt' one directory up with a list of protected files/folders (one per line)." 
+ exit 1 +fi + +# Safeguard: Check if any file/folder matching patterns in protected.txt exists in a root folder +check_protected_items() { + local root_dir="$1" + while IFS= read -r pattern; do + # Skip empty lines + [ -z "$pattern" ] && continue + + # Handle wildcards by using find for pattern matching + if [[ "$pattern" == *"*"* ]]; then + # Convert pattern to a find-compatible search + if [[ "$pattern" == /* ]]; then + search_path="${root_dir}${pattern}" + else + search_path="${root_dir}/${pattern}" + fi + + # Use find to check if any files match the pattern + if find "$root_dir" -path "$search_path" 2>/dev/null | grep -q .; then + echo "Error: Protected pattern '$pattern' matches files in '$root_dir'. Aborting execution." + exit 1 + fi + else + # Exact match for non-wildcard entries + if [ -e "$root_dir/$pattern" ]; then + echo "Error: Protected item '$pattern' found in '$root_dir'. Aborting execution." + exit 1 + fi + fi + done < "$PROTECTED_LIST" +} + +# Function to check and delete subfolders +delete_non_manifest_folders() { + local dir="$1" + + # Loop through all immediate subdirectories in the given directory + for subfolder in "$dir"/*/ ; do + # Check if it's a directory + if [ -d "$subfolder" ]; then + # Check if __manifest__.py exists in this subfolder + if [ ! -f "$subfolder/__manifest__.py" ]; then + echo "Deleting '$subfolder' (no __manifest__.py found)" + rm -rf "$subfolder" + else + echo "Keeping '$subfolder' (__manifest__.py found)" + fi + fi + done +} + +# Process each root folder provided as an argument +for ROOT_FOLDER in "$@"; do + # Check if the root folder exists and is a directory + if [ ! -d "$ROOT_FOLDER" ]; then + echo "Error: '$ROOT_FOLDER' is not a valid directory. Skipping." + continue + fi + + # Perform the safeguard check for this root folder + echo "Checking for protected items in '$ROOT_FOLDER' from '$PROTECTED_LIST'..." 
+ check_protected_items "$ROOT_FOLDER" + + # Change to the root folder to handle relative paths cleanly + cd "$ROOT_FOLDER" || { + echo "Error: Could not change to directory '$ROOT_FOLDER'. Skipping." + continue + } + + # Call the function with the current root folder + echo "Processing '$ROOT_FOLDER'..." + delete_non_manifest_folders "." + + # Return to the original directory to process the next root folder + cd - > /dev/null || { + echo "Error: Could not return from '$ROOT_FOLDER'. Exiting." + exit 1 + } + + echo "Cleanup complete for '$ROOT_FOLDER'!" +done + +echo "All root folders processed!" + +exit 0 \ No newline at end of file diff --git a/setup/clean_up_virtualenvs.sh b/setup/clean_up_virtualenvs.sh new file mode 100755 index 000000000..f383c2fb3 --- /dev/null +++ b/setup/clean_up_virtualenvs.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# Get the current branch name +branch_name=$(git rev-parse --abbrev-ref HEAD) + +# Get a list of all virtual environments, filtering out duplicates and those not containing the branch name +virtualenvs=$(pyenv virtualenvs | awk '{print $1}' | sort -u | grep "$branch_name") + +# Count the number of virtual environments +count=$(echo "$virtualenvs" | wc -l) + +# Calculate how many virtual environments to keep +keep_count=$((count - $1)) + +# If there are more than 3 virtual environments, delete the oldest ones +if (( keep_count > 0 )); then + # Get the oldest virtual environments (assuming they are listed first) + oldest_venvs=$(echo "$virtualenvs" | head -n "$keep_count") + + # Loop through the oldest virtual environments and delete them + for venv in $oldest_venvs; do + echo "Deleting virtual environment: $venv" + pyenv virtualenv-delete -f "$venv" + done +fi + +echo "Old virtual environments containing '$branch_name' deleted." 
\ No newline at end of file diff --git a/setup/color_log.py b/setup/color_log.py new file mode 100644 index 000000000..dbd92e059 --- /dev/null +++ b/setup/color_log.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 + +def colorize(text, code): + """Colorizes text using ANSI escape codes.""" + return f"\033[{code}m{text}\033[0m" + +def Show(status, message): + """Displays a message with a status indicator.""" + colors = { + 0: (colorize("[", "90")+colorize(" OK ", "38;5;154") + colorize("]", "90")), # Green, Grey + 1: (colorize("[", "90")+colorize(" FAILED ", "91") + colorize("]", "90")), # Red, Grey + 2: (colorize("[", "90")+colorize(" INFO ", "38;5;154") + colorize("]", "90")), # Green, Grey + 3: (colorize("[", "90")+colorize(" NOTICE ", "33") + colorize("]", "90")), # Yellow, Grey + } + print(f"{colors.get(status, '')} {message}") + if status == 1: + exit(1) + +def Warn(message): + """Displays a warning message in red.""" + print(colorize(message, "91")) + +def GreyStart(): + """Starts a grey-colored output.""" + print(colorize("", "90"), end="") + +def ColorReset(): + """Resets the output color.""" + print("\033[0m", end="") \ No newline at end of file diff --git a/setup/dir2file.sh b/setup/dir2file.sh new file mode 100755 index 000000000..02ab7f8a0 --- /dev/null +++ b/setup/dir2file.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Define output file name +input_dir="$1" +output_file="$2" +# Find all directories in root and write to file +# Using find to list only directories (-type d) at depth 1 (-maxdepth 1) +find $input_dir -maxdepth 1 -type d -not -path "$input_dir" -exec basename {} \; | sort >> "$output_file" + +echo "Folder list has been written to $output_file" \ No newline at end of file diff --git a/setup/download_backup.sh b/setup/download_backup.sh new file mode 100755 index 000000000..f213445c4 --- /dev/null +++ b/setup/download_backup.sh @@ -0,0 +1,82 @@ +#!/usr/bin/bash + +export PATH=/usr/sbin:$PATH +export DEBIAN_FRONTEND=noninteractive +set -euo pipefail 
+readonly COLOUR_RESET='\e[0m' +readonly aCOLOUR=( + '\e[38;5;154m' # green | Lines, bullets and separators + '\e[1m' # Bold white | Main descriptions + '\e[90m' # Grey | Credits + '\e[91m' # Red | Update notifications Alert + '\e[33m' # Yellow | Emphasis +) +trap 'onCtrlC' INT +onCtrlC() { + echo -e "${COLOUR_RESET}" + exit 1 +} + +Show() { + # OK + if (($1 == 0)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[0]} OK $COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + # FAILED + elif (($1 == 1)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[3]}FAILED$COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + exit 1 + # INFO + elif (($1 == 2)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[0]} INFO $COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + # NOTICE + elif (($1 == 3)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[4]}NOTICE$COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + fi +} + +Warn() { + echo -e "${aCOLOUR[3]}$1$COLOUR_RESET" +} + +GreyStart() { + echo -e "${aCOLOUR[2]}\c" +} + +ColorReset() { + echo -e "$COLOUR_RESET\c" +} + + +main() { + DEPLOYMENT_DIR=$(pwd)/deployment + BACKUP_DIR="$DEPLOYMENT_DIR/backup" + DOWNLOAD_URL="$1" + BACKUP_FILE="$BACKUP_DIR/$2" + # Check if the deployment and backup directories exist, create them if not + if [[ ! -d "$BACKUP_DIR" ]]; then + echo "Backup directory does not exist. Creating: $BACKUP_DIR" + mkdir -p "$BACKUP_DIR" + fi + + # Check if the download URL is valid + echo "Checking if the URL is valid: $DOWNLOAD_URL" + if curl --head --silent --fail "$DOWNLOAD_URL" > /dev/null; then + echo "URL is valid. Proceeding with download..." + else + Show 1 "Error: Invalid or inaccessible URL: $DOWNLOAD_URL" + exit 1 + fi + + # Download the file and rename it to backup.zip + wget -O "$BACKUP_FILE" "$DOWNLOAD_URL" + + # Check if the file was downloaded + if [[ -f "$BACKUP_FILE" ]]; then + Show 0 "Backup file successfully downloaded to: $BACKUP_FILE" + else + Show 1 "Error: Backup file was not downloaded." 
+ exit 1 + fi +} + +main "$@" \ No newline at end of file diff --git a/setup/gen_config.py b/setup/gen_config.py new file mode 100644 index 000000000..b0a38390e --- /dev/null +++ b/setup/gen_config.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +import argparse +import configparser +import shutil +import os +from dotenv import set_key +from pathlib import Path +import socket +import secrets +import string +import color_log +def find_available_port(start_port=80): + """Finds an available port starting from the given port.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + while True: + try: + sock.bind(('0.0.0.0', start_port)) + color_log.Show(3,f" {start_port} is Open") + return start_port + except OSError as e: + if e.errno == 98: # Address already in use + print(f"{start_port} already in use , Try other port ...") + start_port += 1 + else: + raise +def main(): + """ + Generates a random password and finds an available port. + Updates the Odoo configuration file and .env file with these values. 
+ """ + parser = argparse.ArgumentParser(description="Generate Odoo configuration") + parser.add_argument('--db_user', type=str, help='') + parser.add_argument('--db_pass', type=str, help='') + parser.add_argument('--deploy_path', type=str, help='') + parser.add_argument('--addons_path', type=str, help='') + # parser.add_argument('--db_filter', type=str, help='') + parser.add_argument('--db_port', type=int, help='') + parser.add_argument('--db_server', type=str, help='') + args = parser.parse_args() + db_port = args.db_port + db_user = args.db_user + db_pass = args.db_pass + db_server = args.db_server + app_port = find_available_port(8069) + addons_path = args.addons_path + base_dir= args.deploy_path + # db_filter= args.db_filter + # Copy template files + os.makedirs(f"{base_dir}/etc", exist_ok=True) + color_log.Show(3,f"Copy {base_dir}/odoo.conf.template to {base_dir}/etc/odoo.conf") + shutil.copyfile(f'{base_dir}/odoo.conf.template', f'{base_dir}/odoo.conf') + + # Update Odoo configuration file + config = configparser.ConfigParser() + config.read(f'{base_dir}/odoo.conf') + config['options']['db_host'] = str(db_server) + config['options']['db_user'] = db_user + config['options']['db_password'] = db_pass + config['options']['db_port'] = str(db_port) + config['options']['addons_path'] = addons_path + config['options']['xmlrpc_port'] = str(app_port) + config['options']['dbfilter'] = ".*" + config['options']['proxy_mode'] = "True" + with open(f'{base_dir}/odoo.conf', 'w') as configfile: + config.write(configfile) +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/setup/gen_config_docker.py b/setup/gen_config_docker.py new file mode 100755 index 000000000..ce15bc55f --- /dev/null +++ b/setup/gen_config_docker.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 +import argparse +import configparser +import shutil +import os +from dotenv import set_key +from pathlib import Path +import socket +import secrets +import string +import color_log +def 
generate_password(length=16): + """Generates a random password of specified length.""" + alphabet = string.ascii_letters + string.digits + return ''.join(secrets.choice(alphabet) for _ in range(length)) + +def find_available_port(start_port=80): + """Finds an available port starting from the given port.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + while True: + try: + sock.bind(('0.0.0.0', start_port)) + color_log.Show(3,f" {start_port} is Open") + return start_port + except OSError as e: + if e.errno == 98: # Address already in use + print(f"{start_port} already in use , Try other port ...") + start_port += 1 + else: + raise + +def main(): + """ + Generates a random password and finds an available port. + Updates the Odoo configuration file and .env file with these values. + """ + parser = argparse.ArgumentParser(description="Generate Odoo configuration") + parser.add_argument('--db_port', type=int, help='') + parser.add_argument('--db_user', type=str, help='') + parser.add_argument('--deploy_path', type=str, help='') + parser.add_argument('--image', type=str, help='') + parser.add_argument('--tag', type=str, help='') + parser.add_argument('--addons', type=str, help='') + parser.add_argument('--config', type=str, help='') + parser.add_argument('--container', type=str, help='') + parser.add_argument('--backup', type=str, help='') + args = parser.parse_args() + db_port = args.db_port + db_pass = "smartyourlife" + db_user = args.db_user + base_dir= args.deploy_path + image=args.image + tag=args.tag + container=args.container + addons=args.addons + config_path=args.config + app_port = 10017 + backup = args.backup + # Copy template files + os.makedirs(f"{base_dir}/etc", exist_ok=True) + color_log.Show(3,f"Copy {base_dir}/odoo.conf.template to {base_dir}/etc/odoo.conf") + shutil.copyfile(f'{base_dir}/odoo.conf.template', f'{base_dir}/etc/odoo.conf') + shutil.copyfile(f'{base_dir}/env.template', f'{base_dir}/.env') + + # Update Odoo 
configuration file + config = configparser.ConfigParser() + config.read(f'{base_dir}/etc/odoo.conf') + config['options']['db_host'] = "db" + config['options']['db_user'] = db_user + config['options']['db_password'] = db_pass + config['options']['db_port'] = str(db_port) + config['options']['addons_path'] = "/mnt/extra-addons" + config['options']['data_dir'] = "/var/lib/odoo" + config['options']['proxy_mode'] = "True" + with open(f'{base_dir}/etc/odoo.conf', 'w') as configfile: + config.write(configfile) + + # Update .env file + env_file_path = Path("deployment/.env") + set_key(dotenv_path=env_file_path, key_to_set="COMPOSE_PROJECT_NAME", value_to_set=f"odoo-{tag}",quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="PG_PORT", value_to_set=find_available_port(int(os.getenv('DB_PORT','5432'))+1),quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="PG_USER", value_to_set=db_user,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="PG_PASS", value_to_set=db_pass,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_CONFIG", value_to_set=config_path,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_ADDONS", value_to_set=addons,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_PORT", value_to_set=find_available_port(app_port),quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_IMAGE", value_to_set=image,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_TAG", value_to_set=tag,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_CONTAINER", value_to_set=container,quote_mode="never") + set_key(dotenv_path=env_file_path, key_to_set="ODOO_BACKUP", value_to_set=backup,quote_mode="never") + +if __name__ == "__main__": + main() diff --git a/setup/init_config.sh b/setup/init_config.sh new file mode 100755 index 000000000..637df5ce7 --- /dev/null +++ b/setup/init_config.sh @@ -0,0 +1,245 @@ 
+#!/usr/bin/bash + +export PATH=/usr/sbin:$PATH +export DEBIAN_FRONTEND=noninteractive +set -euo pipefail +DEPLOY_PATH=$(pwd)/deployment +SETUP_PATH=$(pwd)/setup +PIP=$(pwd)/venv/bin/pip +PYTHON=$(pwd)/venv/bin/python +ODOO_ADDONS=${DEPLOY_PATH}/addons +ODOO_CONFIG=${DEPLOY_PATH}/etc +ODOO_BACKUP=${DEPLOY_PATH}/backup +# System +DEPENDS_PACKAGE=('wget' 'curl' 'git' 'unzip' 'make' 'wkhtmltopdf' 'postgresql-client') +DEPENDS_COMMAND=('wget' 'curl' 'git' 'unzip' 'make' 'wkhtmltopdf' 'psql') +((EUID)) && sudo_cmd="sudo" || sudo_cmd="" +readonly MINIMUM_DOCER_VERSION="20" +UNAME_U="$(uname -s)" +readonly UNAME_U +readonly COLOUR_RESET='\e[0m' +readonly aCOLOUR=( + '\e[38;5;154m' # green | Lines, bullets and separators + '\e[1m' # Bold white | Main descriptions + '\e[90m' # Grey | Credits + '\e[91m' # Red | Update notifications Alert + '\e[33m' # Yellow | Emphasis +) +trap 'onCtrlC' INT +onCtrlC() { + echo -e "${COLOUR_RESET}" + exit 1 +} + +Show() { + # OK + if (($1 == 0)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[0]} OK $COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + # FAILED + elif (($1 == 1)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[3]}FAILED$COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + exit 1 + # INFO + elif (($1 == 2)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[0]} INFO $COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + # NOTICE + elif (($1 == 3)); then + echo -e "${aCOLOUR[2]}[$COLOUR_RESET${aCOLOUR[4]}NOTICE$COLOUR_RESET${aCOLOUR[2]}]$COLOUR_RESET $2" + fi +} + +Warn() { + echo -e "${aCOLOUR[3]}$1$COLOUR_RESET" +} + +GreyStart() { + echo -e "${aCOLOUR[2]}\c" +} + +ColorReset() { + echo -e "$COLOUR_RESET\c" +} + + +Update_Package_Resource() { + GreyStart + if [ -x "$(command -v apk)" ]; then + ${sudo_cmd} apk update + elif [ -x "$(command -v apt)" ]; then + ${sudo_cmd} apt update + elif [ -x "$(command -v dnf)" ]; then + ${sudo_cmd} dnf check-update + elif [ -x "$(command -v zypper)" ]; then + ${sudo_cmd} zypper update + elif [ -x 
"$(command -v yum)" ]; then + ${sudo_cmd} yum update + fi + ColorReset +} +# 3 Check OS +Check_OS() { + if [[ $UNAME_U == *Linux* ]]; then + Show 0 "Your System is : $UNAME_U" + else + Show 1 "This script is only for Linux." + exit 1 + fi +} +Install_Depends() { + for ((i = 0; i < ${#DEPENDS_COMMAND[@]}; i++)); do + cmd=${DEPENDS_COMMAND[i]} + if [[ ! -x $(${sudo_cmd} which "$cmd") ]]; then + packagesNeeded=${DEPENDS_PACKAGE[i]} + Show 2 "Install the necessary dependencies: \e[33m$packagesNeeded \e[0m" + GreyStart + if [ -x "$(command -v apk)" ]; then + ${sudo_cmd} apk add --no-cache "$packagesNeeded" + elif [ -x "$(command -v apt-get)" ]; then + ${sudo_cmd} apt-get -y -q install "$packagesNeeded" --no-upgrade + elif [ -x "$(command -v dnf)" ]; then + ${sudo_cmd} dnf install "$packagesNeeded" + elif [ -x "$(command -v zypper)" ]; then + ${sudo_cmd} zypper install "$packagesNeeded" + elif [ -x "$(command -v yum)" ]; then + ${sudo_cmd} yum install "$packagesNeeded" + elif [ -x "$(command -v pacman)" ]; then + ${sudo_cmd} pacman -S "$packagesNeeded" + elif [ -x "$(command -v paru)" ]; then + ${sudo_cmd} paru -S "$packagesNeeded" + else + Show 1 "Package manager not found. You must manually install: \e[33m$packagesNeeded \e[0m" + fi + ColorReset + else + Show 2 "\e[33m ${DEPENDS_COMMAND[i]}\e[0m Installed" + fi + done +} + +Check_Dependency_Installation() { + for ((i = 0; i < ${#DEPENDS_COMMAND[@]}; i++)); do + cmd=${DEPENDS_COMMAND[i]} + if [[ ! -x $(${sudo_cmd} which "$cmd") ]]; then + packagesNeeded=${DEPENDS_PACKAGE[i]} + Show 1 "Dependency \e[33m$packagesNeeded \e[0m installation failed, please try again manually!" + exit 1 + fi + done +} +Check_Docker_Install() { + if [[ -x "$(command -v docker)" ]]; then + Docker_Version=$(${sudo_cmd} docker version --format '{{.Server.Version}}') + if [[ $? 
-ne 0 ]]; then + Install_Docker + elif [[ ${Docker_Version:0:2} -lt "${MINIMUM_DOCER_VERSION}" ]]; then + Show 1 "Recommended minimum Docker version is \e[33m${MINIMUM_DOCER_VERSION}.xx.xx\e[0m,\Current Docker verison is \e[33m${Docker_Version}\e[0m,\nPlease uninstall current Docker and rerun the CasaOS installation script." + exit 1 + else + Show 0 "Current Docker verison is ${Docker_Version}." + fi + else + Install_Docker + fi +} +Install_Docker() { + Show 2 "Install the necessary dependencies: \e[33mDocker \e[0m" + if [[ ! -d "${PREFIX}/etc/apt/sources.list.d" ]]; then + ${sudo_cmd} mkdir -p "${PREFIX}/etc/apt/sources.list.d" + fi + GreyStart + if [[ "${REGION}" = "CN" ]]; then + ${sudo_cmd} curl -fsSL https://get.docker.com | bash -s docker --mirror Aliyun + else + ${sudo_cmd} curl -fsSL https://get.docker.com | bash + fi + ColorReset + if [[ $? -ne 0 ]]; then + Show 1 "Installation failed, please try again." + exit 1 + else + Check_Docker_Install_Final + fi +} +Check_Docker_Install_Final() { + if [[ -x "$(command -v docker)" ]]; then + Docker_Version=$(${sudo_cmd} docker version --format '{{.Server.Version}}') + if [[ $? -ne 0 ]]; then + Install_Docker + elif [[ ${Docker_Version:0:2} -lt "${MINIMUM_DOCER_VERSION}" ]]; then + Show 1 "Recommended minimum Docker version is \e[33m${MINIMUM_DOCER_VERSION}.xx.xx\e[0m,\Current Docker verison is \e[33m${Docker_Version}\e[0m,\nPlease uninstall current Docker and rerun the CasaOS installation script." + exit 1 + else + Show 0 "Current Docker verison is ${Docker_Version}." + Check_Docker_Running + fi + else + Show 1 "Installation failed, please run 'curl -fsSL https://get.docker.com | bash' and rerun the CasaOS installation script." + exit 1 + fi +} +Generate_Config_Docker(){ + ODOO_IMAGE=${1:-} + ODOO_TAG=${2:-} + ODOO_CONTAINER=${3:-} + if [[ ! 
-f "${DEPLOY_PATH}/.env" ]]; then + cp "${DEPLOY_PATH}/env.template" "${DEPLOY_PATH}/.env" + fi + USER="${REPO_NAME:-"default_repo"}" + # Convert to lowercase + USER="${USER,,}" + ${PYTHON} "$SETUP_PATH/gen_config_docker.py" --db_port 5432 --db_user $USER --deploy_path "$DEPLOY_PATH" \ + --image "${ODOO_IMAGE}" --container "${ODOO_CONTAINER}" --tag "${ODOO_TAG:=latest}" \ + --addons "${ODOO_ADDONS}" --config "${ODOO_CONFIG}" --backup "${ODOO_BACKUP}" + Show 0 " Generate Config Complete" +} +Generate_Config_Native(){ + DB_USER=${2:-} + DB_PASSWORD=${3:-} + DB_SERVER=${4:-} + DB_PORT=${5:-} + ADDONS=${1:-} + REPO_NAME=$(basename "$(git rev-parse --show-toplevel)" | sed -E 's/[.-]/_/g') + USER="${REPO_NAME:-"default_repo"}" + # Convert to lowercase + USER="${USER,,}" + PASSWORD="$(openssl rand -hex 24)" + # Check if the user already exists + USER_EXISTS=$(psql "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_SERVER}:${DB_PORT}/postgres" -t -A -c "SELECT COUNT(*) FROM pg_roles WHERE rolname='$USER';") + + if [ $USER_EXISTS -eq 0 ]; then + # User does not exist, create the user + Show 2 "Create the new PostgreSQL username: $USER with password: $PASSWORD" + psql "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_SERVER}:${DB_PORT}/postgres" -c "CREATE USER $USER WITH PASSWORD '$PASSWORD';" + Show 2 "Grant $USER superuser (admin) privileges" + psql "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_SERVER}:${DB_PORT}/postgres" -c "ALTER USER $USER WITH SUPERUSER;" + else + # User exists, update the password (do not try to create) + Show 2 "User $USER already exists, updating password to $PASSWORD" + psql "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_SERVER}:${DB_PORT}/postgres" -c "ALTER USER $USER WITH PASSWORD '$PASSWORD';" + fi + ${PYTHON} "$SETUP_PATH/gen_config.py" --db_user $USER --db_pass $PASSWORD --deploy_path "$(pwd)" \ + --addons_path $ADDONS --db_port $DB_PORT --db_server $DB_SERVER + Show 0 " Generate Config Complete" +} +main(){ + TYPE=${1:-} + Check_OS + # 
Update_Package_Resource + # Install_Depends + # Check_Dependency_Installation + # Check_Docker_Install + case "$TYPE" in + --native) + Generate_Config_Native $2 $3 $4 $5 $6 + ;; + --docker) + Generate_Config_Docker $2 $3 $4 + ;; + *) + # else + Show 1 "Invalid argument (--docker|--native)" + ;; + esac +} + +main "$@" diff --git a/setup/merge_module.py b/setup/merge_module.py new file mode 100755 index 000000000..1e7f2edb2 --- /dev/null +++ b/setup/merge_module.py @@ -0,0 +1,44 @@ +import subprocess +import yaml +import os +import argparse + +# Set up argument parsing +parser = argparse.ArgumentParser( + description="Checkout modules from target branch that are not in source branch." +) +parser.add_argument("yaml_file", help="Path to the YAML file") +parser.add_argument("source_branch", help="The source branch") +parser.add_argument("target_branch", help="The target branch") +parser.add_argument("root_repo", help="Path to the root repository") + +# Parse the arguments +args = parser.parse_args() + +yaml_file = args.yaml_file +source_branch = args.source_branch +target_branch = args.target_branch +root_repo = args.root_repo + +# Change to the repository directory +os.chdir(root_repo) +# Read YAML file +with open(yaml_file, "r") as file: + data = yaml.safe_load(file) + +# Extract module lists for source and target branches +modules_source = data["branches"].get(source_branch, {}).get("modules", []) +modules_target = data["branches"].get(target_branch, {}).get("modules", []) + +# Ensure the latest changes are fetched +subprocess.run(["git", "fetch", "origin"], check=True) + +# Checkout source branch first +print(f"Checking out source branch: {source_branch}") +subprocess.run(["git", "checkout", source_branch], check=True) + +# Checkout modules in target_branch that are not in source_branch +for module in modules_target: + if module not in modules_source: + print(f"Checking out module: {module}") + subprocess.run(["git", "checkout", target_branch, "--", module], 
check=True) diff --git a/setup/migrate-lang.sh b/setup/migrate-lang.sh new file mode 100755 index 000000000..144cf9e8a --- /dev/null +++ b/setup/migrate-lang.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Set source and destination repositories +SRC_REPO="/root/dev/NextERP/dev/Viindoo/odoo-18.0" +DEST_REPO="/root/dev/NextERP/dev/odoo18/Odoo18" +LANG="vi" +# Ensure both paths exist +if [ ! -d "$SRC_REPO" ]; then + echo "Error: Source repository does not exist!" + exit 1 +fi + +if [ ! -d "$DEST_REPO" ]; then + echo "Error: Destination repository does not exist!" + exit 1 +fi + +# Find and copy vi.po files while preserving directory structure +cd "$SRC_REPO" || exit +find . -type f -name "${LANG}.po" | while read -r file; do + # Get the directory path of the file + dir_path=$(dirname "$file") + + # Ensure the destination directory exists + mkdir -p "$DEST_REPO/$dir_path" + + # Copy the file + cp "$file" "$DEST_REPO/$dir_path/" + + echo "Copied: $file -> $DEST_REPO/$dir_path/" +done + +echo "All ${LANG}.po files copied successfully!" \ No newline at end of file diff --git a/setup/modules_scan.sh b/setup/modules_scan.sh new file mode 100755 index 000000000..45fcf7e30 --- /dev/null +++ b/setup/modules_scan.sh @@ -0,0 +1,82 @@ +#!/bin/bash + +# Check if required arguments are provided +if [ $# -lt 3 ] || [ $# -gt 4 ]; then + echo "Usage: $0 [list_branch]" + echo "Example: $0 exclude_list.txt /path/to/git/repo /path/to/output.yaml 'branch1 branch2'" + exit 1 +fi + +INPUT_FILE="$1" +ROOT_FOLDER="$2" +OUTPUT_FILE="$3" +LIST_BRANCH="$4" + +# Check if input file exists +if [ ! -f "$INPUT_FILE" ]; then + echo "Error: Input file '$INPUT_FILE' not found" + exit 1 +fi + +# Check if root folder exists +if [ ! -d "$ROOT_FOLDER" ]; then + echo "Error: Root folder '$ROOT_FOLDER' not found" + exit 1 +fi + +# Check if output YAML file exists, if not create it +if [ ! -f "$OUTPUT_FILE" ]; then + echo "Output file does not exist. 
Creating $OUTPUT_FILE" + touch "$OUTPUT_FILE" +fi + +# Change to root folder +cd "$ROOT_FOLDER" || exit 1 + +# Initialize output file +echo "branches:" > "$OUTPUT_FILE" + +# Get all git branches +git fetch --all +if [ -z "$LIST_BRANCH" ]; then + branches=$(git branch -r | grep -v HEAD | sed 's/origin\///' | sed 's/^[[:space:]]*//') +else + branches=$LIST_BRANCH +fi + +# Process each branch +for branch in $branches; do + echo "Processing branch: $branch" + + # Checkout branch + git checkout "$branch" 2>/dev/null || continue + + # Get all folders in current branch + folders=$(find . -maxdepth 1 -type d -not -path '.' -not -path './.*' | sed 's|./||') + + # Array to store modules not in input file + modules=() + + # Check each folder against input file + while IFS= read -r folder; do + # Skip if folder is empty + [ -z "$folder" ] && continue + + # Check if folder is in input file + if ! grep -Fxq "$folder" "$INPUT_FILE"; then + modules+=("$folder") + fi + done <<< "$folders" + + # Write to yaml if there are modules + if [ ${#modules[@]} -gt 0 ]; then + echo " $branch:" >> "$OUTPUT_FILE" + echo " modules:" >> "$OUTPUT_FILE" + for module in "${modules[@]}"; do + echo " - $module" >> "$OUTPUT_FILE" + done + fi + +done + +echo "Output written to $OUTPUT_FILE"