From 979d1fd37791fa9c4c50a57da1b3887ec5987e9f Mon Sep 17 00:00:00 2001 From: Xavier-Do Date: Wed, 21 Feb 2024 10:01:38 +0100 Subject: [PATCH] [DEL] runbot: remove master version --- README.md | 318 +- conftest.py | 1226 ------ forwardport/__init__.py | 2 - forwardport/__manifest__.py | 14 - .../changelog/2021-09/authorship-dedup.md | 1 - forwardport/changelog/2021-09/authorship.md | 1 - .../changelog/2021-09/conflict-view.md | 1 - forwardport/changelog/2021-09/draft.md | 1 - .../2021-09/feedback-missing-login.md | 1 - .../changelog/2021-09/followup-conflict.md | 1 - .../changelog/2021-09/fp-remote-view.md | 1 - .../changelog/2021-09/fwbot-rplus-error.md | 1 - forwardport/changelog/2021-09/outstanding.md | 1 - .../changelog/2021-10/delegate-followup.md | 1 - .../changelog/2021-10/followupdate-race.md | 1 - .../changelog/2021-10/fw-reapproval.md | 1 - .../changelog/2021-10/outstanding-layout.md | 1 - forwardport/changelog/2022-06/closed.md | 1 - .../changelog/2022-06/conflict-diff3.md | 1 - forwardport/changelog/2022-06/detached.md | 1 - forwardport/controllers.py | 15 - forwardport/data/crons.xml | 45 - forwardport/data/queues.xml | 51 - forwardport/data/security.xml | 46 - forwardport/data/views.xml | 215 - .../migrations/13.0.1.1/post-reminder-date.py | 10 - .../migrations/13.0.1.1/pre-tagging.py | 2 - forwardport/models/__init__.py | 4 - forwardport/models/forwardport.py | 254 -- forwardport/models/project.py | 1224 ------ forwardport/models/project_freeze.py | 22 - forwardport/tests/conftest.py | 52 - forwardport/tests/test_batches.py | 89 - forwardport/tests/test_conflicts.py | 356 -- forwardport/tests/test_limit.py | 293 -- forwardport/tests/test_overrides.py | 116 - forwardport/tests/test_simple.py | 1019 ----- forwardport/tests/test_updates.py | 414 -- forwardport/tests/test_weird.py | 814 ---- mergebot_test_utils/utils.py | 139 - requirements.txt | 4 - runbot/__init__.py | 7 - runbot/__manifest__.py | 68 - runbot/common.py | 158 - runbot/container.py | 303 -- runbot/controllers/__init__.py | 5 - runbot/controllers/badge.py | 90 - runbot/controllers/frontend.py | 570 --- runbot/controllers/hook.py | 53 - runbot/data/build_parse.xml | 22 - runbot/data/dockerfile_data.xml | 9 - runbot/data/error_link.xml | 22 - runbot/data/runbot_build_config_data.xml | 160 - runbot/data/runbot_data.xml | 111 - runbot/data/runbot_error_regex_data.xml | 17 - runbot/data/website_data.xml | 5 - .../Screenshot from 2020-09-23 12-20-40.png | Bin 22177 -> 0 bytes runbot/documentation/images/repo_odoo.png | Bin 33221 -> 0 bytes runbot/documentation/images/repo_runbot.png | Bin 32167 -> 0 bytes runbot/documentation/images/trigger.png | Bin 16628 -> 0 bytes runbot/example_scripts/nginx.conf | 75 - runbot/example_scripts/runbot/builder.sh | 3 - runbot/example_scripts/runbot/leader.sh | 3 - runbot/example_scripts/runbot/runbot.sh | 3 - .../example_scripts/services/builder.service | 15 - .../example_scripts/services/leader.service | 15 - .../example_scripts/services/runbot.service | 15 - runbot/fields.py | 52 - runbot/models/__init__.py | 29 - runbot/models/batch.py | 462 --- runbot/models/branch.py | 249 -- runbot/models/build.py | 1192 ------ runbot/models/build_config.py | 1112 ----- runbot/models/build_error.py | 334 -- runbot/models/build_stat.py | 27 - runbot/models/build_stat_regex.py | 72 - runbot/models/bundle.py | 243 -- runbot/models/codeowner.py | 41 - runbot/models/commit.py | 238 -- runbot/models/custom_trigger.py | 97 - runbot/models/database.py | 23 - runbot/models/dockerfile.py | 55 - 
runbot/models/event.py | 230 - runbot/models/host.py | 146 - runbot/models/ir_cron.py | 13 - runbot/models/ir_ui_view.py | 15 - runbot/models/project.py | 24 - runbot/models/repo.py | 579 --- runbot/models/res_config_settings.py | 111 - runbot/models/res_users.py | 10 - runbot/models/runbot.py | 404 -- runbot/models/upgrade.py | 70 - runbot/models/user.py | 10 - runbot/models/version.py | 105 - runbot/security/ir.model.access.csv | 118 - runbot/security/ir.rule.csv | 14 - runbot/security/runbot_security.xml | 123 - runbot/static/src/css/runbot.css | 329 -- runbot/static/src/js/json_field.js | 72 - runbot/static/src/js/runbot.js | 32 - runbot/static/src/js/stats.js | 292 -- runbot/templates/badge.xml | 47 - runbot/templates/batch.xml | 154 - runbot/templates/branch.xml | 81 - runbot/templates/build.xml | 408 -- runbot/templates/build_error.xml | 144 - runbot/templates/build_stats.xml | 136 - runbot/templates/bundle.xml | 88 - runbot/templates/commit.xml | 126 - runbot/templates/dashboard.xml | 283 -- runbot/templates/dockerfile.xml | 151 - runbot/templates/frontend.xml | 127 - runbot/templates/git.xml | 15 - runbot/templates/nginx.xml | 76 - runbot/templates/utils.xml | 359 -- runbot/tests/__init__.py | 16 - runbot/tests/common.py | 232 -- runbot/tests/test_branch.py | 252 -- runbot/tests/test_build.py | 491 --- runbot/tests/test_build_config_step.py | 499 --- runbot/tests/test_build_error.py | 225 - runbot/tests/test_build_stat.py | 141 - runbot/tests/test_command.py | 63 - runbot/tests/test_commit.py | 94 - runbot/tests/test_cron.py | 54 - runbot/tests/test_dockerfile.py | 67 - runbot/tests/test_event.py | 133 - runbot/tests/test_repo.py | 469 --- runbot/tests/test_runbot.py | 14 - runbot/tests/test_schedule.py | 37 - runbot/tests/test_upgrade.py | 544 --- runbot/tests/test_version.py | 61 - runbot/views/branch_views.xml | 47 - runbot/views/build_error_views.xml | 193 - runbot/views/build_views.xml | 151 - runbot/views/bundle_views.xml | 210 - runbot/views/codeowner_views.xml | 46 - runbot/views/commit_views.xml | 64 - runbot/views/config_views.xml | 195 - runbot/views/custom_trigger_wizard_views.xml | 38 - runbot/views/dashboard_views.xml | 129 - runbot/views/dockerfile_views.xml | 77 - runbot/views/error_log_views.xml | 90 - runbot/views/host_views.xml | 51 - runbot/views/menus.xml | 57 - runbot/views/repo_views.xml | 181 - runbot/views/res_config_settings_views.xml | 88 - runbot/views/stat_views.xml | 42 - runbot/views/upgrade.xml | 88 - runbot/views/warning_views.xml | 22 - runbot/wizards/__init__.py | 3 - runbot/wizards/stat_regex_wizard.py | 71 - runbot/wizards/stat_regex_wizard_views.xml | 39 - runbot_builder/builder.py | 26 - runbot_builder/dbmover.py | 171 - runbot_builder/leader.py | 27 - runbot_builder/tester.py | 16 - runbot_builder/tools.py | 151 - runbot_cla/__init__.py | 1 - runbot_cla/__manifest__.py | 13 - runbot_cla/build_config.py | 52 - runbot_cla/data/runbot_build_config_data.xml | 8 - runbot_merge/README.rst | 221 - runbot_merge/__init__.py | 46 - runbot_merge/__manifest__.py | 21 - .../changelog/2021-09/conflict_authorship.md | 1 - .../2021-09/different_project_link.md | 1 - runbot_merge/changelog/2021-09/drafts.md | 1 - .../changelog/2021-09/fetch_closed.md | 1 - .../2021-09/persistent_linked_prs.md | 1 - .../changelog/2021-09/rebase_tagging.md | 1 - .../2021-09/staging_failure_message.md | 1 - runbot_merge/changelog/2021-09/timestamps.md | 1 - runbot_merge/changelog/2021-10/changelog.md | 1 - .../changelog/2021-10/commit-title-edition.md | 1 - 
.../2021-10/pr_description_up_to_date.md | 1 - runbot_merge/changelog/2021-10/pr_errors.md | 1 - runbot_merge/changelog/2021-10/pr_page.md | 1 - .../changelog/2021-10/review-without-email.md | 1 - .../2021-10/reviewer-merge-methods.md | 1 - runbot_merge/changelog/2021-10/squash.md | 1 - runbot_merge/changelog/2022-06/alerts.md | 1 - runbot_merge/changelog/2022-06/branch.md | 4 - runbot_merge/changelog/2022-06/empty-body.md | 4 - runbot_merge/changelog/2022-06/pinging.md | 3 - .../changelog/2022-06/provisioning.md | 1 - runbot_merge/changelog/2022-06/ui.md | 8 - runbot_merge/changelog/2022-06/unstaging.md | 1 - runbot_merge/controllers/__init__.py | 321 -- runbot_merge/controllers/dashboard.py | 82 - .../controllers/reviewer_provisioning.py | 131 - runbot_merge/data/merge_cron.xml | 72 - runbot_merge/exceptions.py | 8 - runbot_merge/github.py | 406 -- .../migrations/13.0.1.1/pre-migration.py | 17 - .../migrations/13.0.1.2/pre-migration.py | 16 - .../migrations/13.0.1.3/pre-migration.py | 2 - .../migrations/13.0.1.4/pre-migration.py | 35 - .../migrations/13.0.1.5/pre-migration.py | 22 - .../migrations/13.0.1.6/pre-migration.py | 39 - .../migrations/13.0.1.7/pre-migration.py | 6 - runbot_merge/models/__init__.py | 4 - runbot_merge/models/project.py | 123 - .../models/project_freeze/__init__.py | 386 -- runbot_merge/models/project_freeze/views.xml | 94 - runbot_merge/models/pull_requests.py | 2304 ---------- runbot_merge/models/res_partner.py | 117 - runbot_merge/security/ir.model.access.csv | 25 - runbot_merge/security/security.xml | 8 - runbot_merge/static/project_freeze/index.js | 62 - runbot_merge/static/scss/runbot_merge.scss | 101 - runbot_merge/tests/README.rst | 47 - runbot_merge/tests/conftest.py | 41 - runbot_merge/tests/test_basic.py | 3694 ----------------- runbot_merge/tests/test_by_branch.py | 180 - runbot_merge/tests/test_disabled_branch.py | 152 - runbot_merge/tests/test_multirepo.py | 1438 ------- runbot_merge/tests/test_oddities.py | 131 - runbot_merge/tests/test_provisioning.py | 102 - runbot_merge/tests/test_status_overrides.py | 214 - runbot_merge/utils.py | 32 - runbot_merge/views/configuration.xml | 43 - runbot_merge/views/mergebot.xml | 242 -- runbot_merge/views/queues.xml | 97 - runbot_merge/views/res_partner.xml | 87 - runbot_merge/views/runbot_merge_project.xml | 78 - runbot_merge/views/templates.xml | 420 -- runbot_populate/__init__.py | 3 - runbot_populate/__manifest__.py | 15 - runbot_populate/demo/runbot_demo.xml | 160 - runbot_populate/models/__init__.py | 3 - runbot_populate/models/runbot.py | 135 - 232 files changed, 1 insertion(+), 35366 deletions(-) delete mode 100644 conftest.py delete mode 100644 forwardport/__init__.py delete mode 100644 forwardport/__manifest__.py delete mode 100644 forwardport/changelog/2021-09/authorship-dedup.md delete mode 100644 forwardport/changelog/2021-09/authorship.md delete mode 100644 forwardport/changelog/2021-09/conflict-view.md delete mode 100644 forwardport/changelog/2021-09/draft.md delete mode 100644 forwardport/changelog/2021-09/feedback-missing-login.md delete mode 100644 forwardport/changelog/2021-09/followup-conflict.md delete mode 100644 forwardport/changelog/2021-09/fp-remote-view.md delete mode 100644 forwardport/changelog/2021-09/fwbot-rplus-error.md delete mode 100644 forwardport/changelog/2021-09/outstanding.md delete mode 100644 forwardport/changelog/2021-10/delegate-followup.md delete mode 100644 forwardport/changelog/2021-10/followupdate-race.md delete mode 100644 
forwardport/changelog/2021-10/fw-reapproval.md delete mode 100644 forwardport/changelog/2021-10/outstanding-layout.md delete mode 100644 forwardport/changelog/2022-06/closed.md delete mode 100644 forwardport/changelog/2022-06/conflict-diff3.md delete mode 100644 forwardport/changelog/2022-06/detached.md delete mode 100644 forwardport/controllers.py delete mode 100644 forwardport/data/crons.xml delete mode 100644 forwardport/data/queues.xml delete mode 100644 forwardport/data/security.xml delete mode 100644 forwardport/data/views.xml delete mode 100644 forwardport/migrations/13.0.1.1/post-reminder-date.py delete mode 100644 forwardport/migrations/13.0.1.1/pre-tagging.py delete mode 100644 forwardport/models/__init__.py delete mode 100644 forwardport/models/forwardport.py delete mode 100644 forwardport/models/project.py delete mode 100644 forwardport/models/project_freeze.py delete mode 100644 forwardport/tests/conftest.py delete mode 100644 forwardport/tests/test_batches.py delete mode 100644 forwardport/tests/test_conflicts.py delete mode 100644 forwardport/tests/test_limit.py delete mode 100644 forwardport/tests/test_overrides.py delete mode 100644 forwardport/tests/test_simple.py delete mode 100644 forwardport/tests/test_updates.py delete mode 100644 forwardport/tests/test_weird.py delete mode 100644 mergebot_test_utils/utils.py delete mode 100644 requirements.txt delete mode 100644 runbot/__init__.py delete mode 100644 runbot/__manifest__.py delete mode 100644 runbot/common.py delete mode 100644 runbot/container.py delete mode 100644 runbot/controllers/__init__.py delete mode 100644 runbot/controllers/badge.py delete mode 100644 runbot/controllers/frontend.py delete mode 100644 runbot/controllers/hook.py delete mode 100644 runbot/data/build_parse.xml delete mode 100644 runbot/data/dockerfile_data.xml delete mode 100644 runbot/data/error_link.xml delete mode 100644 runbot/data/runbot_build_config_data.xml delete mode 100644 runbot/data/runbot_data.xml delete mode 100644 runbot/data/runbot_error_regex_data.xml delete mode 100644 runbot/data/website_data.xml delete mode 100644 runbot/documentation/images/Screenshot from 2020-09-23 12-20-40.png delete mode 100644 runbot/documentation/images/repo_odoo.png delete mode 100644 runbot/documentation/images/repo_runbot.png delete mode 100644 runbot/documentation/images/trigger.png delete mode 100644 runbot/example_scripts/nginx.conf delete mode 100755 runbot/example_scripts/runbot/builder.sh delete mode 100755 runbot/example_scripts/runbot/leader.sh delete mode 100755 runbot/example_scripts/runbot/runbot.sh delete mode 100644 runbot/example_scripts/services/builder.service delete mode 100644 runbot/example_scripts/services/leader.service delete mode 100644 runbot/example_scripts/services/runbot.service delete mode 100644 runbot/fields.py delete mode 100644 runbot/models/__init__.py delete mode 100644 runbot/models/batch.py delete mode 100644 runbot/models/branch.py delete mode 100644 runbot/models/build.py delete mode 100644 runbot/models/build_config.py delete mode 100644 runbot/models/build_error.py delete mode 100644 runbot/models/build_stat.py delete mode 100644 runbot/models/build_stat_regex.py delete mode 100644 runbot/models/bundle.py delete mode 100644 runbot/models/codeowner.py delete mode 100644 runbot/models/commit.py delete mode 100644 runbot/models/custom_trigger.py delete mode 100644 runbot/models/database.py delete mode 100644 runbot/models/dockerfile.py delete mode 100644 runbot/models/event.py delete mode 100644 
runbot/models/host.py delete mode 100644 runbot/models/ir_cron.py delete mode 100644 runbot/models/ir_ui_view.py delete mode 100644 runbot/models/project.py delete mode 100644 runbot/models/repo.py delete mode 100644 runbot/models/res_config_settings.py delete mode 100644 runbot/models/res_users.py delete mode 100644 runbot/models/runbot.py delete mode 100644 runbot/models/upgrade.py delete mode 100644 runbot/models/user.py delete mode 100644 runbot/models/version.py delete mode 100644 runbot/security/ir.model.access.csv delete mode 100644 runbot/security/ir.rule.csv delete mode 100644 runbot/security/runbot_security.xml delete mode 100644 runbot/static/src/css/runbot.css delete mode 100644 runbot/static/src/js/json_field.js delete mode 100644 runbot/static/src/js/runbot.js delete mode 100644 runbot/static/src/js/stats.js delete mode 100644 runbot/templates/badge.xml delete mode 100644 runbot/templates/batch.xml delete mode 100644 runbot/templates/branch.xml delete mode 100644 runbot/templates/build.xml delete mode 100644 runbot/templates/build_error.xml delete mode 100644 runbot/templates/build_stats.xml delete mode 100644 runbot/templates/bundle.xml delete mode 100644 runbot/templates/commit.xml delete mode 100644 runbot/templates/dashboard.xml delete mode 100644 runbot/templates/dockerfile.xml delete mode 100644 runbot/templates/frontend.xml delete mode 100644 runbot/templates/git.xml delete mode 100644 runbot/templates/nginx.xml delete mode 100644 runbot/templates/utils.xml delete mode 100644 runbot/tests/__init__.py delete mode 100644 runbot/tests/common.py delete mode 100644 runbot/tests/test_branch.py delete mode 100644 runbot/tests/test_build.py delete mode 100644 runbot/tests/test_build_config_step.py delete mode 100644 runbot/tests/test_build_error.py delete mode 100644 runbot/tests/test_build_stat.py delete mode 100644 runbot/tests/test_command.py delete mode 100644 runbot/tests/test_commit.py delete mode 100644 runbot/tests/test_cron.py delete mode 100644 runbot/tests/test_dockerfile.py delete mode 100644 runbot/tests/test_event.py delete mode 100644 runbot/tests/test_repo.py delete mode 100644 runbot/tests/test_runbot.py delete mode 100644 runbot/tests/test_schedule.py delete mode 100644 runbot/tests/test_upgrade.py delete mode 100644 runbot/tests/test_version.py delete mode 100644 runbot/views/branch_views.xml delete mode 100644 runbot/views/build_error_views.xml delete mode 100644 runbot/views/build_views.xml delete mode 100644 runbot/views/bundle_views.xml delete mode 100644 runbot/views/codeowner_views.xml delete mode 100644 runbot/views/commit_views.xml delete mode 100644 runbot/views/config_views.xml delete mode 100644 runbot/views/custom_trigger_wizard_views.xml delete mode 100644 runbot/views/dashboard_views.xml delete mode 100644 runbot/views/dockerfile_views.xml delete mode 100644 runbot/views/error_log_views.xml delete mode 100644 runbot/views/host_views.xml delete mode 100644 runbot/views/menus.xml delete mode 100644 runbot/views/repo_views.xml delete mode 100644 runbot/views/res_config_settings_views.xml delete mode 100644 runbot/views/stat_views.xml delete mode 100644 runbot/views/upgrade.xml delete mode 100644 runbot/views/warning_views.xml delete mode 100644 runbot/wizards/__init__.py delete mode 100644 runbot/wizards/stat_regex_wizard.py delete mode 100644 runbot/wizards/stat_regex_wizard_views.xml delete mode 100755 runbot_builder/builder.py delete mode 100755 runbot_builder/dbmover.py delete mode 100755 runbot_builder/leader.py delete mode 100755 
runbot_builder/tester.py delete mode 100644 runbot_builder/tools.py delete mode 100644 runbot_cla/__init__.py delete mode 100644 runbot_cla/__manifest__.py delete mode 100644 runbot_cla/build_config.py delete mode 100644 runbot_cla/data/runbot_build_config_data.xml delete mode 100644 runbot_merge/README.rst delete mode 100644 runbot_merge/__init__.py delete mode 100644 runbot_merge/__manifest__.py delete mode 100644 runbot_merge/changelog/2021-09/conflict_authorship.md delete mode 100644 runbot_merge/changelog/2021-09/different_project_link.md delete mode 100644 runbot_merge/changelog/2021-09/drafts.md delete mode 100644 runbot_merge/changelog/2021-09/fetch_closed.md delete mode 100644 runbot_merge/changelog/2021-09/persistent_linked_prs.md delete mode 100644 runbot_merge/changelog/2021-09/rebase_tagging.md delete mode 100644 runbot_merge/changelog/2021-09/staging_failure_message.md delete mode 100644 runbot_merge/changelog/2021-09/timestamps.md delete mode 100644 runbot_merge/changelog/2021-10/changelog.md delete mode 100644 runbot_merge/changelog/2021-10/commit-title-edition.md delete mode 100644 runbot_merge/changelog/2021-10/pr_description_up_to_date.md delete mode 100644 runbot_merge/changelog/2021-10/pr_errors.md delete mode 100644 runbot_merge/changelog/2021-10/pr_page.md delete mode 100644 runbot_merge/changelog/2021-10/review-without-email.md delete mode 100644 runbot_merge/changelog/2021-10/reviewer-merge-methods.md delete mode 100644 runbot_merge/changelog/2021-10/squash.md delete mode 100644 runbot_merge/changelog/2022-06/alerts.md delete mode 100644 runbot_merge/changelog/2022-06/branch.md delete mode 100644 runbot_merge/changelog/2022-06/empty-body.md delete mode 100644 runbot_merge/changelog/2022-06/pinging.md delete mode 100644 runbot_merge/changelog/2022-06/provisioning.md delete mode 100644 runbot_merge/changelog/2022-06/ui.md delete mode 100644 runbot_merge/changelog/2022-06/unstaging.md delete mode 100644 runbot_merge/controllers/__init__.py delete mode 100644 runbot_merge/controllers/dashboard.py delete mode 100644 runbot_merge/controllers/reviewer_provisioning.py delete mode 100644 runbot_merge/data/merge_cron.xml delete mode 100644 runbot_merge/exceptions.py delete mode 100644 runbot_merge/github.py delete mode 100644 runbot_merge/migrations/13.0.1.1/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.2/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.3/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.4/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.5/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.6/pre-migration.py delete mode 100644 runbot_merge/migrations/13.0.1.7/pre-migration.py delete mode 100644 runbot_merge/models/__init__.py delete mode 100644 runbot_merge/models/project.py delete mode 100644 runbot_merge/models/project_freeze/__init__.py delete mode 100644 runbot_merge/models/project_freeze/views.xml delete mode 100644 runbot_merge/models/pull_requests.py delete mode 100644 runbot_merge/models/res_partner.py delete mode 100644 runbot_merge/security/ir.model.access.csv delete mode 100644 runbot_merge/security/security.xml delete mode 100644 runbot_merge/static/project_freeze/index.js delete mode 100644 runbot_merge/static/scss/runbot_merge.scss delete mode 100644 runbot_merge/tests/README.rst delete mode 100644 runbot_merge/tests/conftest.py delete mode 100644 runbot_merge/tests/test_basic.py delete mode 100644 runbot_merge/tests/test_by_branch.py delete mode 100644 
runbot_merge/tests/test_disabled_branch.py delete mode 100644 runbot_merge/tests/test_multirepo.py delete mode 100644 runbot_merge/tests/test_oddities.py delete mode 100644 runbot_merge/tests/test_provisioning.py delete mode 100644 runbot_merge/tests/test_status_overrides.py delete mode 100644 runbot_merge/utils.py delete mode 100644 runbot_merge/views/configuration.xml delete mode 100644 runbot_merge/views/mergebot.xml delete mode 100644 runbot_merge/views/queues.xml delete mode 100644 runbot_merge/views/res_partner.xml delete mode 100644 runbot_merge/views/runbot_merge_project.xml delete mode 100644 runbot_merge/views/templates.xml delete mode 100644 runbot_populate/__init__.py delete mode 100644 runbot_populate/__manifest__.py delete mode 100644 runbot_populate/demo/runbot_demo.xml delete mode 100644 runbot_populate/models/__init__.py delete mode 100644 runbot_populate/models/runbot.py
diff --git a/README.md b/README.md
index 72797379..e72098c5 100644
--- a/README.md
+++ b/README.md
@@ -1,317 +1 @@
-# Odoo Runbot Repository
-
-This repository contains the source code of the Odoo testing bot [runbot.odoo.com](http://runbot.odoo.com/runbot) and related addons.
-
-------------------
-
-## Warnings
-
-**Runbot will delete folders / drop databases to free some space during usage.** Even if only elements created by runbot are concerned, don't use runbot on a server with sensitive data.
-
-**Runbot changes some default odoo behaviours.** A runbot database may work with other modules, but without any guarantee.
-
-**Runbot is not safe by itself.** This tutorial describes the minimal way to deploy runbot, without too many security considerations. Only trusted code should be executed with this single-machine setup. For more security the builder should be deployed separately with minimal access.
-
-## Glossary/models
-Runbot uses a set of concepts in order to cover all the use cases we need:
-
-- **Project**: regroups a set of repositories that work together. Usually one project is enough and a default *R&D* project exists.
-- **Repository**: A repository name regrouping the base repo and its forks. Ex: odoo, enterprise
-- **Remote**: A remote for a repository. Example: odoo/odoo, odoo-dev/odoo
-- **Build**: A test instance, using a set of commits and parameters to run some code and produce a result.
-- **Trigger**: Indicates that a build should be created when a new commit is pushed on a repo. A trigger has both trigger repos and dependency repos. Ex: a new commit on runbot -> build with runbot and a dependency with odoo.
-- **Bundle**: A set of branches that work together: all the branches with the same name and all linked PRs in the same project.
-- **Batch**: A container for the builds and commits of a bundle. When a new commit is pushed on a branch, if a trigger exists for the repo of that branch, a new batch is created with this commit. After 60 seconds, if no other commit is added to the batch, a build is created by each trigger having a new commit in this batch.
-
-## Processes
-
-Mainly to allow distributing runbot over multiple machines and to avoid cron worker limitations, runbot uses two processes besides the main server.
-
-- **runbot process**: the main runbot process, serving the frontend. This is the odoo-bin process.
-- **leader process**: this process should only be started once; it detects new commits and creates builds for the builders.
-- **builder process**: this process can run at most once per physical host; it picks unassigned builds and executes them.
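As a rough orientation before the walkthrough below, here is how the three processes map onto the pieces shipped in this repository. The mapping assumes the example wrapper scripts call the `runbot_builder` entry points visible in the file list above; the exact invocations and flags live in `runbot/example_scripts`, so treat this as a sketch rather than literal commands.

```bash
# Sketch only; see runbot/example_scripts for the real scripts and flags.
# runbot  process: odoo-bin with the runbot addon, serves the web frontend
# leader  process: wrapper around runbot_builder/leader.py (assumed entry point), started once for the whole deployment
# builder process: wrapper around runbot_builder/builder.py (assumed entry point), at most one per physical host
```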
-
-## HOW TO
-
-This section gives the basic steps to follow to configure the runbot. The configuration may differ from one use to another; this one describes how to test addons for odoo, fetching the odoo core but without testing vanilla odoo. As an example, the runbot odoo addon will be used as a test case. Runbotception.
-
-### DNS
-
-You may configure a DNS entry for your runbot domain as well as a CNAME for all subdomains.
-
-```
-* IN CNAME runbot.domain.com.
-```
-This is mainly useful to access running builds but will also give more freedom for future configurations.
-This is not needed but many features won't work without it.
-
-### nginx
-
-An example config is given in the example_scripts folder.
-
-This may be adapted depending on your setup, mainly for domain names. This can be adapted during the install, but serving at least the runbot frontend (proxy pass 80 to 8069) is the minimal config needed.
-Note that runbot also has a dynamic nginx config listening on the 8080 port, mainly for running builds.
-
-This config is an ir_ui_view (runbot.nginx_config) and can be edited if needed. The config is applied and updated automatically after some time by the builder process.
-
-It is also advised to adapt this config to work over https.
-
-### Requirements
-
-Runbot is an addon for odoo, meaning that both the odoo and runbot code are needed to run. Some tips to configure odoo are available in the [odoo setup documentation](https://www.odoo.com/documentation/15.0/setup/install.html#setup-install-source) (requirements, postgres, ...). This page will mainly focus on runbot specificities.
-
-You will also need to install docker and other requirements before running runbot.
-
-```bash
-sudo apt-get install docker.io python3-unidiff python3-docker python3-matplotlib
-```
-
-### Setup
-
-Choose a workspace to clone both repositories and checkout the right branch in both of them.
-The directory used in the example scripts is `/home/$USER/odoo/`
-
-Note: It is highly advised to create a user for runbot. This example creates a new user `runbot`
-
-```bash
-sudo adduser runbot
-
-# needed access rights: docker, postgres
-sudo -u postgres createuser -d runbot
-sudo adduser runbot docker
-sudo systemctl restart docker
-
-# no sudo power needed for now
-
-su runbot
-cd
-mkdir odoo
-cd odoo
-```
-
-You may [add a valid ssh key linked to a github account](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account)
- to this user in order to clone the different repositories. You could clone over https, but this may be a problem later to access your private repositories.
-It is important to clone the repositories with the runbot user.
-
-```bash
-git clone --depth=1 --branch=15.0 git@github.com:odoo/odoo.git
-git clone git@github.com:odoo/runbot.git
-
-git -C odoo checkout 15.0
-git -C runbot checkout 15.0
-
-mkdir logs
-```
-
-Note: `--depth=1 --branch=15.0 ` is optional but will help to reduce the disk usage for the odoo repo.
-
-Finally, check that you have access to docker; listing the containers should work without error (but the list will be empty).
-
-```bash
-docker ps
-```
-If it is not working, ensure you are in the docker group and log out and back in if needed.
-
-### Install and start runbot
-
-This part only consists of configuring and starting the three services.
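For orientation, a sketch of the layout this part ends up with, assuming the paths used by the example scripts (adapt if you pick different locations):

```bash
# Target layout (sketch; the service-to-script pairing is assumed from the names, check the unit files):
#   ~/bin/runbot/runbot.sh    started by /etc/systemd/system/runbot.service    (frontend, odoo-bin)
#   ~/bin/runbot/leader.sh    started by /etc/systemd/system/leader.service    (commit detection, single instance)
#   ~/bin/runbot/builder.sh   started by /etc/systemd/system/builder.service   (build execution, one per host)
```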
-
-Some example scripts are given in `runbot/runbot/example_scripts`
-
-```bash
-mkdir ~/bin # if it does not exist yet
-cp -r ~/odoo/runbot/runbot/example_scripts/runbot ~/bin/runbot
-```
-
-Scripts should be adapted, mainly for the `--forced-host-name` parameter in builder.sh:
-
-```bash
-sed -i "s/runbot.domain.com/runbot.my_real_domain.com/" ~/bin/runbot/builder.sh
-```
-
-*The hostname is initially the machine hostname, but it should be different per process; having the same hostname for leader and builder is not ideal. This is why the script uses the forced-host-name parameter.*
-
-*The most important one is the builder hostname since it will be used to define running build, zip download and log urls. We recommend setting your main domain name on this process. The nginx config given in the example should be adapted if not.*
-
-
-Create the corresponding services. You can copy them from the example scripts and adapt them:
-
-```bash
-exit # go back to a sudoer user
-runbot_user="runbot"
-sudo bash -c "cp /home/${runbot_user}/odoo/runbot/runbot/example_scripts/services/* /etc/systemd/system/"
-sudo sed -i "s/runbot_user/${runbot_user}/" /etc/systemd/system/runbot.service
-sudo sed -i "s/runbot_user/${runbot_user}/" /etc/systemd/system/leader.service
-sudo sed -i "s/runbot_user/${runbot_user}/" /etc/systemd/system/builder.service
-```
-
-Enable all services and start the runbot frontend:
-
-```bash
-sudo systemctl enable runbot
-sudo systemctl enable leader
-sudo systemctl enable builder
-sudo systemctl daemon-reload
-sudo systemctl start runbot
-sudo systemctl status runbot
-```
-
-The runbot service should now be running.
-
-You can now connect to your backend and preconfigure runbot.
-- Install the runbot module, if it wasn't done before.
-- Navigate to `/web` to leave the website configurator.
-- Connect as admin (default password: admin).
-
-Check the odoo documentation for the other needed security configuration (master password). This is mainly needed for production purposes.
-You can check that in the `/web/database/manager` page. [More info here](https://www.odoo.com/documentation/15.0/administration/install/deploy.html#security)
-Change your admin user login and password.
-You may want to check the runbot settings (`Runbot > Setting > setting`):
-- Default number of workers should be the max number of parallel builds; consider having at most `#cpu - 1`
-- Modify `Default odoorc for builds` to change the running build master password to something unique ([ideally a hashed one](https://github.com/odoo/odoo/blob/15.0/odoo/tools/config.py#L722)).
-- Tweak the garbage collection settings if you have limited disk space
-- The `number of running build` is the number of parallel running builds.
-- `Max commit age (in days)` will limit the max age of commits to detect. Increase this limit to detect older branches.
-
-Finally, start the two other services:
-
-```bash
-systemctl start leader
-systemctl start builder
-```
-
-Several log files should have been created in `/home/runbot/odoo/logs/`, one per service.
-
-#### Bootstrap
-Once launched, the leader process should start to do basic work and the bootstrap will set up some directories in static.
-
-```bash
-su runbot
-ls ~/odoo/runbot/runbot/static
-```
-
->build  docker  nginx  repo  sources  src
-
-- **repo** contains the bare repositories
-- **sources** contains the exported sources needed for each build
-- **build** contains the different workspaces for the dockers, containing logs/ filestore, ...
-- **docker** contains the Dockerfiles and the docker build logs
-- **nginx** contains the nginx config used to access running instances
-All of them are empty for now.
-
-A database defined by the *runbot.runbot_db_template* icp (ir.config_parameter) will be created. By default, runbot uses template0. This database will be used as a template for testing builds. You can change this database for more customisation.
-
-Other cron operations are still disabled for now.
-
-#### DOCKER images
-A default docker image is present in the database and should automatically be built (this may take some time, check the builder logs).
-Depending on your version it may not be enough.
-You can modify it to fit your needs, or ask us for the latest version of the Dockerfile while waiting for an official link.
-
-#### Add remotes and repositories
-Access the runbot app and go to the `Runbot>Setting>Repositories` menu
-
-Create a new repo for odoo:
-![Odoo repo configuration](runbot/documentation/images/repo_odoo.png "Odoo repo configuration")
-
-- **Name**: `odoo`. It will be used as the directory name to export the sources.
-- **Identityfile** is only useful if you want to use another ssh key to access a repo.
-- **Project**: `R&D` by default.
-- **Modules to install**: `-*` in order to remove them from the default `-i`. This will speed up installation. To install and test all modules, leave this field empty or use `*`. Some modules may be blacklisted individually, by using `*-module,-other_module, l10n_*`.
-- **Server files**: `odoo-bin` will let runbot know the possible files to use to launch odoo. odoo-bin is the one to use for recent versions, but you may want to add other server files for older versions (comma-separated list). The same logic is used for manifest files.
-- **Manifest files**: `__manifest__.py`. This field is only useful to configure old versions of odoo.
-- **Addons path**: `addons,odoo/addons`. The paths where addons are stored in this repository.
-- **Mode**: `poll`, since github won't hook your runbot instance. Poll mode is limited to one update every 5 minutes. *It is advised to set it in hook mode later and hook it manually or from a cron or automated action to have more control*.
-- **Remotes**: `git@github.com:odoo/odoo.git` A single remote is added, the base odoo repo. Only branches will be fetched to limit disk usage, and branches will be created in the backend. It is possible to add multiple remotes for forks.
-
-Create another project for your repositories in `Runbot>Setting>Project`
-
-This is optional, you could use the R&D one, but it may be noisier since every update in odoo/odoo will be displayed on the same page as your own repo's. Splitting by project also allows managing access rights.
-
-Create a repo for your custom addons repository:
-![Odoo repo configuration](runbot/documentation/images/repo_runbot.png "Odoo repo configuration")
-- **Name**: `runbot`
-- **Project**: `runbot`.
-- **Modules to install**: `-*,runbot` to only install the runbot module.
-- No addons_path given, to use the repo root as default.
-- (optional) For your custom repo, it is advised to configure the repo in `hook` mode if possible, adding a webhook on `/runbot/hook`. Use `/runbot/hook/` to do it manually.
-- **Remotes**: `git@github.com:odoo/runbot.git`
-- The remote *PR* option can be checked if needed to fetch pull requests too. This will only work if a github token is given for this repo.
-
-A config file with your remotes should be created for each repo. You can check the content in `/runbot/static/repo/(runbot|odoo)/config`.
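That config file appears to be the standard git config of the bare repository created under `static/repo`, so a quick way to check that your remotes were taken into account is to print it (the path assumes the workspace used earlier in this guide):

```bash
cat ~/odoo/runbot/runbot/static/repo/odoo/config   # the remotes configured above should appear here
```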
-The repo will be fetched; this operation may take some time too. After that, you should start seeing empty batches in both projects on the frontend (`/` or `/runbot`).
-
-#### Triggers and config
-At this point, runbot will discover new branches and new commits and create bundles, but no build will be created.
-
-When a new commit is discovered, the branch is updated with a new commit. Then this commit is added to a batch, a container for new builds when they arrive, but only if a trigger corresponding to this repo exists. After one minute without a new commit update in the batch, the different triggers will create one build each.
-In this example, we want to create a new build when a new commit is pushed on runbot, and this build needs a commit in odoo as a dependency.
-
-By default the basic config will use the step `all` to test all addons. The installed addons will depend on the repo configuration, but the tests of all dependencies will be executed too.
-This may not be wanted because some `base` or `web` tests may be broken. This is the case with the runbot addons. Also, selecting only the tests for the addons
-we are interested in will speed up the build a lot.
-
-Even if it would be better to create a new Config and steps, we will modify the current `all` config step.
-
-`Runbot > Configs > Build Config Steps`
-
-Edit the `all` config step and set `/runbot` as **Test tags**.
-
-We can also check the config we're going to use:
-
-`Runbot > Configs > Build Config`
-
-Optionally, edit the `Default no run` config and remove the `base` step; that step only tests the `base` module.
-
-Configs and steps can be useful to create custom test behaviour but this is out of the scope of this tutorial.
-
-Create a new trigger like this:
-
-`Runbot>Triggers`
-
-- *Name*: `Runbot`. Just for display.
-- *Project id*: `runbot`. This is important since you can only choose repos triggering a new build in this project.
-- *Triggers*: `runbot`. A new build will be created in the project when pushing on this repo.
-- *Dependencies*: `odoo`. Runbot needs odoo to run.
-- *Config*: `Default no run`. It will start a build but won't make it run at the end. You can still wake up a build.
-
-When a branch is pushed, a new batch will be created, and after one minute the new build will be created if no other change is detected.
-
-CI options will only be used to send statuses on remotes of trigger repositories having a valid token.
-
-You can either push, or go on the frontend bundle page and use the `Force new batch` button (refresh icon) to test this new trigger.
-
-#### Bundles
-
-Bundles can be marked as `no_build`, so that new commits won't trigger batch creation and the bundle won't be displayed on the main page.
-
-#### Hosts
-Runbot is able to share pending builds across multiple hosts. In the present case, there is only one. A new host will never assign a pending build to itself by default.
-Go to the Build Hosts menu and choose yours. Uncheck *Only accept assigned build*. You can also tweak the number of parallel builds for this host.
-
-### Modules filters
-Modules to install can be filtered by repo, and by config step. The first filter to be applied is the repo one, creating the default list for a config step.
-Adding `-module` on a repo will remove the module from the default; it is advised to reflect the default case on the repo. To test only a custom module, adding `-*` on the odoo repo will disable all odoo addons. Only dependencies of custom modules will be installed.
-Some specific modules can also be filtered out using `-module1,-module2`, or some specific modules can be kept using `-*,module1,module2`.
-Modules can also be filtered on a config step with the same logic as the repo filter, except that the repo's blacklist can be disabled to allow all modules by starting the list with `*` (all available modules).
-It is also possible to add test-tags to a config step to allow more modules to be installed but to only test some specific ones. Test tags: `/module1,/module2`
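To make the combination concrete, here is a hypothetical setup for testing a single custom module; `my_module` is a placeholder, and the values go in the repo and config step fields described above:

```
odoo repo    -> Modules to install: -*              (no odoo addons by default, only dependencies of custom modules)
custom repo  -> Modules to install: -*,my_module    (install only my_module, a placeholder name, and its dependencies)
config step  -> Test tags:          /my_module      (only run the tests of my_module)
```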
-
-### db template
-DB creation will use template0 by default. It is possible to specify the template to use in the runbot config *Postgresql template*. It is mainly used to add extensions. This will also avoid having issues if template0 is used when creating a new database.
-
-It is recommended to generate a `template_runbot` database based on template0 and set this value in the runbot settings.
-
-```
-createdb template_runbot -T template0
-```
-
-## Dockerfiles
-
-Runbot uses a Dockerfile Odoo model to define the Dockerfile used for builds and is shipped with a default one. This default Dockerfile is based on Ubuntu Bionic and is intended to build recent supported versions of Odoo.
-
-The model uses Odoo QWeb views as templates.
-
-A new Dockerfile can be created as needed, either by duplicating the default one and adapting parameters in the view (e.g. changing the key `'from': 'ubuntu:bionic'` to `'from': 'debian:buster'` will create a new Dockerfile based on Debian instead of Ubuntu),
-or by providing a plain Dockerfile in the template.
-
-Once the Dockerfile is created and the `to_build` field is checked, the Dockerfile will be built (note that no other operations will occur during the build).
-
-A version or a bundle can be assigned a specific Dockerfile.
+No master version available for runbot, please use the latest stable \ No newline at end of file diff --git a/conftest.py b/conftest.py deleted file mode 100644 index e3dd70d4..00000000 --- a/conftest.py +++ /dev/null @@ -1,1226 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Configuration: - -* an ``odoo`` binary in the path, which runs the relevant odoo; to ensure a - clean slate odoo is re-started and a new database is created before each - test (technically a "template" db is created first, then that DB is cloned - and the fresh clone is used for each test) - -* pytest.ini (at the root of the runbot repo or higher) with the following - sections and keys - - ``github`` - - owner, the name of the account (personal or org) under which test repos - will be created & deleted (note: some repos might be created under role - accounts as well) - - token, either personal or oauth, must have the scopes ``public_repo``, - ``delete_repo`` and ``admin:repo_hook``, if personal the owner must be - the corresponding user account, not an org. Also user:email for the - forwardport / forwardbot tests - - ``role_reviewer``, ``role_self_reviewer`` and ``role_other`` - - name (optional, used as partner name when creating that, otherwise github - login gets used) - - email (optional, used as partner email when creating that, otherwise - github email gets used, reviewer and self-reviewer must have an email) - - token, a personal access token with the ``public_repo`` scope (otherwise - the API can't leave comments), maybe eventually delete_repo (for personal - forks) - - ..
warning:: the accounts must *not* be flagged, or the webhooks on - commenting or creating reviews will not trigger, and the - tests will fail - -* either ``ngrok`` or ``lt`` (localtunnel) available on the path. ngrok with - a configured account is recommended: ngrok is more reliable than localtunnel - but a free account is necessary to get a high-enough rate limiting for some - of the multi-repo tests to work - -Finally the tests aren't 100% reliable as they rely on quite a bit of network -traffic, it's possible that the tests fail due to network issues rather than -logic errors. -""" -import base64 -import collections -import configparser -import contextlib -import copy -import functools -import http.client -import itertools -import os -import pathlib -import pprint -import random -import re -import socket -import subprocess -import sys -import tempfile -import time -import uuid -import warnings -import xmlrpc.client -from contextlib import closing - -import psutil -import pytest -import requests - -NGROK_CLI = [ - 'ngrok', 'start', '--none', '--region', 'eu', -] - -def pytest_addoption(parser): - parser.addoption('--addons-path') - parser.addoption("--no-delete", action="store_true", help="Don't delete repo after a failed run") - parser.addoption('--log-github', action='store_true') - - parser.addoption( - '--tunnel', action="store", type="choice", choices=['', 'ngrok', 'localtunnel'], default='', - help="Which tunneling method to use to expose the local Odoo server " - "to hook up github's webhook. ngrok is more reliable, but " - "creating a free account is necessary to avoid rate-limiting " - "issues (anonymous limiting is rate-limited at 20 incoming " - "queries per minute, free is 40, multi-repo batching tests will " - "blow through the former); localtunnel has no rate-limiting but " - "the servers are way less reliable") - - -# noinspection PyUnusedLocal -def pytest_configure(config): - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'mergebot_test_utils')) - - -@pytest.fixture(scope='session', autouse=True) -def _set_socket_timeout(): - """ Avoid unlimited wait on standard sockets during tests, this is mostly - an issue for non-trivial cron calls - """ - socket.setdefaulttimeout(120.0) - -@pytest.fixture(scope="session") -def config(pytestconfig): - """ Flat version of the pytest config file (pytest.ini), parses to a - simple dict of {section: {key: value}} - - """ - conf = configparser.ConfigParser(interpolation=None) - conf.read([pytestconfig.inifile]) - cnf = { - name: dict(s.items()) - for name, s in conf.items() - } - # special case user / owner / ... 
- cnf['role_user'] = { - 'token': conf['github']['token'] - } - return cnf - -@pytest.fixture(scope='session') -def rolemap(request, config): - # hack because capsys is not session-scoped - capmanager = request.config.pluginmanager.getplugin("capturemanager") - # only fetch github logins once per session - rolemap = {} - for k, data in config.items(): - if k.startswith('role_'): - role = k[5:] - elif k == 'github': - role = 'user' - else: - continue - - with capmanager.global_and_fixture_disabled(): - r = _rate_limited(lambda: requests.get('https://api.github.com/user', headers={'Authorization': 'token %s' % data['token']})) - r.raise_for_status() - - user = rolemap[role] = r.json() - data['user'] = user['login'] - return rolemap - -@pytest.fixture -def partners(env, config, rolemap): - m = {} - for role, u in rolemap.items(): - if role in ('user', 'other'): - continue - - login = u['login'] - conf = config['role_' + role] - m[role] = env['res.partner'].create({ - 'name': conf.get('name', login), - 'email': conf.get('email') or u['email'] or False, - 'github_login': login, - }) - return m - -@pytest.fixture -def setreviewers(partners): - def _(*repos): - partners['reviewer'].write({ - 'review_rights': [ - (0, 0, {'repository_id': repo.id, 'review': True}) - for repo in repos - ] - }) - partners['self_reviewer'].write({ - 'review_rights': [ - (0, 0, {'repository_id': repo.id, 'self_review': True}) - for repo in repos - ] - }) - return _ - -@pytest.fixture -def users(partners, rolemap): - return {k: v['login'] for k, v in rolemap.items()} - -@pytest.fixture(scope='session') -def tunnel(pytestconfig, port): - """ Creates a tunnel to localhost: using ngrok or localtunnel, should yield the - publicly routable address & terminate the process at the end of the session - """ - tunnel = pytestconfig.getoption('--tunnel') - if tunnel == '': - return f'http://localhost:{port}' - elif tunnel == 'ngrok': - web_addr = 'http://localhost:4040/api' - addr = 'localhost:%d' % port - # try to find out if ngrok is running, and if it's not attempt - # to start it - try: - # FIXME: this is for xdist to avoid workers running ngrok at the - # exact same time, use lockfile instead - time.sleep(random.SystemRandom().randint(1, 10)) - # FIXME: use config file so we can set web_addr to something else - # than localhost:4040 (otherwise we can't disambiguate - # between the ngrok we started and an ngrok started by - # some other user) - requests.get(web_addr) - except requests.exceptions.ConnectionError: - subprocess.Popen(NGROK_CLI, stdout=subprocess.DEVNULL) - for _ in range(5): - time.sleep(1) - with contextlib.suppress(requests.exceptions.ConnectionError): - requests.get(web_addr) - break - else: - raise Exception("Unable to connect to ngrok") - - requests.post(f'{web_addr}/tunnels', json={ - 'name': str(port), - 'proto': 'http', - 'bind_tls': True, # only https - 'addr': addr, - 'inspect': True, - }).raise_for_status() - - tunnel = f'{web_addr}/tunnels/{port}' - for _ in range(10): - time.sleep(2) - r = requests.get(tunnel) - # not created yet, wait and retry - if r.status_code == 404: - continue - # check for weird responses - r.raise_for_status() - try: - yield r.json()['public_url'] - finally: - requests.delete(tunnel) - for _ in range(10): - time.sleep(1) - r = requests.get(tunnel) - # check if deletion is done - if r.status_code == 404: - break - r.raise_for_status() - else: - raise TimeoutError("ngrok tunnel deletion failed") - - r = requests.get(f'{web_addr}/tunnels') - # there are still tunnels in the list 
-> bail - if r.ok and r.json()['tunnels']: - return - - # ngrok is broken or all tunnels have been shut down -> try to - # find and kill it (but only if it looks a lot like we started it) - for p in psutil.process_iter(): - if p.name() == 'ngrok' and p.cmdline() == NGROK_CLI: - p.terminate() - break - return - else: - raise TimeoutError("ngrok tunnel creation failed (?)") - elif tunnel == 'localtunnel': - p = subprocess.Popen(['lt', '-p', str(port)], stdout=subprocess.PIPE) - try: - r = p.stdout.readline() - m = re.match(br'your url is: (https://.*\.localtunnel\.me)', r) - assert m, "could not get the localtunnel URL" - yield m.group(1).decode('ascii') - finally: - p.terminate() - p.wait(30) - else: - raise ValueError("Unsupported %s tunnel method" % tunnel) - -class DbDict(dict): - def __init__(self, adpath): - super().__init__() - self._adpath = adpath - def __missing__(self, module): - self[module] = db = 'template_%s' % uuid.uuid4() - with tempfile.TemporaryDirectory() as d: - subprocess.run([ - 'odoo', '--no-http', - '--addons-path', self._adpath, - '-d', db, '-i', module + ',auth_oauth', - '--max-cron-threads', '0', - '--stop-after-init', - '--log-level', 'warn' - ], - check=True, - env={**os.environ, 'XDG_DATA_HOME': d} - ) - return db - -@pytest.fixture(scope='session') -def dbcache(request): - """ Creates template DB once per run, then just duplicates it before - starting odoo and running the testcase - """ - dbs = DbDict(request.config.getoption('--addons-path')) - yield dbs - for db in dbs.values(): - subprocess.run(['dropdb', db], check=True) - -@pytest.fixture -def db(request, module, dbcache): - rundb = str(uuid.uuid4()) - subprocess.run(['createdb', '-T', dbcache[module], rundb], check=True) - - yield rundb - - if not request.config.getoption('--no-delete'): - subprocess.run(['dropdb', rundb], check=True) - -def wait_for_hook(n=1): - time.sleep(10 * n) - -def wait_for_server(db, port, proc, mod, timeout=120): - """ Polls for server to be response & have installed our module. 
- - Raises socket.timeout on failure - """ - limit = time.time() + timeout - while True: - if proc.poll() is not None: - raise Exception("Server unexpectedly closed") - - try: - uid = xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/common'.format(port))\ - .authenticate(db, 'admin', 'admin', {}) - mods = xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/object'.format(port))\ - .execute_kw( - db, uid, 'admin', 'ir.module.module', 'search_read', [ - [('name', '=', mod)], ['state'] - ]) - if mods and mods[0].get('state') == 'installed': - break - except ConnectionRefusedError: - if time.time() > limit: - raise socket.timeout() - -@pytest.fixture(scope='session') -def port(): - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(('', 0)) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - return s.getsockname()[1] - -@pytest.fixture(scope='session') -def dummy_addons_path(): - with tempfile.TemporaryDirectory() as dummy_addons_path: - mod = pathlib.Path(dummy_addons_path, 'saas_worker') - mod.mkdir(0o700) - (mod / '__init__.py').write_bytes(b'') - (mod / '__manifest__.py').write_text(pprint.pformat({ - 'name': 'dummy saas_worker', - 'version': '1.0', - }), encoding='utf-8') - (mod / 'util.py').write_text("""\ -def from_role(_): - return lambda fn: fn -""", encoding='utf-8') - - yield dummy_addons_path - -@pytest.fixture -def server(request, db, port, module, dummy_addons_path, tmpdir): - log_handlers = [ - 'odoo.modules.loading:WARNING', - ] - if not request.config.getoption('--log-github'): - log_handlers.append('github_requests:WARNING') - - addons_path = ','.join(map(str, [ - request.config.getoption('--addons-path'), - dummy_addons_path, - ])) - p = subprocess.Popen([ - 'odoo', '--http-port', str(port), - '--addons-path', addons_path, - '-d', db, - '--max-cron-threads', '0', # disable cron threads (we're running crons by hand) - *itertools.chain.from_iterable(('--log-handler', h) for h in log_handlers), - ], env={ - **os.environ, - # stop putting garbage in the user dirs, and potentially creating conflicts - # TODO: way to override this with macOS? 
- 'XDG_DATA_HOME': str(tmpdir.mkdir('share')), - 'XDG_CACHE_HOME': str(tmpdir.mkdir('cache')), - }) - - try: - wait_for_server(db, port, p, module) - - yield p - finally: - p.terminate() - p.wait(timeout=30) - -@pytest.fixture -def env(port, server, db, default_crons): - yield Environment(port, db, default_crons) - -def check(response): - assert response.ok, response.text or response.reason - return response -# users is just so I can avoid autouse on toplevel users fixture b/c it (seems -# to) break the existing local tests -@pytest.fixture -def make_repo(capsys, request, config, tunnel, users): - owner = config['github']['owner'] - github = requests.Session() - github.headers['Authorization'] = 'token %s' % config['github']['token'] - - # check whether "owner" is a user or an org, as repo-creation endpoint is - # different - with capsys.disabled(): - q = _rate_limited(lambda: github.get('https://api.github.com/users/{}'.format(owner))) - q.raise_for_status() - if q.json().get('type') == 'Organization': - endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner) - else: - endpoint = 'https://api.github.com/user/repos' - r = check(github.get('https://api.github.com/user')) - assert r.json()['login'] == owner - - repos = [] - def repomaker(name): - name = 'ignore_%s_%s' % (name, base64.b64encode(os.urandom(6), b'-_').decode()) - fullname = '{}/{}'.format(owner, name) - repo_url = 'https://api.github.com/repos/{}'.format(fullname) - - # create repo - r = check(github.post(endpoint, json={ - 'name': name, - 'has_issues': False, - 'has_projects': False, - 'has_wiki': False, - 'auto_init': False, - # at least one merge method must be enabled :( - 'allow_squash_merge': False, - # 'allow_merge_commit': False, - 'allow_rebase_merge': False, - })) - r = r.json() - # wait for repository visibility - while True: - time.sleep(1) - if github.head(r['url']).ok: - break - - repo = Repo(github, fullname, repos) - - # create webhook - check(github.post('{}/hooks'.format(repo_url), json={ - 'name': 'web', - 'config': { - 'url': '{}/runbot_merge/hooks'.format(tunnel), - 'content_type': 'json', - 'insecure_ssl': '1', - }, - 'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review'] - })) - time.sleep(1) - - check(github.put('{}/contents/{}'.format(repo_url, 'a'), json={ - 'path': 'a', - 'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects', - 'content': base64.b64encode(b'whee').decode('ascii'), - 'branch': 'garbage_%s' % uuid.uuid4() - })) - time.sleep(1) - return repo - - yield repomaker - - if not request.config.getoption('--no-delete'): - for repo in reversed(repos): - repo.delete() - - -def _rate_limited(req): - while True: - q = req() - if not q.ok and q.headers.get('X-RateLimit-Remaining') == '0': - reset = int(q.headers['X-RateLimit-Reset']) - delay = max(0, round(reset - time.time() + 1.0)) - print("Hit rate limit, sleeping for", delay, "seconds") - time.sleep(delay) - continue - break - return q - - -Commit = collections.namedtuple('Commit', 'id tree message author committer parents') -class Repo: - def __init__(self, session, fullname, repos): - self._session = session - self.name = fullname - self._repos = repos - self.hook = False - repos.append(self) - - @property - def owner(self): - return self.name.split('/')[0] - - def unsubscribe(self, token=None): - self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={ - 'subscribed': False, - 'ignored': True, - }) - - def 
add_collaborator(self, login, token): - # send invitation to user - r = check(self._session.put('https://api.github.com/repos/{}/collaborators/{}'.format(self.name, login))) - # accept invitation on behalf of user - check(requests.patch('https://api.github.com/user/repository_invitations/{}'.format(r.json()['id']), headers={ - 'Authorization': 'token ' + token - })) - # sanity check that user is part of collaborators - r = check(self._session.get('https://api.github.com/repos/{}/collaborators'.format(self.name))) - assert any(login == c['login'] for c in r.json()) - - def _get_session(self, token): - s = self._session - if token: - s = requests.Session() - s.headers['Authorization'] = 'token %s' % token - return s - - def delete(self): - r = self._session.delete('https://api.github.com/repos/{}'.format(self.name)) - if r.status_code != 204: - warnings.warn("Unable to delete repository %s (HTTP %s)" % (self.name, r.status_code)) - - def set_secret(self, secret): - assert self.hook - r = self._session.get( - 'https://api.github.com/repos/{}/hooks'.format(self.name)) - response = r.json() - assert 200 <= r.status_code < 300, response - [hook] = response - - r = self._session.patch('https://api.github.com/repos/{}/hooks/{}'.format(self.name, hook['id']), json={ - 'config': {**hook['config'], 'secret': secret}, - }) - assert 200 <= r.status_code < 300, r.json() - - def get_ref(self, ref): - # differs from .commit(ref).id for the sake of assertion error messages - # apparently commits/{ref} returns 422 or some other fool thing when the - # ref' does not exist which sucks for asserting "the ref' has been - # deleted" - # FIXME: avoid calling get_ref on a hash & remove this code - if re.match(r'[0-9a-f]{40}', ref): - # just check that the commit exists - r = self._session.get('https://api.github.com/repos/{}/git/commits/{}'.format(self.name, ref)) - assert 200 <= r.status_code < 300, r.reason or http.client.responses[r.status_code] - return r.json()['sha'] - - if ref.startswith('refs/'): - ref = ref[5:] - if not ref.startswith('heads'): - ref = 'heads/' + ref - - r = self._session.get('https://api.github.com/repos/{}/git/ref/{}'.format(self.name, ref)) - assert 200 <= r.status_code < 300, r.reason or http.client.responses[r.status_code] - res = r.json() - assert res['object']['type'] == 'commit' - return res['object']['sha'] - - def commit(self, ref): - if not re.match(r'[0-9a-f]{40}', ref): - if not ref.startswith(('heads/', 'refs/heads/')): - ref = 'refs/heads/' + ref - # apparently heads/ ~ refs/heads/ but are not - # necessarily up to date ??? 
unlike the git ref system where :ref - # starts at heads/ - if ref.startswith('heads/'): - ref = 'refs/' + ref - - r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref)) - response = r.json() - assert 200 <= r.status_code < 300, response - - return self._commit_from_gh(response) - - def _commit_from_gh(self, gh_commit): - c = gh_commit['commit'] - return Commit( - id=gh_commit['sha'], - tree=c['tree']['sha'], - message=c['message'], - author=c['author'], - committer=c['committer'], - parents=[p['sha'] for p in gh_commit['parents']], - ) - - def read_tree(self, commit): - """ read tree object from commit - - :param Commit commit: - :rtype: Dict[str, str] - """ - r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree)) - assert 200 <= r.status_code < 300, r.json() - - # read tree's blobs - tree = {} - for t in r.json()['tree']: - assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases" - r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha'])) - assert 200 <= r.status_code < 300, r.json() - tree[t['path']] = base64.b64decode(r.json()['content']).decode() - - return tree - - def make_ref(self, name, commit, force=False): - assert self.hook - assert name.startswith('heads/') - r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={ - 'ref': 'refs/' + name, - 'sha': commit, - }) - if force and r.status_code == 422: - self.update_ref(name, commit, force=force) - return - assert r.ok, r.text - - def update_ref(self, name, commit, force=False): - assert self.hook - r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force}) - assert r.ok, r.text - - def protect(self, branch): - assert self.hook - r = self._session.put('https://api.github.com/repos/{}/branches/{}/protection'.format(self.name, branch), json={ - 'required_status_checks': None, - 'enforce_admins': True, - 'required_pull_request_reviews': None, - 'restrictions': None, - }) - assert 200 <= r.status_code < 300, r.json() - - # FIXME: remove this (runbot_merge should use make_commits directly) - def make_commit(self, ref, message, author, committer=None, tree=None, wait=True): - assert tree - if isinstance(ref, list): - assert all(re.match(r'[0-9a-f]{40}', r) for r in ref) - ancestor_id = ref - ref = None - else: - ancestor_id = self.get_ref(ref) if ref else None - # if ref is already a commit id, don't pass it in - if ancestor_id == ref: - ref = None - - [h] = self.make_commits( - ancestor_id, - MakeCommit(message, tree=tree, author=author, committer=committer, reset=True), - ref=ref - ) - return h - - def make_commits(self, root, *commits, ref=None, make=True): - assert self.hook - if isinstance(root, list): - parents = root - tree = None - elif root: - c = self.commit(root) - tree = c.tree - parents = [c.id] - else: - tree = None - parents = [] - - hashes = [] - for commit in commits: - if commit.tree: - if commit.reset: - tree = None - r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={ - 'tree': [ - {'path': k, 'mode': '100644', 'type': 'blob', 'content': v} - for k, v in commit.tree.items() - ], - 'base_tree': tree - }) - assert r.ok, r.text - tree = r.json()['sha'] - - data = { - 'parents': parents, - 'message': commit.message, - 'tree': tree, - } - if commit.author: - data['author'] = commit.author - if commit.committer: - data['committer'] = 
commit.committer - - r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data) - assert r.ok, r.text - - hashes.append(r.json()['sha']) - parents = [hashes[-1]] - - if ref: - fn = self.make_ref if make else self.update_ref - fn(ref, hashes[-1], force=True) - - return hashes - - def fork(self, *, token=None): - s = self._get_session(token) - - r = s.post('https://api.github.com/repos/{}/forks'.format(self.name)) - assert 200 <= r.status_code < 300, r.json() - - repo_name = r.json()['full_name'] - repo_url = 'https://api.github.com/repos/' + repo_name - # poll for end of fork - limit = time.time() + 60 - while s.head(repo_url, timeout=5).status_code != 200: - if time.time() > limit: - raise TimeoutError("No response for repo %s over 60s" % repo_name) - time.sleep(1) - - return Repo(s, repo_name, self._repos) - - def get_pr(self, number): - # ensure PR exists before returning it - self._session.head('https://api.github.com/repos/{}/pulls/{}'.format( - self.name, - number, - )).raise_for_status() - return PR(self, number) - - def make_pr(self, *, title=None, body=None, target, head, draft=False, token=None): - assert self.hook - self.hook = 2 - - if title is None: - assert ":" not in head, \ - "will not auto-infer titles for PRs in a remote repo" - c = self.commit(head) - parts = iter(c.message.split('\n\n', 1)) - title = next(parts) - body = next(parts, None) - - headers = {} - if token: - headers['Authorization'] = 'token {}'.format(token) - - # FIXME: change tests which pass a commit id to make_pr & remove this - if re.match(r'[0-9a-f]{40}', head): - ref = "temp_trash_because_head_must_be_a_ref_%d" % next(ct) - self.make_ref('heads/' + ref, head) - head = ref - - r = self._session.post( - 'https://api.github.com/repos/{}/pulls'.format(self.name), - json={ - 'title': title, - 'body': body, - 'head': head, - 'base': target, - 'draft': draft, - }, - headers=headers, - ) - pr = r.json() - assert 200 <= r.status_code < 300, pr - - return PR(self, pr['number']) - - def post_status(self, ref, status, context='default', **kw): - assert self.hook - assert status in ('error', 'failure', 'pending', 'success') - r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={ - 'state': status, - 'context': context, - **kw - }) - assert 200 <= r.status_code < 300, r.json() - - def is_ancestor(self, sha, of): - return any(c['sha'] == sha for c in self.log(of)) - - def log(self, ref_or_sha): - for page in itertools.count(1): - r = self._session.get( - 'https://api.github.com/repos/{}/commits'.format(self.name), - params={'sha': ref_or_sha, 'page': page} - ) - assert 200 <= r.status_code < 300, r.json() - yield from r.json() - if not r.links.get('next'): - return - - def __enter__(self): - self.hook = 1 - return self - def __exit__(self, *args): - wait_for_hook(self.hook) - self.hook = 0 - class Commit: - def __init__(self, message, *, author=None, committer=None, tree, reset=False): - self.id = None - self.message = message - self.author = author - self.committer = committer - self.tree = tree - self.reset = reset -MakeCommit = Repo.Commit -ct = itertools.count() - -class Comment(tuple): - def __new__(cls, c): - self = super(Comment, cls).__new__(cls, (c['user']['login'], c['body'])) - self._c = c - return self - def __getitem__(self, item): - return self._c[item] - - -PR_SET_READY = ''' -mutation setReady($pid: ID!) 
{ - markPullRequestReadyForReview(input: { pullRequestId: $pid}) { - clientMutationId - } -} -''' - -PR_SET_DRAFT = ''' -mutation setDraft($pid: ID!) { - convertPullRequestToDraft(input: { pullRequestId: $pid }) { - clientMutationId - } -} -''' -def state_prop(name: str) -> property: - @property - def _prop(self): - return self._pr[name] - return _prop.setter(lambda self, v: self._set_prop(name, v)) - -class PR: - def __init__(self, repo, number): - self.repo = repo - self.number = number - self.labels = LabelsProxy(self) - self._cache = None, {} - - @property - def _pr(self): - previous, caching = self._cache - r = self.repo._session.get( - 'https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), - headers=caching - ) - assert r.ok, r.json() - if r.status_code == 304: - return previous - contents, caching = self._cache = r.json(), {} - if r.headers.get('etag'): - caching['If-None-Match'] = r.headers['etag'] - if r.headers.get('last-modified'): - caching['If-Modified-Since']= r.headers['Last-Modified'] - return contents - - title = state_prop('title') - body = state_prop('body') - base = state_prop('base') - - @property - def draft(self): - return self._pr['draft'] - @draft.setter - def draft(self, v): - assert self.repo.hook - # apparently it's not possible to update the draft flag via the v3 API, - # only the V4... - r = self.repo._session.post('https://api.github.com/graphql', json={ - 'query': PR_SET_DRAFT if v else PR_SET_READY, - 'variables': {'pid': self._pr['node_id']} - }) - assert r.ok, r.text - out = r.json() - assert 'errors' not in out, out['errors'] - - @property - def head(self): - return self._pr['head']['sha'] - - @property - def user(self): - return self._pr['user']['login'] - - @property - def state(self): - return self._pr['state'] - - @property - def comments(self): - r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number)) - assert 200 <= r.status_code < 300, r.json() - return [Comment(c) for c in r.json()] - - @property - def ref(self): - return 'heads/' + self.branch.branch - - def post_comment(self, body, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token %s' % token - r = self.repo._session.post( - 'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number), - json={'body': body}, - headers=headers, - ) - assert 200 <= r.status_code < 300, r.json() - return r.json()['id'] - - def edit_comment(self, cid, body, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token %s' % token - r = self.repo._session.patch( - 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), - json={'body': body}, - headers=headers - ) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - def delete_comment(self, cid, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token %s' % token - r = self.repo._session.delete( - 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), - headers=headers - ) - assert r.status_code == 204, r.json() - - def _set_prop(self, prop, value, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token ' + token - r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={ - prop: value - }, headers=headers) - assert r.ok, r.text - - def open(self, token=None): 
- self._set_prop('state', 'open', token=token) - - def close(self, token=None): - self._set_prop('state', 'closed', token=token) - - @property - def branch(self): - r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format( - self.repo.name, - self.number, - )) - assert 200 <= r.status_code < 300, r.json() - info = r.json() - - repo = self.repo - reponame = info['head']['repo']['full_name'] - if reponame != self.repo.name: - # not sure deep copying the session object is safe / proper... - repo = Repo(copy.deepcopy(self.repo._session), reponame, []) - - return PRBranch(repo, info['head']['ref']) - - def post_review(self, state, body, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token %s' % token - r = self.repo._session.post( - 'https://api.github.com/repos/{}/pulls/{}/reviews'.format(self.repo.name, self.number), - json={'body': body, 'event': state,}, - headers=headers - ) - assert 200 <= r.status_code < 300, r.json() - -PRBranch = collections.namedtuple('PRBranch', 'repo branch') -class LabelsProxy(collections.abc.MutableSet): - def __init__(self, pr): - self._pr = pr - - @property - def _labels(self): - pr = self._pr - r = pr.repo._session.get('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number)) - assert r.ok, r.json() - return {label['name'] for label in r.json()} - - def __repr__(self): - return '' % self._labels - - def __eq__(self, other): - if isinstance(other, collections.abc.Set): - return other == self._labels - return NotImplemented - - def __contains__(self, label): - return label in self._labels - - def __iter__(self): - return iter(self._labels) - - def __len__(self): - return len(self._labels) - - def add(self, label): - pr = self._pr - assert pr.repo.hook - r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ - 'labels': [label] - }) - assert r.ok, r.json() - - def discard(self, label): - pr = self._pr - assert pr.repo.hook - r = pr.repo._session.delete('https://api.github.com/repos/{}/issues/{}/labels/{}'.format(pr.repo.name, pr.number, label)) - # discard should do nothing if the item didn't exist in the set - assert r.ok or r.status_code == 404, r.json() - - def update(self, *others): - pr = self._pr - assert pr.repo.hook - # because of course that one is not provided by MutableMapping... 
- r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ - 'labels': list(set(itertools.chain.from_iterable(others))) - }) - assert r.ok, r.json() - -class Environment: - def __init__(self, port, db, default_crons=()): - self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {}) - self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port)) - self._db = db - self._default_crons = default_crons - - def __call__(self, model, method, *args, **kwargs): - return self._object.execute_kw( - self._db, self._uid, 'admin', - model, method, - args, kwargs - ) - - def __getitem__(self, name): - return Model(self, name) - - def run_crons(self, *xids, **kw): - crons = xids or self._default_crons - print('running crons', crons, file=sys.stderr) - for xid in crons: - t0 = time.time() - print('\trunning cron', xid, '...', file=sys.stderr) - _, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid) - assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model) - self('ir.cron', 'method_direct_trigger', [cron_id], **kw) - print('\tdone %.3fs' % (time.time() - t0), file=sys.stderr) - print('done', file=sys.stderr) - # sleep for some time as a lot of crap may have happened (?) - wait_for_hook() - -class Model: - __slots__ = ['env', '_name', '_ids', '_fields'] - def __init__(self, env, model, ids=(), fields=None): - object.__setattr__(self, 'env', env) - object.__setattr__(self, '_name', model) - object.__setattr__(self, '_ids', tuple(ids or ())) - - object.__setattr__(self, '_fields', fields or self.env(self._name, 'fields_get', attributes=['type', 'relation'])) - - @property - def ids(self): - return self._ids - - @property - def _env(self): return self.env - - @property - def _model(self): return self._name - - def __bool__(self): - return bool(self._ids) - - def __len__(self): - return len(self._ids) - - def __eq__(self, other): - if not isinstance(other, Model): - return NotImplemented - return self._model == other._model and set(self._ids) == set(other._ids) - - def __repr__(self): - return "{}({})".format(self._model, ', '.join(str(id_) for id_ in self._ids)) - - def browse(self, ids): - return Model(self._env, self._model, ids) - - def exists(self): - ids = self._env(self._model, 'exists', self._ids) - return Model(self._env, self._model, ids) - - def search(self, *args, **kwargs): - ids = self._env(self._model, 'search', *args, **kwargs) - return Model(self._env, self._model, ids) - - def name_search(self, *args, **kwargs): - return self._env(self._model, 'name_search', *args, **kwargs) - - def create(self, values): - return Model(self._env, self._model, [self._env(self._model, 'create', values)]) - - def check_object_reference(self, *args, **kwargs): - return self.env(self._model, 'check_object_reference', *args, **kwargs) - - def sorted(self, field): - rs = sorted(self.read([field]), key=lambda r: r[field]) - return Model(self._env, self._model, [r['id'] for r in rs]) - - def __getitem__(self, index): - if isinstance(index, str): - return getattr(self, index) - ids = self._ids[index] - if isinstance(ids, int): - ids = [ids] - - return Model(self._env, self._model, ids, fields=self._fields) - - def __getattr__(self, fieldname): - if fieldname in ['__dataclass_fields__', '__attrs_attrs__']: - raise AttributeError('%r is invalid on %s' % (fieldname, self._model)) - - field_description = self._fields.get(fieldname) - 
if field_description is None: - return functools.partial(self._call, fieldname) - - if not self._ids: - return False - - if fieldname == 'id': - return self._ids[0] - - val = self.read([fieldname])[0][fieldname] - field_description = self._fields[fieldname] - if field_description['type'] in ('many2one', 'one2many', 'many2many'): - val = val or [] - if field_description['type'] == 'many2one': - val = val[:1] # (id, name) => [id] - return Model(self._env, field_description['relation'], val) - - return val - - # because it's difficult to discriminate between methods and fields - def _call(self, name, *args, **kwargs): - return self._env(self._model, name, self._ids, *args, **kwargs) - - def __setattr__(self, fieldname, value): - self._env(self._model, 'write', self._ids, {fieldname: value}) - - def __iter__(self): - return ( - Model(self._env, self._model, [i], fields=self._fields) - for i in self._ids - ) - - def mapped(self, path): - field, *rest = path.split('.', 1) - descr = self._fields[field] - if descr['type'] in ('many2one', 'one2many', 'many2many'): - result = Model(self._env, descr['relation']) - for record in self: - result |= getattr(record, field) - - return result.mapped(rest[0]) if rest else result - - assert not rest - return [getattr(r, field) for r in self] - - def filtered(self, fn): - result = Model(self._env, self._model, fields=self._fields) - for record in self: - if fn(record): - result |= record - return result - - def __sub__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields) - - def __or__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model( - self._env, self._model, - self._ids + tuple(id_ for id_ in other.ids if id_ not in self._ids), - fields=self._fields - ) - __add__ = __or__ - - def __and__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields) - - def invalidate_cache(self, fnames=None, ids=None): - pass # not a concern when every access is an RPC call diff --git a/forwardport/__init__.py b/forwardport/__init__.py deleted file mode 100644 index f7209b17..00000000 --- a/forwardport/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from . import models -from . 
import controllers diff --git a/forwardport/__manifest__.py b/forwardport/__manifest__.py deleted file mode 100644 index b86c425e..00000000 --- a/forwardport/__manifest__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -{ - 'name': 'forward port bot', - 'version': '1.1', - 'summary': "A port which forward ports successful PRs.", - 'depends': ['runbot_merge'], - 'data': [ - 'data/security.xml', - 'data/crons.xml', - 'data/views.xml', - 'data/queues.xml', - ], - 'license': 'LGPL-3', -} diff --git a/forwardport/changelog/2021-09/authorship-dedup.md b/forwardport/changelog/2021-09/authorship-dedup.md deleted file mode 100644 index 34f2d437..00000000 --- a/forwardport/changelog/2021-09/authorship-dedup.md +++ /dev/null @@ -1 +0,0 @@ -FIX: the deduplication of authorship in case of conflicts in multi-commit PRs diff --git a/forwardport/changelog/2021-09/authorship.md b/forwardport/changelog/2021-09/authorship.md deleted file mode 100644 index ffe67c90..00000000 --- a/forwardport/changelog/2021-09/authorship.md +++ /dev/null @@ -1 +0,0 @@ -FIX: loss of authorship on conflicts in multi-commit PRs, such conflicts now generate a commit with no authorship information, which can not be merged diff --git a/forwardport/changelog/2021-09/conflict-view.md b/forwardport/changelog/2021-09/conflict-view.md deleted file mode 100644 index 4c6cc103..00000000 --- a/forwardport/changelog/2021-09/conflict-view.md +++ /dev/null @@ -1 +0,0 @@ -ADD: better localisation of conflicts in multi-PR commits, list all the commits in the comment and add an arrow pointing to the one which broke diff --git a/forwardport/changelog/2021-09/draft.md b/forwardport/changelog/2021-09/draft.md deleted file mode 100644 index d92418a5..00000000 --- a/forwardport/changelog/2021-09/draft.md +++ /dev/null @@ -1 +0,0 @@ -REM: creation of forward ports in draft mode diff --git a/forwardport/changelog/2021-09/feedback-missing-login.md b/forwardport/changelog/2021-09/feedback-missing-login.md deleted file mode 100644 index 92e921e7..00000000 --- a/forwardport/changelog/2021-09/feedback-missing-login.md +++ /dev/null @@ -1 +0,0 @@ -FIX: some feedback messages didn't correctly ping the person being replied to diff --git a/forwardport/changelog/2021-09/followup-conflict.md b/forwardport/changelog/2021-09/followup-conflict.md deleted file mode 100644 index 6e8bf3bf..00000000 --- a/forwardport/changelog/2021-09/followup-conflict.md +++ /dev/null @@ -1 +0,0 @@ -IMP: properly notify the user when an update to a pull request causes a conflict when impacted on the followup diff --git a/forwardport/changelog/2021-09/fp-remote-view.md b/forwardport/changelog/2021-09/fp-remote-view.md deleted file mode 100644 index 97729354..00000000 --- a/forwardport/changelog/2021-09/fp-remote-view.md +++ /dev/null @@ -1 +0,0 @@ -IMP: add the forward-port remote to the repository view, so it can be set via the UI diff --git a/forwardport/changelog/2021-09/fwbot-rplus-error.md b/forwardport/changelog/2021-09/fwbot-rplus-error.md deleted file mode 100644 index e792d7ee..00000000 --- a/forwardport/changelog/2021-09/fwbot-rplus-error.md +++ /dev/null @@ -1 +0,0 @@ -IMP: error messages when trying to `@fw-bot r+` on pull requests not under its purview diff --git a/forwardport/changelog/2021-09/outstanding.md b/forwardport/changelog/2021-09/outstanding.md deleted file mode 100644 index 8e83757d..00000000 --- a/forwardport/changelog/2021-09/outstanding.md +++ /dev/null @@ -1 +0,0 @@ -ADD: list of outstanding forward-ports diff --git 
a/forwardport/changelog/2021-10/delegate-followup.md b/forwardport/changelog/2021-10/delegate-followup.md deleted file mode 100644 index e3fcfc8b..00000000 --- a/forwardport/changelog/2021-10/delegate-followup.md +++ /dev/null @@ -1 +0,0 @@ -FIX: allow delegate reviewers *on forward ports* to approve the followups, it worked fine for delegates on the original pull request but a delegation on a forward port would only work for that specific PR (note: only works if the followups don't already exist) diff --git a/forwardport/changelog/2021-10/followupdate-race.md b/forwardport/changelog/2021-10/followupdate-race.md deleted file mode 100644 index 06db5066..00000000 --- a/forwardport/changelog/2021-10/followupdate-race.md +++ /dev/null @@ -1 +0,0 @@ -FIX: rare condition where updating a forwardport would then require all followups to be individually approved diff --git a/forwardport/changelog/2021-10/fw-reapproval.md b/forwardport/changelog/2021-10/fw-reapproval.md deleted file mode 100644 index 8abdc008..00000000 --- a/forwardport/changelog/2021-10/fw-reapproval.md +++ /dev/null @@ -1 +0,0 @@ -FIX: don't trigger an error message when using `fw-bot r+` and some of the PRs were already approved diff --git a/forwardport/changelog/2021-10/outstanding-layout.md b/forwardport/changelog/2021-10/outstanding-layout.md deleted file mode 100644 index c8d0a71b..00000000 --- a/forwardport/changelog/2021-10/outstanding-layout.md +++ /dev/null @@ -1 +0,0 @@ -IMP: layout and features of the "outstanding forward port" page, show the oldest-merged PRs first and allow filtering by reviewer diff --git a/forwardport/changelog/2022-06/closed.md b/forwardport/changelog/2022-06/closed.md deleted file mode 100644 index 51b53e60..00000000 --- a/forwardport/changelog/2022-06/closed.md +++ /dev/null @@ -1 +0,0 @@ -IMP: notifications when reopening a closed forward-port (e.g. 
indicate that they're detached) diff --git a/forwardport/changelog/2022-06/conflict-diff3.md b/forwardport/changelog/2022-06/conflict-diff3.md deleted file mode 100644 index 7dc4b4c0..00000000 --- a/forwardport/changelog/2022-06/conflict-diff3.md +++ /dev/null @@ -1 +0,0 @@ -IMP: use the `diff3` conflict style, should make forward port conflicts clearer and easier to fix diff --git a/forwardport/changelog/2022-06/detached.md b/forwardport/changelog/2022-06/detached.md deleted file mode 100644 index 627d232f..00000000 --- a/forwardport/changelog/2022-06/detached.md +++ /dev/null @@ -1 +0,0 @@ -IMP: flag detached PRs in their dashboard diff --git a/forwardport/controllers.py b/forwardport/controllers.py deleted file mode 100644 index af464dcb..00000000 --- a/forwardport/controllers.py +++ /dev/null @@ -1,15 +0,0 @@ -import pathlib - -from odoo.addons.runbot_merge.controllers.dashboard import MergebotDashboard - -class Dashboard(MergebotDashboard): - def _entries(self): - changelog = pathlib.Path(__file__).parent / 'changelog' - if not changelog.is_dir(): - return super()._entries() - - return super()._entries() + [ - (d.name, [f.read_text(encoding='utf-8') for f in d.iterdir() if f.is_file()]) - for d in changelog.iterdir() - ] - diff --git a/forwardport/data/crons.xml b/forwardport/data/crons.xml deleted file mode 100644 index 02d2be1e..00000000 --- a/forwardport/data/crons.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - Check if there are merged PRs to port - - code - model._process() - 1 - minutes - -1 - - - - - Update followup FP PRs - - code - model._process() - 1 - minutes - -1 - - - - - Remind open PR - - code - model._reminder() - 1 - days - -1 - - - - - Remove branches of merged PRs - - code - model._process() - 1 - hours - -1 - - - diff --git a/forwardport/data/queues.xml b/forwardport/data/queues.xml deleted file mode 100644 index ba011344..00000000 --- a/forwardport/data/queues.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - Forward port batches - forwardport.batches - {'active_test': False} - - - Forward port batches - forwardport.batches - - - - - - - - - Forward port batch - forwardport.batches - -
- [remaining queues.xml records (markup stripped): the "Forward port batch" form view for forwardport.batches, and the "Followup Updates" action and views for forwardport.updates]
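These crons are what the acceptance tests trigger through the `Environment.run_crons` helper defined in conftest.py earlier in this patch: it resolves each external id through `ir.model.data` and then calls `method_direct_trigger` on the matching `ir.cron`. A rough usage sketch follows; the cron external id is a placeholder, since the real ids were defined in the stripped crons.xml records.

```python
# Hedged sketch only: `env` is the Environment fixture from conftest.py above;
# the cron external id is a placeholder, not one of the real crons.xml ids.
def run_forwardport_step(env):
    # Model proxy: every search/read/method call goes through execute_kw (XML-RPC)
    pending = env['forwardport.batches'].search([])
    print('queued forward-port batches:', len(pending))
    # run_crons resolves the xmlid, checks it is an ir.cron,
    # then calls method_direct_trigger on it
    env.run_crons('forwardport.some_cron_xmlid')
```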
diff --git a/forwardport/data/security.xml b/forwardport/data/security.xml deleted file mode 100644 index 99548e0c..00000000 --- a/forwardport/data/security.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - Admin access to batches - - - 1 - 1 - 1 - 1 - - - Admin access to updates - - - 1 - 1 - 1 - 1 - - - Admin access to branch remover - - - 1 - 1 - 1 - 1 - - - - No normal access to batches - - 0 - 0 - 0 - 0 - - - No normal access to updates - - 0 - 0 - 0 - 0 - - diff --git a/forwardport/data/views.xml b/forwardport/data/views.xml deleted file mode 100644 index 78499457..00000000 --- a/forwardport/data/views.xml +++ /dev/null @@ -1,215 +0,0 @@ - - - - - - - Outstanding forward ports - qweb - /forwardport/outstanding - - True - forwardport.outstanding_fp - - - - -
- [forwardport.outstanding_fp QWeb template body (markup stripped): a page titled "List of pull requests with outstanding forward ports"; for each source PR it shows the author and who merged it, then an "Outstanding forward-ports:" bullet list of the forward-port PRs and the branch each one is targeting]
- - - - - Show forwardport project fields - - runbot_merge.project - - - - - - - - - - - - - - - - - - - - - - - - - Show forwardport repository fields - - runbot_merge.repository - - - - - - - - - Show forwardport PR fields - - runbot_merge.pull_requests - - - - - - - - - - - - - - - - - - - - - Detached from forward porting (either conflicting - or explicitly updated). - - - - - - - - - - -
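Taken together, the data files above and the models below form a small work queue: the crons call `_process()` on the queue models, and the `Queue` mixin in forwardport/models/forwardport.py (a few files down) handles up to `limit` records per run, committing after each item and rolling back on failure. Below is a minimal sketch of how an item would be enqueued, assuming an Odoo `env` and a just-merged `runbot_merge.batch` record.

```python
# Sketch under assumptions: `batch` is a runbot_merge.batch that was just
# merged; the field names come from the ForwardPortTasks model defined below.
def enqueue_forward_port(env, batch):
    env['forwardport.batches'].create({
        'batch_id': batch.id,
        'source': 'merge',   # one of 'merge' / 'fp' / 'insert'
    })
    # the "Check if there are merged PRs to port" cron later drains the queue:
    # Queue._process() calls _process_item(), unlinks the record, and commits
```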
diff --git a/forwardport/migrations/13.0.1.1/post-reminder-date.py b/forwardport/migrations/13.0.1.1/post-reminder-date.py deleted file mode 100644 index 36d131a8..00000000 --- a/forwardport/migrations/13.0.1.1/post-reminder-date.py +++ /dev/null @@ -1,10 +0,0 @@ -def migrate(cr, version): - """ Set the merge_date field to the current write_date, and reset - the backoff to its default so we reprocess old PRs properly. - """ - cr.execute(""" - UPDATE runbot_merge_pull_requests - SET merge_date = write_date, - reminder_backoff_factor = -4 - WHERE state = 'merged' - """) diff --git a/forwardport/migrations/13.0.1.1/pre-tagging.py b/forwardport/migrations/13.0.1.1/pre-tagging.py deleted file mode 100644 index ef2ad801..00000000 --- a/forwardport/migrations/13.0.1.1/pre-tagging.py +++ /dev/null @@ -1,2 +0,0 @@ -def migrate(cr, version): - cr.execute("delete from ir_model where model = 'forwardport.tagging'") diff --git a/forwardport/models/__init__.py b/forwardport/models/__init__.py deleted file mode 100644 index 96d53ab5..00000000 --- a/forwardport/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 -*- -from . import project -from . import project_freeze -from . import forwardport diff --git a/forwardport/models/forwardport.py b/forwardport/models/forwardport.py deleted file mode 100644 index 9bdd395b..00000000 --- a/forwardport/models/forwardport.py +++ /dev/null @@ -1,254 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import uuid -from contextlib import ExitStack -from datetime import datetime - -from dateutil import relativedelta - -from odoo import fields, models -from odoo.addons.runbot_merge.github import GH - -# how long a merged PR survives -MERGE_AGE = relativedelta.relativedelta(weeks=2) - -_logger = logging.getLogger(__name__) - -class Queue: - __slots__ = () - limit = 100 - - def _process_item(self): - raise NotImplementedError - - def _process(self): - for b in self.search(self._search_domain(), order='create_date, id', limit=self.limit): - try: - b._process_item() - b.unlink() - self.env.cr.commit() - except Exception: - _logger.exception("Error while processing %s, skipping", b) - self.env.cr.rollback() - self.clear_caches() - - def _search_domain(self): - return [] - -class ForwardPortTasks(models.Model, Queue): - _name = 'forwardport.batches' - _description = 'batches which got merged and are candidates for forward-porting' - - limit = 10 - - batch_id = fields.Many2one('runbot_merge.batch', required=True) - source = fields.Selection([ - ('merge', 'Merge'), - ('fp', 'Forward Port Followup'), - ('insert', 'New branch port') - ], required=True) - - def _process_item(self): - batch = self.batch_id - newbatch = batch.prs._port_forward() - - if newbatch: - _logger.info( - "Processing %s (from %s): %s (%s) -> %s (%s)", - self.id, self.source, - batch, batch.prs, - newbatch, newbatch.prs, - ) - # insert new batch in ancestry sequence unless conflict (= no parent) - if self.source == 'insert': - for pr in newbatch.prs: - if not pr.parent_id: - break - newchild = pr.search([ - ('parent_id', '=', pr.parent_id.id), - ('id', '!=', pr.id), - ]) - if newchild: - newchild.parent_id = pr.id - else: # reached end of seq (or batch is empty) - # FIXME: or configuration is fucky so doesn't want to FP (maybe should error and retry?) 
- _logger.info( - "Processing %s (from %s): %s (%s) -> end of the sequence", - self.id, self.source, - batch, batch.prs - ) - batch.active = False - - -CONFLICT_TEMPLATE = "{ping}WARNING: the latest change ({previous.head}) triggered " \ - "a conflict when updating the next forward-port " \ - "({next.display_name}), and has been ignored.\n\n" \ - "You will need to update this pull request differently, " \ - "or fix the issue by hand on {next.display_name}." -CHILD_CONFLICT = "{ping}WARNING: the update of {previous.display_name} to " \ - "{previous.head} has caused a conflict in this pull request, " \ - "data may have been lost." -class UpdateQueue(models.Model, Queue): - _name = 'forwardport.updates' - _description = 'if a forward-port PR gets updated & has followups (cherrypick succeeded) the followups need to be updated as well' - - limit = 10 - - original_root = fields.Many2one('runbot_merge.pull_requests') - new_root = fields.Many2one('runbot_merge.pull_requests') - - def _process_item(self): - Feedback = self.env['runbot_merge.pull_requests.feedback'] - previous = self.new_root - with ExitStack() as s: - for child in self.new_root._iter_descendants(): - self.env.cr.execute(""" - SELECT id - FROM runbot_merge_pull_requests - WHERE id = %s - FOR UPDATE NOWAIT - """, [child.id]) - _logger.info( - "Re-port %s from %s (changed root %s -> %s)", - child.display_name, - previous.display_name, - self.original_root.display_name, - self.new_root.display_name - ) - if child.state in ('closed', 'merged'): - Feedback.create({ - 'repository': child.repository.id, - 'pull_request': child.number, - 'message': "%sancestor PR %s has been updated but this PR" - " is %s and can't be updated to match." - "\n\n" - "You may want or need to manually update any" - " followup PR." 
% ( - child.ping(), - self.new_root.display_name, - child.state, - ) - }) - return - - conflicts, working_copy = previous._create_fp_branch( - child.target, child.refname, s) - if conflicts: - _, out, err, _ = conflicts - Feedback.create({ - 'repository': previous.repository.id, - 'pull_request': previous.number, - 'message': CONFLICT_TEMPLATE.format( - ping=previous.ping(), - previous=previous, - next=child - ) - }) - Feedback.create({ - 'repository': child.repository.id, - 'pull_request': child.number, - 'message': CHILD_CONFLICT.format(ping=child.ping(), previous=previous, next=child)\ - + (f'\n\nstdout:\n```\n{out.strip()}\n```' if out.strip() else '') - + (f'\n\nstderr:\n```\n{err.strip()}\n```' if err.strip() else '') - }) - - new_head = working_copy.stdout().rev_parse(child.refname).stdout.decode().strip() - commits_count = int(working_copy.stdout().rev_list( - f'{child.target.name}..{child.refname}', - count=True - ).stdout.decode().strip()) - old_head = child.head - # update child's head to the head we're going to push - child.with_context(ignore_head_update=True).write({ - 'head': new_head, - # 'state': 'opened', - 'squash': commits_count == 1, - }) - # push the new head to the local cache: in some cases github - # doesn't propagate revisions fast enough so on the next loop we - # can't find the revision we just pushed - dummy_branch = str(uuid.uuid4()) - ref = previous._get_local_directory() - working_copy.push(ref._directory, f'{new_head}:refs/heads/{dummy_branch}') - ref.branch('--delete', '--force', dummy_branch) - # then update the child's branch to the new head - working_copy.push(f'--force-with-lease={child.refname}:{old_head}', - 'target', child.refname) - - # committing here means github could technically trigger its - # webhook before sending a response, but committing before - # would mean we can update the PR in database but fail to - # update on github, which is probably worse? 
- # alternatively we can commit, push, and rollback if the push - # fails - # FIXME: handle failures (especially on non-first update) - self.env.cr.commit() - - previous = child - -_deleter = _logger.getChild('deleter') -class DeleteBranches(models.Model, Queue): - _name = 'forwardport.branch_remover' - _description = "Removes branches of merged PRs" - - pr_id = fields.Many2one('runbot_merge.pull_requests') - - def _search_domain(self): - cutoff = self.env.context.get('forwardport_merged_before') \ - or fields.Datetime.to_string(datetime.now() - MERGE_AGE) - return [('pr_id.merge_date', '<', cutoff)] - - def _process_item(self): - _deleter.info( - "PR %s: checking deletion of linked branch %s", - self.pr_id.display_name, - self.pr_id.label - ) - - if self.pr_id.state != 'merged': - _deleter.info('✘ PR is not "merged" (got %s)', self.pr_id.state) - return - - repository = self.pr_id.repository - fp_remote = repository.fp_remote_target - if not fp_remote: - _deleter.info('✘ no forward-port target') - return - - repo_owner, repo_name = fp_remote.split('/') - owner, branch = self.pr_id.label.split(':') - if repo_owner != owner: - _deleter.info('✘ PR owner != FP target owner (%s)', repo_owner) - return # probably don't have access to arbitrary repos - - github = GH(token=repository.project_id.fp_github_token, repo=fp_remote) - refurl = 'git/refs/heads/' + branch - ref = github('get', refurl, check=False) - if ref.status_code != 200: - _deleter.info("✘ branch already deleted (%s)", ref.json()) - return - - ref = ref.json() - if isinstance(ref, list): - _deleter.info( - "✘ got a fuzzy match (%s), branch probably deleted", - ', '.join(r['ref'] for r in ref) - ) - return - - if ref['object']['sha'] != self.pr_id.head: - _deleter.info( - "✘ branch %s head mismatch, expected %s, got %s", - self.pr_id.label, - self.pr_id.head, - ref['object']['sha'] - ) - return - - r = github('delete', refurl, check=False) - assert r.status_code == 204, \ - "Tried to delete branch %s of %s, got %s" % ( - branch, self.pr_id.display_name, - r.json() - ) - _deleter.info('✔ deleted branch %s of PR %s', self.pr_id.label, self.pr_id.display_name) diff --git a/forwardport/models/project.py b/forwardport/models/project.py deleted file mode 100644 index 4e921b92..00000000 --- a/forwardport/models/project.py +++ /dev/null @@ -1,1224 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Technically could be independent from mergebot but would require a lot of -duplicate work e.g. keeping track of statuses (including on commits which -might not be in PRs yet), handling co-dependent PRs, ... - -However extending the mergebot also leads to messiness: fpbot should have -its own user / feedback / API keys, mergebot and fpbot both have branch -ordering but for mergebot it's completely cosmetics, being slaved to mergebot -means PR creation is trickier (as mergebot assumes opened event will always -lead to PR creation but fpbot wants to attach meaning to the PR when setting -it up), ... 
-""" -import ast -import base64 -import collections -import contextlib -import datetime -import itertools -import json -import logging -import operator -import os -import pathlib -import re -import subprocess -import tempfile -import typing - -import dateutil.relativedelta -import requests - -from odoo import _, models, fields, api -from odoo.osv import expression -from odoo.exceptions import UserError -from odoo.tools import topological_sort, groupby -from odoo.tools.appdirs import user_cache_dir -from odoo.addons.runbot_merge import utils -from odoo.addons.runbot_merge.models.pull_requests import RPLUS - -footer = '\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n' - -DEFAULT_DELTA = dateutil.relativedelta.relativedelta(days=3) - -_logger = logging.getLogger('odoo.addons.forwardport') - -class Project(models.Model): - _inherit = 'runbot_merge.project' - - fp_github_token = fields.Char() - fp_github_name = fields.Char(store=True, compute="_compute_git_identity") - fp_github_email = fields.Char(store=True, compute="_compute_git_identity") - - def _find_commands(self, comment): - if self.env.context.get('without_forward_port'): - return super()._find_commands(comment) - - return re.findall( - '^\s*[@|#]?{}:? (.*)$'.format(self.fp_github_name), - comment, re.MULTILINE | re.IGNORECASE - ) + super()._find_commands(comment) - - # technically the email could change at any moment... - @api.depends('fp_github_token') - def _compute_git_identity(self): - s = requests.Session() - for project in self: - if not project.fp_github_token: - continue - r0 = s.get('https://api.github.com/user', headers={ - 'Authorization': 'token %s' % project.fp_github_token - }) - if 'user:email' not in set(re.split(r',\s*', r0.headers['x-oauth-scopes'])): - raise UserError(_("The forward-port github token needs the user:email scope to fetch the bot's identity.")) - r1 = s.get('https://api.github.com/user/emails', headers={ - 'Authorization': 'token %s' % project.fp_github_token - }) - if not (r0.ok and r1.ok): - _logger.error("Failed to fetch bot information for project %s: %s", project.name, (r0.text or r0.content) if not r0.ok else (r1.text or r1.content)) - continue - project.fp_github_name = r0.json()['login'] - project.fp_github_email = next(( - entry['email'] - for entry in r1.json() - if entry['primary'] - ), None) - if not project.fp_github_email: - raise UserError(_("The forward-port bot needs a primary email set up.")) - - def write(self, vals): - Branches = self.env['runbot_merge.branch'] - # check on branches both active and inactive so disabling branches doesn't - # make it look like the sequence changed. 
- self_ = self.with_context(active_test=False) - branches_before = {project: project._forward_port_ordered() for project in self_} - - r = super().write(vals) - for p in self_: - # check if the branches sequence has been modified - bbefore = branches_before[p] - bafter = p._forward_port_ordered() - if bafter.ids == bbefore.ids: - continue - - logger = _logger.getChild('project').getChild(p.name) - logger.debug("branches updated %s -> %s", bbefore, bafter) - # if it's just that a branch was inserted at the end forwardport - # should keep on keeping normally - if bafter.ids[:-1] == bbefore.ids: - continue - - if bafter <= bbefore: - raise UserError("Branches can not be reordered or removed after saving.") - - # Last possibility: branch was inserted but not at end, get all - # branches before and all branches after - before = new = after = Branches - for b in bafter: - if b in bbefore: - if new: - after += b - else: - before += b - else: - if new: - raise UserError("Inserting multiple branches at the same time is not supported") - new = b - logger.debug('before: %s new: %s after: %s', before.ids, new.ids, after.ids) - # find all FPs whose ancestry spans the insertion - leaves = self.env['runbot_merge.pull_requests'].search([ - ('state', 'not in', ['closed', 'merged']), - ('target', 'in', after.ids), - ('source_id.target', 'in', before.ids), - ]) - # get all PRs just preceding the insertion point which either are - # sources of the above or have the same source - candidates = self.env['runbot_merge.pull_requests'].search([ - ('target', '=', before[-1].id), - '|', ('id', 'in', leaves.mapped('source_id').ids), - ('source_id', 'in', leaves.mapped('source_id').ids), - ]) - logger.debug("\nPRs spanning new: %s\nto port: %s", leaves, candidates) - # enqueue the creation of a new forward-port based on our candidates - # but it should only create a single step and needs to stitch batch - # the parents linked list, so it has a special type - for c in candidates: - self.env['forwardport.batches'].create({ - 'batch_id': self.env['runbot_merge.batch'].create({ - 'target': before[-1].id, - 'prs': [(4, c.id, 0)], - 'active': False, - }).id, - 'source': 'insert', - }) - return r - - def _forward_port_ordered(self, domain=()): - Branches = self.env['runbot_merge.branch'] - ordering_items = re.split(r',\s*', 'fp_sequence,' + Branches._order) - ordering = ','.join( - # reverse order (desc -> asc, asc -> desc) as we want the "lower" - # branches to be first in the ordering - f[:-5] if f.lower().endswith(' desc') else f + ' desc' - for f in ordering_items - ) - return Branches.search(expression.AND([ - [('project_id', '=', self.id)], - domain or [], - ]), order=ordering) - -class Repository(models.Model): - _inherit = 'runbot_merge.repository' - fp_remote_target = fields.Char(help="where FP branches get pushed") - -class Branch(models.Model): - _inherit = 'runbot_merge.branch' - - fp_sequence = fields.Integer(default=50) - fp_target = fields.Boolean(default=True) - fp_enabled = fields.Boolean(compute='_compute_fp_enabled') - - @api.depends('active', 'fp_target') - def _compute_fp_enabled(self): - for b in self: - b.fp_enabled = b.active and b.fp_target - -class PullRequests(models.Model): - _inherit = 'runbot_merge.pull_requests' - - limit_id = fields.Many2one('runbot_merge.branch', help="Up to which branch should this PR be forward-ported") - - parent_id = fields.Many2one( - 'runbot_merge.pull_requests', index=True, - help="a PR with a parent is an automatic forward port" - ) - source_id = 
fields.Many2one('runbot_merge.pull_requests', index=True, help="the original source of this FP even if parents were detached along the way") - forwardport_ids = fields.One2many('runbot_merge.pull_requests', 'source_id') - reminder_backoff_factor = fields.Integer(default=-4) - merge_date = fields.Datetime() - - fw_policy = fields.Selection([ - ('ci', "Normal"), - ('skipci', "Skip CI"), - # ('skipmerge', "Skip merge"), - ], required=True, default="ci") - - refname = fields.Char(compute='_compute_refname') - @api.depends('label') - def _compute_refname(self): - for pr in self: - pr.refname = pr.label.split(':', 1)[-1] - - @api.model_create_single - def create(self, vals): - # PR opened event always creates a new PR, override so we can precreate PRs - existing = self.search([ - ('repository', '=', vals['repository']), - ('number', '=', vals['number']), - ]) - if existing: - return existing - - if 'limit_id' not in vals: - branch = self.env['runbot_merge.branch'].browse(vals['target']) - repo = self.env['runbot_merge.repository'].browse(vals['repository']) - vals['limit_id'] = branch.project_id._forward_port_ordered( - ast.literal_eval(repo.branch_filter or '[]') - )[-1].id - if vals.get('parent_id') and 'source_id' not in vals: - vals['source_id'] = self.browse(vals['parent_id'])._get_root().id - if vals.get('state') == 'merged': - vals['merge_date'] = fields.Datetime.now() - return super().create(vals) - - def write(self, vals): - # if the PR's head is updated, detach (should split off the FP lines as this is not the original code) - # TODO: better way to do this? Especially because we don't want to - # recursively create updates - # also a bit odd to only handle updating 1 head at a time, but then - # again 2 PRs with same head is weird so... - newhead = vals.get('head') - with_parents = self.filtered('parent_id') - closed_fp = self.filtered(lambda p: p.state == 'closed' and p.source_id) - if newhead and not self.env.context.get('ignore_head_update') and newhead != self.head: - vals.setdefault('parent_id', False) - # if any children, this is an FP PR being updated, enqueue - # updating children - if self.search_count([('parent_id', '=', self.id)]): - self.env['forwardport.updates'].create({ - 'original_root': self._get_root().id, - 'new_root': self.id - }) - - if vals.get('parent_id') and 'source_id' not in vals: - vals['source_id'] = self.browse(vals['parent_id'])._get_root().id - if vals.get('state') == 'merged': - vals['merge_date'] = fields.Datetime.now() - r = super().write(vals) - if self.env.context.get('forwardport_detach_warn', True): - for p in with_parents: - if not p.parent_id: - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': p.repository.id, - 'pull_request': p.number, - 'message': "%sthis PR was modified / updated and has become a normal PR. " - "It should be merged the normal way (via @%s)" % ( - p.source_id.ping(), - p.repository.project_id.github_prefix, - ), - 'token_field': 'fp_github_token', - }) - for p in closed_fp.filtered(lambda p: p.state != 'closed'): - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': p.repository.id, - 'pull_request': p.number, - 'message': "%sthis PR was closed then reopened. 
" - "It should be merged the normal way (via @%s)" % ( - p.source_id.ping(), - p.repository.project_id.github_prefix, - ), - 'token_field': 'fp_github_token', - }) - if vals.get('state') == 'merged': - for p in self: - self.env['forwardport.branch_remover'].create({ - 'pr_id': p.id, - }) - # if we change the policy to skip CI, schedule followups on existing FPs - if vals.get('fw_policy') == 'skipci' and self.state == 'merged': - self.env['runbot_merge.pull_requests'].search([ - ('source_id', '=', self.id), - ('state', 'not in', ('closed', 'merged')), - ])._schedule_fp_followup() - return r - - def _try_closing(self, by): - r = super()._try_closing(by) - if r: - self.with_context(forwardport_detach_warn=False).parent_id = False - self.search([('parent_id', '=', self.id)]).parent_id = False - return r - - def _parse_commands(self, author, comment, login): - super(PullRequests, self.with_context(without_forward_port=True))._parse_commands(author, comment, login) - - tokens = [ - token - for line in re.findall('^\s*[@|#]?{}:? (.*)$'.format(self.repository.project_id.fp_github_name), comment['body'] or '', re.MULTILINE | re.IGNORECASE) - for token in line.split() - ] - if not tokens: - _logger.info("found no commands in comment of %s (%s) (%s)", author.github_login, author.display_name, - utils.shorten(comment['body'] or '', 50) - ) - return - - # TODO: don't use a mutable tokens iterator - tokens = iter(tokens) - while True: - token = next(tokens, None) - if token is None: - break - - ping = False - close = False - msg = None - if token in ('ci', 'skipci'): - pr = (self.source_id or self) - if pr._pr_acl(author).is_reviewer: - pr.fw_policy = token - msg = "Not waiting for CI to create followup forward-ports." if token == 'skipci' else "Waiting for CI to create followup forward-ports." - else: - ping = True - msg = "you can't configure ci." - - if token == 'ignore': # replace 'ignore' by 'up to ' - token = 'up' - tokens = itertools.chain(['to', self.target.name], tokens) - - if token in ('r+', 'review+'): - if not self.source_id: - ping = True - msg = "I can only do this on forward-port PRs and this is not one, see {}.".format( - self.repository.project_id.github_prefix - ) - elif not self.parent_id: - ping = True - msg = "I can only do this on unmodified forward-port PRs, ask {}.".format( - self.repository.project_id.github_prefix - ) - else: - merge_bot = self.repository.project_id.github_prefix - # don't update the root ever - for pr in (p for p in self._iter_ancestors() if p.parent_id if p.state in RPLUS): - # only the author is delegated explicitely on the - pr._parse_commands(author, {**comment, 'body': merge_bot + ' r+'}, login) - elif token == 'close': - if self.source_id._pr_acl(author).is_reviewer: - close = True - else: - ping = True - msg = "you can't close PRs." - - elif token == 'up' and next(tokens, None) == 'to': - limit = next(tokens, None) - ping = True - if not self._pr_acl(author).is_author: - msg = "you can't set a forward-port limit.".format(login) - elif not limit: - msg = "please provide a branch to forward-port to." - else: - limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([ - ('project_id', '=', self.repository.project_id.id), - ('name', '=', limit), - ]) - if self.source_id: - msg = "forward-port limit can only be set on " \ - f"an origin PR ({self.source_id.display_name} " \ - "here) before it's merged and forward-ported." - elif self.state in ['merged', 'closed']: - msg = "forward-port limit can only be set before the PR is merged." 
- elif not limit_id: - msg = "there is no branch %r, it can't be used as a forward port target." % limit - elif limit_id == self.target: - ping = False - msg = "Forward-port disabled." - self.limit_id = limit_id - elif not limit_id.fp_enabled: - msg = "branch %r is disabled, it can't be used as a forward port target." % limit_id.name - else: - ping = False - msg = "Forward-porting to %r." % limit_id.name - self.limit_id = limit_id - - if msg or close: - if msg: - _logger.info("%s [%s]: %s", self.display_name, login, msg) - else: - _logger.info("%s [%s]: closing", self.display_name, login) - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': f'@{author.github_login} {msg}' if msg and ping else msg, - 'close': close, - 'token_field': 'fp_github_token', - }) - - def _notify_ci_failed(self, ci): - # only care about FP PRs which are not staged / merged yet - # NB: probably ignore approved PRs as normal message will handle them? - if not (self.state == 'opened' and self.parent_id): - return - - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'token_field': 'fp_github_token', - 'message': '%s%s failed on this forward-port PR' % ( - self.source_id.ping(), - ci, - ) - }) - - def _validate(self, statuses): - failed = super()._validate(statuses) - self._schedule_fp_followup() - return failed - - def _schedule_fp_followup(self): - _logger = logging.getLogger(__name__).getChild('forwardport.next') - # if the PR has a parent and is CI-validated, enqueue the next PR - for pr in self: - _logger.info('Checking if forward-port %s (%s)', pr.display_name, pr) - if not pr.parent_id: - _logger.info('-> no parent %s (%s)', pr.display_name, pr.parent_id) - continue - if self.source_id.fw_policy != 'skipci' and pr.state not in ['validated', 'ready']: - _logger.info('-> wrong state %s (%s)', pr.display_name, pr.state) - continue - - # check if we've already forward-ported this branch: - # it has a batch without a staging - batch = self.env['runbot_merge.batch'].with_context(active_test=False).search([ - ('staging_id', '=', False), - ('prs', 'in', pr.id), - ], limit=1) - # if the batch is inactive, the forward-port has been done *or* - # the PR's own forward port is in error, so bail - if not batch.active: - _logger.info('-> forward port done or in error (%s.active=%s)', batch, batch.active) - continue - - # otherwise check if we already have a pending forward port - _logger.info("%s %s %s", pr.display_name, batch, ', '.join(batch.mapped('prs.display_name'))) - if self.env['forwardport.batches'].search_count([('batch_id', '=', batch.id)]): - _logger.warning('-> already recorded') - continue - - # check if batch-mate are all valid - mates = batch.prs - # wait until all of them are validated or ready - if any(pr.source_id.fw_policy != 'skipci' and pr.state not in ('validated', 'ready') for pr in mates): - _logger.info("-> not ready (%s)", [(pr.display_name, pr.state) for pr in mates]) - continue - - # check that there's no weird-ass state - if not all(pr.parent_id for pr in mates): - _logger.warning("Found a batch (%s) with only some PRs having parents, ignoring", mates) - continue - if self.search_count([('parent_id', 'in', mates.ids)]): - _logger.warning("Found a batch (%s) with only some of the PRs having children", mates) - continue - - _logger.info('-> ok') - self.env['forwardport.batches'].create({ - 'batch_id': batch.id, - 'source': 'fp', - }) - - 
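For reference, the commands handled by `_parse_commands` above (`ci`/`skipci`, `ignore`, `r+`/`review+`, `close`, `up to` followed by a branch name) are extracted with the same pattern used by `_find_commands`. A small illustration, assuming the bot account is named fw-bot and using a made-up branch name (the actual account name comes from `fp_github_name`):

```python
# Illustration only: 'fw-bot' and the branch name are assumed values; the
# pattern is the one used by _find_commands / _parse_commands above.
import re

FP_BOT_NAME = 'fw-bot'
comment_body = "@fw-bot up to saas-15.3\n@fw-bot r+"
lines = re.findall(
    r'^\s*[@|#]?{}:? (.*)$'.format(FP_BOT_NAME),
    comment_body, re.MULTILINE | re.IGNORECASE,
)
tokens = [token for line in lines for token in line.split()]
assert tokens == ['up', 'to', 'saas-15.3', 'r+']
```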
def _find_next_target(self, reference): - """ Finds the branch between target and limit_id which follows - reference - """ - if reference.target == self.limit_id: - return - # NOTE: assumes even disabled branches are properly sequenced, would - # probably be a good idea to have the FP view show all branches - branches = list(self.target.project_id - .with_context(active_test=False) - ._forward_port_ordered(ast.literal_eval(self.repository.branch_filter or '[]'))) - - # get all branches between max(root.target, ref.target) (excluded) and limit (included) - from_ = max(branches.index(self.target), branches.index(reference.target)) - to_ = branches.index(self.limit_id) - - # return the first active branch in the set - return next(( - branch - for branch in branches[from_+1:to_+1] - if branch.fp_enabled - ), None) - - def _commits_lazy(self): - s = requests.Session() - s.headers['Authorization'] = 'token %s' % self.repository.project_id.fp_github_token - for page in itertools.count(1): - r = s.get('https://api.github.com/repos/{}/pulls/{}/commits'.format( - self.repository.name, - self.number - ), params={'page': page}) - r.raise_for_status() - yield from r.json() - if not r.links.get('next'): - return - - def commits(self): - """ Returns a PR's commits oldest first (that's what GH does & - is what we want) - """ - commits = list(self._commits_lazy()) - # map shas to the position the commit *should* have - idx = { - c: i - for i, c in enumerate(topological_sort({ - c['sha']: [p['sha'] for p in c['parents']] - for c in commits - })) - } - return sorted(commits, key=lambda c: idx[c['sha']]) - - def _iter_descendants(self): - pr = self - while True: - pr = self.search([('parent_id', '=', pr.id)]) - if pr: - yield pr - else: - break - - @api.depends('parent_id.statuses') - def _compute_statuses(self): - super()._compute_statuses() - - def _get_overrides(self): - # NB: assumes _get_overrides always returns an "owned" dict which we can modify - p = self.parent_id._get_overrides() if self.parent_id else {} - p.update(super()._get_overrides()) - return p - - def _iter_ancestors(self): - while self: - yield self - self = self.parent_id - - def _get_root(self): - root = self - while root.parent_id: - root = root.parent_id - return root - - def _port_forward(self): - if not self: - return - - all_sources = [(p.source_id or p) for p in self] - all_targets = [s._find_next_target(p) for s, p in zip(all_sources, self)] - - ref = self[0] - base = all_sources[0] - target = all_targets[0] - if target is None: - _logger.info( - "Will not forward-port %s: no next target", - ref.display_name, - ) - return # QUESTION: do the prs need to be updated? 
- - # check if the PRs have already been forward-ported: is there a PR - # with the same source targeting the next branch in the series - for source in all_sources: - if self.search_count([('source_id', '=', source.id), ('target', '=', target.id)]): - _logger.info("Will not forward-port %s: already ported", ref.display_name) - return - - # check if all PRs in the batch have the same "next target" , bail if - # that's not the case as it doesn't make sense for forward one PR from - # a to b and a linked pr from a to c - different_target = next((t for t in all_targets if t != target), None) - if different_target: - different_pr = next(p for p, t in zip(self, all_targets) if t == different_target) - for pr, t in zip(self, all_targets): - linked, other = different_pr, different_target - if t != target: - linked, other = ref, target - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'token_field': 'fp_github_token', - 'message': "%sthis pull request can not be forward ported: " - "next branch is %r but linked pull request %s " - "has a next branch %r." % ( - pr.ping(), t.name, linked.display_name, other.name - ) - }) - _logger.warning( - "Cancelling forward-port of %s: found different next branches (%s)", - self, all_targets - ) - return - - proj = self.mapped('target.project_id') - if not proj.fp_github_token: - _logger.warning( - "Can not forward-port %s: no token on project %s", - ref.display_name, - proj.name - ) - return - - notarget = [p.repository.name for p in self if not p.repository.fp_remote_target] - if notarget: - _logger.error( - "Can not forward-port %s: repos %s don't have a remote configured", - self, ', '.join(notarget) - ) - return - - # take only the branch bit - new_branch = '%s-%s-%s-fw' % ( - target.name, - base.refname, - # avoid collisions between fp branches (labels can be reused - # or conflict especially as we're chopping off the owner) - base64.urlsafe_b64encode(os.urandom(3)).decode() - ) - # TODO: send outputs to logging? - conflicts = {} - with contextlib.ExitStack() as s: - for pr in self: - conflicts[pr], working_copy = pr._create_fp_branch( - target, new_branch, s) - - working_copy.push('target', new_branch) - - gh = requests.Session() - gh.headers['Authorization'] = 'token %s' % proj.fp_github_token - has_conflicts = any(conflicts.values()) - # problemo: this should forward port a batch at a time, if porting - # one of the PRs in the batch fails is huge problem, though this loop - # only concerns itself with the creation of the followup objects so... - new_batch = self.browse(()) - for pr in self: - owner, _ = pr.repository.fp_remote_target.split('/', 1) - source = pr.source_id or pr - root = pr._get_root() - - message = source.message + '\n\n' + '\n'.join( - "Forward-Port-Of: %s" % p.display_name - for p in root | source - ) - - title, body = re.match(r'(?P[^\n]+)\n*(?P<body>.*)', message, flags=re.DOTALL).groups() - self.env.cr.execute('LOCK runbot_merge_pull_requests IN SHARE MODE') - r = gh.post(f'https://api.github.com/repos/{pr.repository.name}/pulls', json={ - 'base': target.name, - 'head': f'{owner}:{new_branch}', - 'title': '[FW]' + (' ' if title[0] != '[' else '') + title, - 'body': body - }) - if not r.ok: - _logger.warning("Failed to create forward-port PR for %s, deleting branches", pr.display_name) - # delete all the branches this should automatically close the - # PRs if we've created any. 
Using the API here is probably - # simpler than going through the working copies - for repo in self.mapped('repository'): - d = gh.delete(f'https://api.github.com/repos/{repo.fp_remote_target}/git/refs/heads/{new_branch}') - if d.ok: - _logger.info("Deleting %s:%s=success", repo.fp_remote_target, new_branch) - else: - _logger.warning("Deleting %s:%s=%s", repo.fp_remote_target, new_branch, d.text) - raise RuntimeError("Forwardport failure: %s (%s)" % (pr.display_name, r.text)) - - new_pr = self._from_gh(r.json()) - _logger.info("Created forward-port PR %s", new_pr) - new_batch |= new_pr - - # allows PR author to close or skipci - source.delegates |= source.author - new_pr.write({ - 'merge_method': pr.merge_method, - 'source_id': source.id, - # only link to previous PR of sequence if cherrypick passed - 'parent_id': pr.id if not has_conflicts else False, - # Copy author & delegates of source as well as delegates of - # previous so they can r+ the new forward ports. - 'delegates': [(6, False, (source.delegates | pr.delegates).ids)] - }) - if has_conflicts and pr.parent_id and pr.state not in ('merged', 'closed'): - message = source.ping() + """\ -the next pull request (%s) is in conflict. You can merge the chain up to here by saying -> @%s r+ -%s""" % (new_pr.display_name, pr.repository.project_id.fp_github_name, footer) - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': message, - 'token_field': 'fp_github_token', - }) - # not great but we probably want to avoid the risk of the webhook - # creating the PR from under us. There's still a "hole" between - # the POST being executed on gh and the commit but... - self.env.cr.commit() - - for pr, new_pr in zip(self, new_batch): - source = pr.source_id or pr - (h, out, err, hh) = conflicts.get(pr) or (None, None, None, None) - - if h: - sout = serr = '' - if out.strip(): - sout = f"\nstdout:\n```\n{out}\n```\n" - if err.strip(): - serr = f"\nstderr:\n```\n{err}\n```\n" - - lines = '' - if len(hh) > 1: - lines = '\n' + ''.join( - '* %s%s\n' % (sha, ' <- on this commit' if sha == h else '') - for sha in hh - ) - message = f"""{source.ping()}cherrypicking of pull request {source.display_name} failed. -{lines}{sout}{serr} -Either perform the forward-port manually (and push to this branch, proceeding as usual) or close this PR (maybe?). - -In the former case, you may want to edit this PR message as well. -""" - elif has_conflicts: - message = """%s\ -while this was properly forward-ported, at least one co-dependent PR (%s) did \ -not succeed. You will need to fix it before this can be merged. - -Both this PR and the others will need to be approved via `@%s r+` as they are \ -all considered "in conflict". -%s""" % ( - source.ping(), - ', '.join(p.display_name for p in (new_batch - new_pr)), - proj.github_prefix, - footer - ) - elif base._find_next_target(new_pr) is None: - ancestors = "".join( - "* %s\n" % p.display_name - for p in pr._iter_ancestors() - if p.parent_id - ) - message = source.ping() + """\ -this PR targets %s and is the last of the forward-port chain%s -%s -To merge the full chain, say -> @%s r+ -%s""" % (target.name, ' containing:' if ancestors else '.', ancestors, pr.repository.project_id.fp_github_name, footer) - else: - message = """\ -This PR targets %s and is part of the forward-port chain. Further PRs will be created up to %s. 
-%s""" % (target.name, base.limit_id.name, footer) - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': new_pr.repository.id, - 'pull_request': new_pr.number, - 'message': message, - 'token_field': 'fp_github_token', - }) - labels = ['forwardport'] - if has_conflicts: - labels.append('conflict') - self.env['runbot_merge.pull_requests.tagging'].create({ - 'repository': new_pr.repository.id, - 'pull_request': new_pr.number, - 'tags_add': labels, - }) - - # batch the PRs so _validate can perform the followup FP properly - # (with the entire batch). If there are conflict then create a - # deactivated batch so the interface is coherent but we don't pickup - # an active batch we're never going to deactivate. - b = self.env['runbot_merge.batch'].create({ - 'target': target.id, - 'prs': [(6, 0, new_batch.ids)], - 'active': not has_conflicts, - }) - # if we're not waiting for CI, schedule followup immediately - if any(p.source_id.fw_policy == 'skipci' for p in b.prs): - b.prs[0]._schedule_fp_followup() - return b - - def _create_fp_branch(self, target_branch, fp_branch_name, cleanup): - """ Creates a forward-port for the current PR to ``target_branch`` under - ``fp_branch_name``. - - :param target_branch: the branch to port forward to - :param fp_branch_name: the name of the branch to create the FP under - :param ExitStack cleanup: so the working directories can be cleaned up - :return: A pair of an optional conflict information and a repository. If - present the conflict information is composed of the hash of the - conflicting commit, the stderr and stdout of the failed - cherrypick and a list of all PR commit hashes - :rtype: (None | (str, str, str, list[str]), Repo) - """ - source = self._get_local_directory() - root = self._get_root() - # update all the branches & PRs - r = source.with_params('gc.pruneExpire=1.day.ago')\ - .with_config( - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT - )\ - .fetch('-p', 'origin') - _logger.info("Updated %s:\n%s", source._directory, r.stdout.decode()) - source.cat_file(e=root.head) - # create working copy - _logger.info( - "Create working copy to forward-port %s (really %s) to %s", - self.display_name, root.display_name, target_branch.name) - working_copy = source.clone( - cleanup.enter_context( - tempfile.TemporaryDirectory( - prefix='%s-to-%s-' % ( - root.display_name, - target_branch.name - ), - dir=user_cache_dir('forwardport') - )), - branch=target_branch.name - ) - project_id = self.repository.project_id - # add target remote - working_copy.remote( - 'add', 'target', - 'https://{p.fp_github_name}:{p.fp_github_token}@github.com/{r.fp_remote_target}'.format( - r=self.repository, - p=project_id - ) - ) - _logger.info("Create FP branch %s", fp_branch_name) - working_copy.checkout(b=fp_branch_name) - - try: - root._cherry_pick(working_copy) - return None, working_copy - except CherrypickError as e: - # using git diff | git apply -3 to get the entire conflict set - # turns out to not work correctly: in case files have been moved - # / removed (which turns out to be a common source of conflicts - # when forward-porting) it'll just do nothing to the working copy - # so the "conflict commit" will be empty - # switch to a squashed-pr branch - root_branch = 'origin/pull/%d' % root.number - working_copy.checkout('-bsquashed', root_branch) - root_commits = root.commits() - # commits returns oldest first, so youngest (head) last - head_commit = root_commits[-1]['commit'] - - to_tuple = operator.itemgetter('name', 'email') - to_dict = lambda 
term, vals: { - 'GIT_%s_NAME' % term: vals[0], - 'GIT_%s_EMAIL' % term: vals[1], - 'GIT_%s_DATE' % term: vals[2], - } - authors, committers = set(), set() - for c in (c['commit'] for c in root_commits): - authors.add(to_tuple(c['author'])) - committers.add(to_tuple(c['committer'])) - fp_authorship = (project_id.fp_github_name, '', '') - author = fp_authorship if len(authors) != 1\ - else authors.pop() + (head_commit['author']['date'],) - committer = fp_authorship if len(committers) != 1 \ - else committers.pop() + (head_commit['committer']['date'],) - conf = working_copy.with_config(env={ - **to_dict('AUTHOR', author), - **to_dict('COMMITTER', committer), - 'GIT_COMMITTER_DATE': '', - }) - # squash to a single commit - conf.reset('--soft', root_commits[0]['parents'][0]['sha']) - conf.commit(a=True, message="temp") - squashed = conf.stdout().rev_parse('HEAD').stdout.strip().decode() - - # switch back to the PR branch - conf.checkout(fp_branch_name) - # cherry-pick the squashed commit to generate the conflict - conf.with_params('merge.renamelimit=0', 'merge.conflictstyle=diff3')\ - .with_config(check=False)\ - .cherry_pick(squashed, no_commit=True) - status = conf.stdout().status(short=True, untracked_files='no').stdout.decode() - h, out, err, hh = e.args - if err.strip(): - err = err.rstrip() + '\n----------\nstatus:\n' + status - else: - err = 'status:\n' + status - # if there was a single commit, reuse its message when committing - # the conflict - # TODO: still add conflict information to this? - if len(root_commits) == 1: - msg = root._make_fp_message(root_commits[0]) - conf.with_config(input=str(msg).encode())\ - .commit(all=True, allow_empty=True, file='-') - else: - conf.commit( - all=True, allow_empty=True, - message="""Cherry pick of %s failed - -stdout: -%s -stderr: -%s -""" % (h, out, err)) - return (h, out, err, hh), working_copy - - def _cherry_pick(self, working_copy): - """ Cherrypicks ``self`` into the working copy - - :return: ``True`` if the cherrypick was successful, ``False`` otherwise - """ - # <xxx>.cherrypick.<number> - logger = _logger.getChild('cherrypick').getChild(str(self.number)) - - # original head so we can reset - prev = original_head = working_copy.stdout().rev_parse('HEAD').stdout.decode().strip() - - commits = self.commits() - logger.info("%s: copy %s commits to %s\n%s", self, len(commits), original_head, '\n'.join( - '- %s (%s)' % (c['sha'], c['commit']['message'].splitlines()[0]) - for c in commits - )) - - for commit in commits: - commit_sha = commit['sha'] - # config (global -c) or commit options don't really give access to - # setting dates - cm = commit['commit'] # get the "git" commit object rather than the "github" commit resource - env = { - 'GIT_AUTHOR_NAME': cm['author']['name'], - 'GIT_AUTHOR_EMAIL': cm['author']['email'], - 'GIT_AUTHOR_DATE': cm['author']['date'], - 'GIT_COMMITTER_NAME': cm['committer']['name'], - 'GIT_COMMITTER_EMAIL': cm['committer']['email'], - } - configured = working_copy.with_config(env=env) - - conf = working_copy.with_config( - env={**env, 'GIT_TRACE': 'true'}, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - check=False - ) - # first try with default / low renamelimit - r = conf.cherry_pick(commit_sha) - logger.debug("Cherry-picked %s: %s\n%s\n%s", commit_sha, r.returncode, r.stdout.decode(), _clean_rename(r.stderr.decode())) - if r.returncode: - # if it failed, retry with high renamelimit - configured.reset('--hard', prev) - r = conf.with_params('merge.renamelimit=0').cherry_pick(commit_sha) - 
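The cherry-pick loop above preserves each commit's original author and committer by exporting the `GIT_AUTHOR_*` / `GIT_COMMITTER_*` environment variables, and retries with `merge.renamelimit=0` when the first attempt fails. A standalone sketch of the same idea using plain `subprocess`; the repository path and the shape of `commit_meta` (a GitHub "commit" object) are assumptions:

```python
import os
import subprocess


def cherry_pick_preserving_identity(repo_dir: str, sha: str, commit_meta: dict) -> bool:
    """commit_meta is assumed to look like a GitHub commit object:
    {'author': {'name', 'email', 'date'}, 'committer': {'name', 'email'}}."""
    env = {
        **os.environ,
        'GIT_AUTHOR_NAME': commit_meta['author']['name'],
        'GIT_AUTHOR_EMAIL': commit_meta['author']['email'],
        'GIT_AUTHOR_DATE': commit_meta['author']['date'],
        'GIT_COMMITTER_NAME': commit_meta['committer']['name'],
        'GIT_COMMITTER_EMAIL': commit_meta['committer']['email'],
    }

    def git(*args, **kw):
        return subprocess.run(['git', '-C', repo_dir, *args], env=env,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw)

    # remember where we were so a failed pick can be rolled back
    prev = git('rev-parse', 'HEAD', check=True).stdout.decode().strip()
    if git('cherry-pick', sha).returncode == 0:
        return True
    # roll back, then retry with rename detection uncapped
    git('reset', '--hard', prev)
    return git('-c', 'merge.renamelimit=0', 'cherry-pick', sha).returncode == 0
```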
logger.debug("Cherry-picked %s (renamelimit=0): %s\n%s\n%s", commit_sha, r.returncode, r.stdout.decode(), _clean_rename(r.stderr.decode())) - - if r.returncode: # pick failed, reset and bail - # try to log inflateInit: out of memory errors as warning, they - # seem to return the status code 128 - logger.log( - logging.WARNING if r.returncode == 128 else logging.INFO, - "forward-port of %s (%s) failed at %s", - self, self.display_name, commit_sha) - configured.reset('--hard', original_head) - raise CherrypickError( - commit_sha, - r.stdout.decode(), - _clean_rename(r.stderr.decode()), - [commit['sha'] for commit in commits] - ) - - msg = self._make_fp_message(commit) - - # replace existing commit message with massaged one - configured \ - .with_config(input=str(msg).encode())\ - .commit(amend=True, file='-') - prev = configured.stdout().rev_parse('HEAD').stdout.decode() - logger.info('%s: success -> %s', commit_sha, prev) - - def _build_merge_message(self, message, related_prs=()): - msg = super()._build_merge_message(message, related_prs=related_prs) - - # ensures all reviewers in the review path are on the PR in order: - # original reviewer, then last conflict reviewer, then current PR - reviewers = (self | self._get_root() | self.source_id)\ - .mapped('reviewed_by.formatted_email') - - sobs = msg.headers.getlist('signed-off-by') - msg.headers.remove('signed-off-by') - msg.headers.extend( - ('signed-off-by', signer) - for signer in sobs - if signer not in reviewers - ) - msg.headers.extend( - ('signed-off-by', reviewer) - for reviewer in reversed(reviewers) - ) - - return msg - - def _make_fp_message(self, commit): - cmap = json.loads(self.commits_map) - msg = self._parse_commit_message(commit['commit']['message']) - # write the *merged* commit as "original", not the PR's - msg.headers['x-original-commit'] = cmap.get(commit['sha'], commit['sha']) - # don't stringify so caller can still perform alterations - return msg - - def _get_local_directory(self): - repos_dir = pathlib.Path(user_cache_dir('forwardport')) - repos_dir.mkdir(parents=True, exist_ok=True) - repo_dir = repos_dir / self.repository.name - - if repo_dir.is_dir(): - return git(repo_dir) - else: - _logger.info("Cloning out %s to %s", self.repository.name, repo_dir) - subprocess.run([ - 'git', 'clone', '--bare', - 'https://{}:{}@github.com/{}'.format( - self.repository.project_id.fp_github_name or '', - self.repository.project_id.fp_github_token, - self.repository.name, - ), - str(repo_dir) - ], check=True) - # add PR branches as local but namespaced (?) - repo = git(repo_dir) - # bare repos don't have a fetch spec by default (!) so adding one - # removes the default behaviour and stops fetching the base - # branches unless we add an explicit fetch spec for them - repo.config('--add', 'remote.origin.fetch', '+refs/heads/*:refs/heads/*') - repo.config('--add', 'remote.origin.fetch', '+refs/pull/*/head:refs/heads/pull/*') - return repo - - def _outstanding(self, cutoff): - """ Returns "outstanding" (unmerged and unclosed) forward-ports whose - source was merged before ``cutoff`` (all of them if not provided). 
- - :param str cutoff: a datetime (ISO-8601 formatted) - :returns: an iterator of (source, forward_ports) - """ - return groupby(self.env['runbot_merge.pull_requests'].search([ - # only FP PRs - ('source_id', '!=', False), - # active - ('state', 'not in', ['merged', 'closed']), - ('source_id.merge_date', '<', cutoff), - ], order='source_id, id'), lambda p: p.source_id) - - def _hall_of_shame(self): - """Provides data for the HOS view - - * outstanding forward ports per reviewer - * pull requests with outstanding forward ports, oldest-merged first - """ - cutoff_dt = datetime.datetime.now() - DEFAULT_DELTA - outstanding = self.env['runbot_merge.pull_requests'].search([ - ('source_id', '!=', False), - ('state', 'not in', ['merged', 'closed']), - ('source_id.merge_date', '<', cutoff_dt), - ], order=None) - # only keep merged because apparently some PRs are in a weird spot - # where they're sources but closed? - sources = outstanding.mapped('source_id').filtered('merge_date').sorted('merge_date') - outstandings = [] - reviewers = collections.Counter() - for source in sources: - outstandings.append(Outstanding(source=source, prs=source.forwardport_ids & outstanding)) - reviewers[source.reviewed_by] += 1 - return HallOfShame( - reviewers=reviewers.most_common(), - outstanding=outstandings, - ) - - def _reminder(self): - cutoff = self.env.context.get('forwardport_updated_before') \ - or fields.Datetime.to_string(datetime.datetime.now() - DEFAULT_DELTA) - cutoff_dt = fields.Datetime.from_string(cutoff) - - for source, prs in self._outstanding(cutoff): - backoff = dateutil.relativedelta.relativedelta(days=2**source.reminder_backoff_factor) - prs = list(prs) - if source.merge_date > (cutoff_dt - backoff): - continue - source.reminder_backoff_factor += 1 - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': source.repository.id, - 'pull_request': source.number, - 'message': "%sthis pull request has forward-port PRs awaiting action (not merged or closed):\n%s" % ( - source.ping(), - '\n- '.join(pr.display_name for pr in sorted(prs, key=lambda p: p.number)) - ), - 'token_field': 'fp_github_token', - }) - -class Stagings(models.Model): - _inherit = 'runbot_merge.stagings' - - def write(self, vals): - r = super().write(vals) - # we've just deactivated a successful staging (so it got ~merged) - if vals.get('active') is False and self.state == 'success': - # check al batches to see if they should be forward ported - for b in self.with_context(active_test=False).batch_ids: - # if all PRs of a batch have parents they're part of an FP - # sequence and thus handled separately, otherwise they're - # considered regular merges - if not all(p.parent_id for p in b.prs): - self.env['forwardport.batches'].create({ - 'batch_id': b.id, - 'source': 'merge', - }) - return r - -class Feedback(models.Model): - _inherit = 'runbot_merge.pull_requests.feedback' - - token_field = fields.Selection(selection_add=[('fp_github_token', 'Forwardport Bot')]) - -def git(directory): return Repo(directory, check=True) -class Repo: - def __init__(self, directory, **config): - self._directory = str(directory) - config.setdefault('stderr', subprocess.PIPE) - self._config = config - self._params = () - self._opener = subprocess.run - - def __getattr__(self, name): - return GitCommand(self, name.replace('_', '-')) - - def _run(self, *args, **kwargs): - opts = {**self._config, **kwargs} - args = ('git', '-C', self._directory)\ - + tuple(itertools.chain.from_iterable(('-c', p) for p in self._params))\ - + args - try: - 
return self._opener(args, **opts) - except subprocess.CalledProcessError as e: - _logger.error("git call error:%s", ('\n' + e.stderr.decode()) if e.stderr else e ) - raise - - def stdout(self, flag=True): - if flag is True: - return self.with_config(stdout=subprocess.PIPE) - elif flag is False: - return self.with_config(stdout=None) - return self.with_config(stdout=flag) - - def lazy(self): - r = self.with_config() - r._config.pop('check', None) - r._opener = subprocess.Popen - return r - - def check(self, flag): - return self.with_config(check=flag) - - def with_config(self, **kw): - opts = {**self._config, **kw} - r = Repo(self._directory, **opts) - r._opener = self._opener - r._params = self._params - return r - - def with_params(self, *args): - r = self.with_config() - r._params = args - return r - - def clone(self, to, branch=None): - self._run( - 'clone', - *([] if branch is None else ['-b', branch]), - self._directory, to, - ) - return Repo(to) - -class GitCommand: - def __init__(self, repo, name): - self._name = name - self._repo = repo - - def __call__(self, *args, **kwargs): - return self._repo._run(self._name, *args, *self._to_options(kwargs)) - - def _to_options(self, d): - for k, v in d.items(): - if len(k) == 1: - yield '-' + k - else: - yield '--' + k.replace('_', '-') - if v not in (None, True): - assert v is not False - yield str(v) - -class CherrypickError(Exception): - ... - -def _clean_rename(s): - """ Filters out the "inexact rename detection" spam of cherry-pick: it's - useless but there seems to be no good way to silence these messages. - """ - return '\n'.join( - l for l in s.splitlines() - if not l.startswith('Performing inexact rename detection') - ) - -class HallOfShame(typing.NamedTuple): - reviewers: list - outstanding: list - -class Outstanding(typing.NamedTuple): - source: object - prs: object diff --git a/forwardport/models/project_freeze.py b/forwardport/models/project_freeze.py deleted file mode 100644 index c8aec42f..00000000 --- a/forwardport/models/project_freeze.py +++ /dev/null @@ -1,22 +0,0 @@ -from odoo import models - - -class FreezeWizard(models.Model): - """ Override freeze wizard to disable the forward port cron when one is - created (so there's a freeze ongoing) and re-enable it once all freezes are - done. 
- - If there ever is a case where we have lots of projects, - """ - _inherit = 'runbot_merge.project.freeze' - - def create(self, vals_list): - r = super().create(vals_list) - self.env.ref('forwardport.port_forward').active = False - return r - - def unlink(self): - r = super().unlink() - if not self.search_count([]): - self.env.ref('forwardport.port_forward').active = True - return r diff --git a/forwardport/tests/conftest.py b/forwardport/tests/conftest.py deleted file mode 100644 index 408345af..00000000 --- a/forwardport/tests/conftest.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -import re - -import pytest -import requests - -@pytest.fixture -def default_crons(): - return [ - 'runbot_merge.process_updated_commits', - 'runbot_merge.merge_cron', - 'runbot_merge.staging_cron', - 'forwardport.port_forward', - 'forwardport.updates', - 'runbot_merge.check_linked_prs_status', - 'runbot_merge.feedback_cron', - ] - -# public_repo — necessary to leave comments -# admin:repo_hook — to set up hooks (duh) -# delete_repo — to cleanup repos created under a user -# user:email — fetch token/user's email addresses -TOKEN_SCOPES = { - 'github': {'admin:repo_hook', 'delete_repo', 'public_repo', 'user:email'}, - # TODO: user:email so they can fetch the user's email? - 'role_reviewer': {'public_repo'},# 'delete_repo'}, - 'role_self_reviewer': {'public_repo'},# 'delete_repo'}, - 'role_other': {'public_repo'},# 'delete_repo'}, -} -@pytest.fixture(autouse=True, scope='session') -def _check_scopes(config): - for section, vals in config.items(): - required_scopes = TOKEN_SCOPES.get(section) - if required_scopes is None: - continue - - response = requests.get('https://api.github.com/rate_limit', headers={ - 'Authorization': 'token %s' % vals['token'] - }) - assert response.status_code == 200 - x_oauth_scopes = response.headers['X-OAuth-Scopes'] - token_scopes = set(re.split(r',\s+', x_oauth_scopes)) - assert token_scopes >= required_scopes, \ - "%s should have scopes %s, found %s" % (section, token_scopes, required_scopes) - -@pytest.fixture() -def module(): - """ When a test function is (going to be) run, selects the containing - module (as needing to be installed) - """ - # NOTE: no request.fspath (because no request.function) in session-scoped fixture so can't put module() at the toplevel - return 'forwardport' diff --git a/forwardport/tests/test_batches.py b/forwardport/tests/test_batches.py deleted file mode 100644 index a637e7a0..00000000 --- a/forwardport/tests/test_batches.py +++ /dev/null @@ -1,89 +0,0 @@ -from utils import Commit, make_basic - - -def test_single_updated(env, config, make_repo): - """ Given co-dependent PRs getting merged, one of them being modified should - lead to a restart of the merge & forward port process. 
- - See test_update_pr for a simpler (single-PR) version - """ - r1, _ = make_basic(env, config, make_repo, reponame='repo-1') - r2, _ = make_basic(env, config, make_repo, reponame='repo-2') - - with r1: - r1.make_commits('a', Commit('1', tree={'1': '0'}), ref='heads/aref') - pr1 = r1.make_pr(target='a', head='aref') - r1.post_status('aref', 'success', 'legal/cla') - r1.post_status('aref', 'success', 'ci/runbot') - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - with r2: - r2.make_commits('a', Commit('2', tree={'2': '0'}), ref='heads/aref') - pr2 = r2.make_pr(target='a', head='aref') - r2.post_status('aref', 'success', 'legal/cla') - r2.post_status('aref', 'success', 'ci/runbot') - pr2.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with r1, r2: - r1.post_status('staging.a', 'success', 'legal/cla') - r1.post_status('staging.a', 'success', 'ci/runbot') - r2.post_status('staging.a', 'success', 'legal/cla') - r2.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr1_id, pr11_id, pr2_id, pr21_id = pr_ids = env['runbot_merge.pull_requests'].search([]).sorted('display_name') - assert pr1_id.number == pr1.number - assert pr2_id.number == pr2.number - assert pr1_id.state == pr2_id.state == 'merged' - - assert pr11_id.parent_id == pr1_id - assert pr11_id.repository.name == pr1_id.repository.name == r1.name - - assert pr21_id.parent_id == pr2_id - assert pr21_id.repository.name == pr2_id.repository.name == r2.name - - assert pr11_id.target.name == pr21_id.target.name == 'b' - - # don't even bother faking CI failure, straight update pr21_id - repo, ref = r2.get_pr(pr21_id.number).branch - with repo: - repo.make_commits( - pr21_id.target.name, - Commit('Whops', tree={'2': '1'}), - ref='heads/' + ref, - make=False - ) - env.run_crons() - - assert not pr21_id.parent_id - - with r1, r2: - r1.post_status(pr11_id.head, 'success', 'legal/cla') - r1.post_status(pr11_id.head, 'success', 'ci/runbot') - r1.get_pr(pr11_id.number).post_comment('hansen r+', config['role_reviewer']['token']) - r2.post_status(pr21_id.head, 'success', 'legal/cla') - r2.post_status(pr21_id.head, 'success', 'ci/runbot') - r2.get_pr(pr21_id.number).post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - prs_again = env['runbot_merge.pull_requests'].search([]) - assert prs_again == pr_ids,\ - "should not have created FP PRs as we're now in a detached (iso new PR) state " \ - "(%s)" % prs_again.mapped('display_name') - - with r1, r2: - r1.post_status('staging.b', 'success', 'legal/cla') - r1.post_status('staging.b', 'success', 'ci/runbot') - r2.post_status('staging.b', 'success', 'legal/cla') - r2.post_status('staging.b', 'success', 'ci/runbot') - env.run_crons() - - new_prs = env['runbot_merge.pull_requests'].search([]).sorted('display_name') - pr_ids - assert len(new_prs) == 2, "should have created the new FP PRs" - pr12_id, pr22_id = new_prs - - assert pr12_id.source_id == pr1_id - assert pr12_id.parent_id == pr11_id - - assert pr22_id.source_id == pr2_id - assert pr22_id.parent_id == pr21_id diff --git a/forwardport/tests/test_conflicts.py b/forwardport/tests/test_conflicts.py deleted file mode 100644 index 0dd1c1c5..00000000 --- a/forwardport/tests/test_conflicts.py +++ /dev/null @@ -1,356 +0,0 @@ -import re -import time -from operator import itemgetter - -from utils import make_basic, Commit, validate_all, re_matches, seen, REF_PATTERN, to_pr - - -def test_conflict(env, config, make_repo, users): - """ Create a PR to A which will 
(eventually) conflict with C when - forward-ported. - """ - prod, other = make_basic(env, config, make_repo) - # create a d branch - with prod: - prod.make_commits('c', Commit('1111', tree={'i': 'a'}), ref='heads/d') - project = env['runbot_merge.project'].search([]) - project.write({ - 'branch_ids': [ - (0, 0, {'name': 'd', 'fp_sequence': 4, 'fp_target': True}) - ] - }) - - # generate a conflict: create a h file in a PR to a - with prod: - [p_0] = prod.make_commits( - 'a', Commit('p_0', tree={'h': 'xxx'}), - ref='heads/conflicting' - ) - pr = prod.make_pr(target='a', head='conflicting') - prod.post_status(p_0, 'success', 'legal/cla') - prod.post_status(p_0, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - pra_id, prb_id = env['runbot_merge.pull_requests'].search([], order='number') - # mark pr b as OK so it gets ported to c - with prod: - validate_all([prod], [prb_id.head]) - env.run_crons() - - pra_id, prb_id, prc_id = env['runbot_merge.pull_requests'].search([], order='number') - # should have created a new PR - # but it should not have a parent, and there should be conflict markers - assert not prc_id.parent_id - assert prc_id.source_id == pra_id - assert prc_id.state == 'opened' - - p = prod.commit(p_0) - c = prod.commit(prc_id.head) - assert c.author == p.author - # ignore date as we're specifically not keeping the original's - without_date = itemgetter('name', 'email') - assert without_date(c.committer) == without_date(p.committer) - assert prod.read_tree(c) == { - 'f': 'c', - 'g': 'a', - 'h': re_matches(r'''<<<\x3c<<< HEAD -a -|||||||| parent of [\da-f]{7,}.* -======= -xxx ->>>\x3e>>> [\da-f]{7,}.* -'''), - } - prb = prod.get_pr(prb_id.number) - assert prb.comments == [ - seen(env, prb, users), - (users['user'], '''\ -This PR targets b and is part of the forward-port chain. Further PRs will be created up to d. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -'''), - (users['user'], """@%s @%s the next pull request (%s) is in conflict. 
\ -You can merge the chain up to here by saying -> @%s r+ - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -""" % ( - users['user'], users['reviewer'], - prc_id.display_name, - project.fp_github_name - )) - ] - - # check that CI passing does not create more PRs - with prod: - validate_all([prod], [prc_id.head]) - env.run_crons() - time.sleep(5) - env.run_crons() - assert pra_id | prb_id | prc_id == env['runbot_merge.pull_requests'].search([], order='number'),\ - "CI passing should not have resumed the FP process on a conflicting PR" - - # fix the PR, should behave as if this were a normal PR - prc = prod.get_pr(prc_id.number) - pr_repo, pr_ref = prc.branch - with pr_repo: - pr_repo.make_commits( - # if just given a branch name, goes and gets it from pr_repo whose - # "b" was cloned before that branch got rolled back - 'c', - Commit('h should indeed be xxx', tree={'h': 'xxx'}), - ref='heads/%s' % pr_ref, - make=False, - ) - env.run_crons() - assert prod.read_tree(prod.commit(prc_id.head)) == { - 'f': 'c', - 'g': 'a', - 'h': 'xxx', - } - assert prc_id.state == 'opened', "state should be open still" - assert ('#%d' % pra_id.number) in prc_id.message - - # check that merging the fixed PR fixes the flow and restarts a forward - # port process - with prod: - prod.post_status(prc.head, 'success', 'legal/cla') - prod.post_status(prc.head, 'success', 'ci/runbot') - prc.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - assert prc_id.staging_id - with prod: - prod.post_status('staging.c', 'success', 'legal/cla') - prod.post_status('staging.c', 'success', 'ci/runbot') - env.run_crons() - - *_, prd_id = env['runbot_merge.pull_requests'].search([], order='number') - assert ('#%d' % pra_id.number) in prd_id.message, \ - "check that source / PR A is referenced by resume PR" - assert ('#%d' % prc_id.number) in prd_id.message, \ - "check that parent / PR C is referenced by resume PR" - assert prd_id.parent_id == prc_id - assert prd_id.source_id == pra_id - assert re.match( - REF_PATTERN.format(target='d', source='conflicting'), - prd_id.refname - ) - assert prod.read_tree(prod.commit(prd_id.head)) == { - 'f': 'c', - 'g': 'a', - 'h': 'xxx', - 'i': 'a', - } - -def test_conflict_deleted(env, config, make_repo): - prod, other = make_basic(env, config, make_repo) - # remove f from b - with prod: - prod.make_commits( - 'b', Commit('33', tree={'g': 'c'}, reset=True), - ref='heads/b' - ) - - # generate a conflict: update f in a - with prod: - [p_0] = prod.make_commits( - 'a', Commit('p_0', tree={'f': 'xxx'}), - ref='heads/conflicting' - ) - pr = prod.make_pr(target='a', head='conflicting') - prod.post_status(p_0, 'success', 'legal/cla') - prod.post_status(p_0, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - env.run_crons() - # wait a bit for PR webhook... ? 
- time.sleep(5) - env.run_crons() - - # should have created a new PR - pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') - # but it should not have a parent - assert not pr1.parent_id - assert pr1.source_id == pr0 - assert prod.read_tree(prod.commit('b')) == { - 'g': 'c', - } - assert pr1.state == 'opened' - # NOTE: no actual conflict markers because pr1 essentially adds f de-novo - assert prod.read_tree(prod.commit(pr1.head)) == { - 'f': 'xxx', - 'g': 'c', - } - - # check that CI passing does not create more PRs - with prod: - validate_all([prod], [pr1.head]) - env.run_crons() - time.sleep(5) - env.run_crons() - assert pr0 | pr1 == env['runbot_merge.pull_requests'].search([], order='number'),\ - "CI passing should not have resumed the FP process on a conflicting PR" - - # fix the PR, should behave as if this were a normal PR - get_pr = prod.get_pr(pr1.number) - pr_repo, pr_ref = get_pr.branch - with pr_repo: - pr_repo.make_commits( - # if just given a branch name, goes and gets it from pr_repo whose - # "b" was cloned before that branch got rolled back - prod.commit('b').id, - Commit('f should indeed be removed', tree={'g': 'c'}, reset=True), - ref='heads/%s' % pr_ref, - make=False, - ) - env.run_crons() - assert prod.read_tree(prod.commit(pr1.head)) == { - 'g': 'c', - } - assert pr1.state == 'opened', "state should be open still" - -def test_multiple_commits_same_authorship(env, config, make_repo): - """ When a PR has multiple commits by the same author and its - forward-porting triggers a conflict, the resulting (squashed) conflict - commit should have the original author (same with the committer). - """ - author = {'name': 'George Pearce', 'email': 'gp@example.org'} - committer = {'name': 'G. P. W. Meredith', 'email': 'gpwm@example.org'} - prod, _ = make_basic(env, config, make_repo) - with prod: - # conflict: create `g` in `a`, using two commits - prod.make_commits( - 'a', - Commit('c0', tree={'g': '1'}, - author={**author, 'date': '1932-10-18T12:00:00Z'}, - committer={**committer, 'date': '1932-11-02T12:00:00Z'}), - Commit('c1', tree={'g': '2'}, - author={**author, 'date': '1932-11-12T12:00:00Z'}, - committer={**committer, 'date': '1932-11-13T12:00:00Z'}), - ref='heads/conflicting' - ) - pr = prod.make_pr(target='a', head='conflicting') - prod.post_status('conflicting', 'success', 'legal/cla') - prod.post_status('conflicting', 'success', 'ci/runbot') - pr.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.state == 'ready' - assert pr_id.staging_id - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - for _ in range(20): - pr_ids = env['runbot_merge.pull_requests'].search([], order='number') - if len(pr_ids) == 2: - _ , pr2_id = pr_ids - break - time.sleep(0.5) - else: - assert 0, "timed out" - - c = prod.commit(pr2_id.head) - get = itemgetter('name', 'email') - assert get(c.author) == get(author) - assert get(c.committer) == get(committer) - -def test_multiple_commits_different_authorship(env, config, make_repo, users, rolemap): - """ When a PR has multiple commits by different authors, the resulting - (squashed) conflict commit should have - """ - author = {'name': 'George Pearce', 'email': 'gp@example.org'} - committer = {'name': 'G. P. W. 
Meredith', 'email': 'gpwm@example.org'} - prod, _ = make_basic(env, config, make_repo) - with prod: - # conflict: create `g` in `a`, using two commits - # just swap author and committer in the commits - prod.make_commits( - 'a', - Commit('c0', tree={'g': '1'}, - author={**author, 'date': '1932-10-18T12:00:00Z'}, - committer={**committer, 'date': '1932-11-02T12:00:00Z'}), - Commit('c1', tree={'g': '2'}, - author={**committer, 'date': '1932-11-12T12:00:00Z'}, - committer={**author, 'date': '1932-11-13T12:00:00Z'}), - ref='heads/conflicting' - ) - pr = prod.make_pr(target='a', head='conflicting') - prod.post_status('conflicting', 'success', 'legal/cla') - prod.post_status('conflicting', 'success', 'ci/runbot') - pr.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.state == 'ready' - assert pr_id.staging_id - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - for _ in range(20): - pr_ids = env['runbot_merge.pull_requests'].search([], order='number') - if len(pr_ids) == 2: - _ , pr2_id = pr_ids - break - time.sleep(0.5) - else: - assert 0, "timed out" - - c = prod.commit(pr2_id.head) - assert len(c.parents) == 1 - get = itemgetter('name', 'email') - rm = rolemap['user'] - assert get(c.author) == (rm['login'], ''), \ - "In a multi-author PR, the squashed conflict commit should have the " \ - "author set to the bot but an empty email" - assert get(c.committer) == (rm['login'], '') - - assert re.match(r'''<<<\x3c<<< HEAD -b -|||||||| parent of [\da-f]{7,}.* -======= -2 ->>>\x3e>>> [\da-f]{7,}.* -''', prod.read_tree(c)['g']) - - # I'd like to fix the conflict so everything is clean and proper *but* - # github's API apparently rejects creating commits with an empty email. - # - # So fuck that, I'll just "merge the conflict". Still works at simulating - # a resolution error as technically that's the sort of things people do. - - pr2 = prod.get_pr(pr2_id.number) - with prod: - prod.post_status(pr2_id.head, 'success', 'legal/cla') - prod.post_status(pr2_id.head, 'success', 'ci/runbot') - pr2.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - assert pr2.comments == [ - seen(env, pr2, users), - (users['user'], re_matches(r'@%s @%s .*CONFLICT' % (users['user'], users['reviewer']), re.DOTALL)), - (users['reviewer'], 'hansen r+'), - (users['user'], f"@{users['user']} @{users['reviewer']} unable to stage: " - "All commits must have author and committer email, " - f"missing email on {pr2_id.head} indicates the " - "authorship is most likely incorrect."), - ] - assert pr2_id.state == 'error' - assert not pr2_id.staging_id, "staging should have been rejected" diff --git a/forwardport/tests/test_limit.py b/forwardport/tests/test_limit.py deleted file mode 100644 index 8310619d..00000000 --- a/forwardport/tests/test_limit.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- coding: utf-8 -*- -import collections -import time - -import pytest - -from utils import seen, Commit, make_basic - -Description = collections.namedtuple('Restriction', 'source limit') -def test_configure(env, config, make_repo): - """ Checks that configuring an FP limit on a PR is respected - - * limits to not the latest - * limits to the current target (= no FP) - * limits to an earlier branch (???) 
- """ - prod, other = make_basic(env, config, make_repo) - bot_name = env['runbot_merge.project'].search([]).fp_github_name - descriptions = [ - Description(source='a', limit='b'), - Description(source='b', limit='b'), - Description(source='b', limit='a'), - ] - originals = [] - with prod: - for i, descr in enumerate(descriptions): - [c] = prod.make_commits( - descr.source, Commit('c %d' % i, tree={str(i): str(i)}), - ref='heads/branch%d' % i, - ) - pr = prod.make_pr(target=descr.source, head='branch%d'%i) - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+\n%s up to %s' % (bot_name, descr.limit), config['role_reviewer']['token']) - originals.append(pr.number) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.b', 'success', 'ci/runbot') - env.run_crons() - - # should have created a single FP PR for 0, none for 1 and none for 2 - prs = env['runbot_merge.pull_requests'].search([], order='number') - assert len(prs) == 4 - assert prs[-1].parent_id == prs[0] - assert prs[0].number == originals[0] - assert prs[1].number == originals[1] - assert prs[2].number == originals[2] - - -def test_self_disabled(env, config, make_repo): - """ Allow setting target as limit even if it's disabled - """ - prod, other = make_basic(env, config, make_repo) - bot_name = env['runbot_merge.project'].search([]).fp_github_name - branch_a = env['runbot_merge.branch'].search([('name', '=', 'a')]) - branch_a.fp_target = False - with prod: - [c] = prod.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/mybranch') - pr = prod.make_pr(target='a', head='mybranch') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+\n%s up to a' % bot_name, config['role_reviewer']['token']) - env.run_crons() - pr_id = env['runbot_merge.pull_requests'].search([('number', '=', pr.number)]) - assert pr_id.limit_id == branch_a - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - assert env['runbot_merge.pull_requests'].search([]) == pr_id,\ - "should not have created a forward port" - - -def test_ignore(env, config, make_repo): - """ Provide an "ignore" command which is equivalent to setting the limit - to target - """ - prod, other = make_basic(env, config, make_repo) - bot_name = env['runbot_merge.project'].search([]).fp_github_name - branch_a = env['runbot_merge.branch'].search([('name', '=', 'a')]) - with prod: - [c] = prod.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/mybranch') - pr = prod.make_pr(target='a', head='mybranch') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+\n%s ignore' % bot_name, config['role_reviewer']['token']) - env.run_crons() - pr_id = env['runbot_merge.pull_requests'].search([('number', '=', pr.number)]) - assert pr_id.limit_id == branch_a - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - assert env['runbot_merge.pull_requests'].search([]) == pr_id,\ - "should not have created a forward port" - - -@pytest.mark.parametrize('enabled', ['active', 'fp_target']) -def test_disable(env, config, make_repo, users, enabled): - """ Checks behaviour if the limit target is 
disabled: - - * disable target while FP is ongoing -> skip over (and stop there so no FP) - * forward-port over a disabled branch - * request a disabled target as limit - - Disabling (with respect to forward ports) can be performed by marking the - branch as !active (which also affects mergebot operations), or as - !fp_target (won't be forward-ported to). - """ - prod, other = make_basic(env, config, make_repo) - project = env['runbot_merge.project'].search([]) - bot_name = project.fp_github_name - with prod: - [c] = prod.make_commits('a', Commit('c 0', tree={'0': '0'}), ref='heads/branch0') - pr = prod.make_pr(target='a', head='branch0') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+\n%s up to b' % bot_name, config['role_reviewer']['token']) - - [c] = prod.make_commits('a', Commit('c 1', tree={'1': '1'}), ref='heads/branch1') - pr = prod.make_pr(target='a', head='branch1') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - # disable branch b - env['runbot_merge.branch'].search([('name', '=', 'b')]).write({enabled: False}) - env.run_crons() - - # should have created a single PR (to branch c, for pr 1) - _0, _1, p = env['runbot_merge.pull_requests'].search([], order='number') - assert p.parent_id == _1 - assert p.target.name == 'c' - - project.fp_github_token = config['role_other']['token'] - bot_name = project.fp_github_name - with prod: - [c] = prod.make_commits('a', Commit('c 2', tree={'2': '2'}), ref='heads/branch2') - pr = prod.make_pr(target='a', head='branch2') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+\n%s up to' % bot_name, config['role_reviewer']['token']) - pr.post_comment('%s up to b' % bot_name, config['role_reviewer']['token']) - pr.post_comment('%s up to foo' % bot_name, config['role_reviewer']['token']) - pr.post_comment('%s up to c' % bot_name, config['role_reviewer']['token']) - env.run_crons() - - # use a set because git webhooks delays might lead to mis-ordered - # responses and we don't care that much - assert set(pr.comments) == { - (users['reviewer'], "hansen r+\n%s up to" % bot_name), - (users['other'], "@%s please provide a branch to forward-port to." % users['reviewer']), - (users['reviewer'], "%s up to b" % bot_name), - (users['other'], "@%s branch 'b' is disabled, it can't be used as a forward port target." % users['reviewer']), - (users['reviewer'], "%s up to foo" % bot_name), - (users['other'], "@%s there is no branch 'foo', it can't be used as a forward port target." 
% users['reviewer']), - (users['reviewer'], "%s up to c" % bot_name), - (users['other'], "Forward-porting to 'c'."), - seen(env, pr, users), - } - - -def test_default_disabled(env, config, make_repo, users): - """ If the default limit is disabled, it should still be the default - limit but the ping message should be set on the actual last FP (to the - last non-deactivated target) - """ - prod, other = make_basic(env, config, make_repo) - branch_c = env['runbot_merge.branch'].search([('name', '=', 'c')]) - branch_c.fp_target = False - - with prod: - [c] = prod.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/branch0') - pr = prod.make_pr(target='a', head='branch0') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - assert env['runbot_merge.pull_requests'].search([]).limit_id == branch_c - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - p1, p2 = env['runbot_merge.pull_requests'].search([], order='number') - assert p1.number == pr.number - pr2 = prod.get_pr(p2.number) - - cs = pr2.comments - assert len(cs) == 2 - assert pr2.comments == [ - seen(env, pr2, users), - (users['user'], """\ -@%(user)s @%(reviewer)s this PR targets b and is the last of the forward-port chain. - -To merge the full chain, say -> @%(user)s r+ - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -""" % users) - ] - -def test_limit_after_merge(env, config, make_repo, users): - """ If attempting to set a limit (<up to>) on a PR which is merged - (already forward-ported or not), or is a forward-port PR, fwbot should - just feedback that it won't do it - """ - prod, other = make_basic(env, config, make_repo) - reviewer = config['role_reviewer']['token'] - branch_c = env['runbot_merge.branch'].search([('name', '=', 'c')]) - bot_name = env['runbot_merge.project'].search([]).fp_github_name - with prod: - [c] = prod.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/abranch') - pr1 = prod.make_pr(target='a', head='abranch') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr1.post_comment('hansen r+', reviewer) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - p1, p2 = env['runbot_merge.pull_requests'].search([], order='number') - assert p1.limit_id == p2.limit_id == branch_c, "check that limit is correctly set" - pr2 = prod.get_pr(p2.number) - with prod: - pr1.post_comment(bot_name + ' up to b', reviewer) - pr2.post_comment(bot_name + ' up to b', reviewer) - env.run_crons() - - assert p1.limit_id == p2.limit_id == branch_c, \ - "check that limit was not updated" - assert pr1.comments == [ - (users['reviewer'], "hansen r+"), - seen(env, pr1, users), - (users['reviewer'], bot_name + ' up to b'), - (bot_name, "@%s forward-port limit can only be set before the PR is merged." % users['reviewer']), - ] - assert pr2.comments == [ - seen(env, pr2, users), - (users['user'], """\ -This PR targets b and is part of the forward-port chain. Further PRs will be created up to c. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -"""), - (users['reviewer'], bot_name + ' up to b'), - (bot_name, "@%s forward-port limit can only be set on an origin PR" - " (%s here) before it's merged and forward-ported." 
% ( - users['reviewer'], - p1.display_name, - )), - ] - - # update pr2 to detach it from pr1 - with other: - other.make_commits( - p2.target.name, - Commit('updated', tree={'1': '1'}), - ref=pr2.ref, - make=False - ) - env.run_crons() - assert not p2.parent_id - assert p2.source_id == p1 - - with prod: - pr2.post_comment(bot_name + ' up to b', reviewer) - env.run_crons() - - assert pr2.comments[4:] == [ - (bot_name, "@%s @%s this PR was modified / updated and has become a normal PR. " - "It should be merged the normal way (via @%s)" % ( - users['user'], users['reviewer'], - p2.repository.project_id.github_prefix - )), - (users['reviewer'], bot_name + ' up to b'), - (bot_name, f"@{users['reviewer']} forward-port limit can only be set on an origin PR " - f"({p1.display_name} here) before it's merged and forward-ported." - ), - ] diff --git a/forwardport/tests/test_overrides.py b/forwardport/tests/test_overrides.py deleted file mode 100644 index e8a18d35..00000000 --- a/forwardport/tests/test_overrides.py +++ /dev/null @@ -1,116 +0,0 @@ -import json - -from utils import Commit, make_basic - -def statuses(pr): - return { - k: v['state'] - for k, v in json.loads(pr.statuses_full).items() - } -def test_override_inherited(env, config, make_repo, users): - """ A forwardport should inherit its parents' overrides, until it's edited. - """ - repo, other = make_basic(env, config, make_repo) - project = env['runbot_merge.project'].search([]) - env['res.partner'].search([('github_login', '=', users['reviewer'])])\ - .write({'override_rights': [(0, 0, { - 'repository_id': project.repo_ids.id, - 'context': 'ci/runbot', - })]}) - - with repo: - repo.make_commits('a', Commit('C', tree={'a': '0'}), ref='heads/change') - pr = repo.make_pr(target='a', head='change') - repo.post_status('change', 'success', 'legal/cla') - pr.post_comment('hansen r+ override=ci/runbot', config['role_reviewer']['token']) - env.run_crons() - - original = env['runbot_merge.pull_requests'].search([('repository.name', '=', repo.name), ('number', '=', pr.number)]) - assert original.state == 'ready' - - with repo: - repo.post_status('staging.a', 'success', 'legal/cla') - repo.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr0_id == original - assert pr1_id.parent_id, pr0_id - - with repo: - repo.post_status(pr1_id.head, 'success', 'legal/cla') - env.run_crons() - assert pr1_id.state == 'validated' - assert statuses(pr1_id) == {'ci/runbot': 'success', 'legal/cla': 'success'} - - # now we edit the child PR - pr_repo, pr_ref = repo.get_pr(pr1_id.number).branch - with pr_repo: - pr_repo.make_commits( - pr1_id.target.name, - Commit('wop wop', tree={'a': '1'}), - ref=f'heads/{pr_ref}', - make=False - ) - env.run_crons() - assert pr1_id.state == 'opened' - assert not pr1_id.parent_id - assert statuses(pr1_id) == {}, "should not have any status left" - -def test_override_combination(env, config, make_repo, users): - """ A forwardport should inherit its parents' overrides, until it's edited. 
- """ - repo, other = make_basic(env, config, make_repo) - project = env['runbot_merge.project'].search([]) - env['res.partner'].search([('github_login', '=', users['reviewer'])]) \ - .write({'override_rights': [ - (0, 0, { - 'repository_id': project.repo_ids.id, - 'context': 'ci/runbot', - }), - (0, 0, { - 'repository_id': project.repo_ids.id, - 'context': 'legal/cla', - }) - ]}) - - with repo: - repo.make_commits('a', Commit('C', tree={'a': '0'}), ref='heads/change') - pr = repo.make_pr(target='a', head='change') - repo.post_status('change', 'success', 'legal/cla') - pr.post_comment('hansen r+ override=ci/runbot', config['role_reviewer']['token']) - env.run_crons() - - pr0_id = env['runbot_merge.pull_requests'].search([('repository.name', '=', repo.name), ('number', '=', pr.number)]) - assert pr0_id.state == 'ready' - assert statuses(pr0_id) == {'ci/runbot': 'success', 'legal/cla': 'success'} - - with repo: - repo.post_status('staging.a', 'success', 'legal/cla') - repo.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - # check for combination: ci/runbot is overridden through parent, if we - # override legal/cla then the PR should be validated - pr1_id = env['runbot_merge.pull_requests'].search([('parent_id', '=', pr0_id.id)]) - assert pr1_id.state == 'opened' - assert statuses(pr1_id) == {'ci/runbot': 'success'} - with repo: - repo.get_pr(pr1_id.number).post_comment('hansen override=legal/cla', config['role_reviewer']['token']) - env.run_crons() - assert pr1_id.state == 'validated' - - # editing the child should devalidate - pr_repo, pr_ref = repo.get_pr(pr1_id.number).branch - with pr_repo: - pr_repo.make_commits( - pr1_id.target.name, - Commit('wop wop', tree={'a': '1'}), - ref=f'heads/{pr_ref}', - make=False - ) - env.run_crons() - assert pr1_id.state == 'opened' - assert not pr1_id.parent_id - assert statuses(pr1_id) == {'legal/cla': 'success'}, \ - "should only have its own status left" diff --git a/forwardport/tests/test_simple.py b/forwardport/tests/test_simple.py deleted file mode 100644 index 172cee51..00000000 --- a/forwardport/tests/test_simple.py +++ /dev/null @@ -1,1019 +0,0 @@ -# -*- coding: utf-8 -*- -import collections -import re -import time -from datetime import datetime, timedelta - -import pytest - -from utils import seen, Commit, make_basic, REF_PATTERN, MESSAGE_TEMPLATE, validate_all, part_of - -FMT = '%Y-%m-%d %H:%M:%S' -FAKE_PREV_WEEK = (datetime.now() + timedelta(days=1)).strftime(FMT) - -# need: -# * an odoo server -# - connected to a database -# - with relevant modules loaded / installed -# - set up project -# - add repo, branch(es) -# - provide visibility to contents si it can be queried & al -# * a tunnel so the server is visible from the outside (webhooks) -# * the ability to create repos on github -# - repo name -# - a github user to create a repo with -# - a github owner to create a repo *for* -# - provide ability to create commits, branches, prs, ... 
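One of the prerequisites listed above is a GitHub token that can actually create repositories and hooks; the suite's conftest (shown earlier in this patch) verifies that by reading the `X-OAuth-Scopes` header GitHub returns for classic tokens. A minimal sketch of that check; the endpoint choice and the scope set in the usage comment are illustrative:

```python
import requests


def assert_token_scopes(token: str, required: set) -> None:
    """Fail fast if a classic GitHub token lacks the scopes the tests need."""
    resp = requests.get(
        'https://api.github.com/rate_limit',  # cheap call, returns the scopes header
        headers={'Authorization': 'token %s' % token},
    )
    resp.raise_for_status()
    # header looks like "public_repo, admin:repo_hook, user:email"
    granted = {s.strip() for s in resp.headers.get('X-OAuth-Scopes', '').split(',') if s.strip()}
    missing = required - granted
    assert not missing, "token is missing scopes: %s" % ', '.join(sorted(missing))


# usage (with a real classic personal access token):
# assert_token_scopes(os.environ['GITHUB_TOKEN'], {'public_repo', 'admin:repo_hook'})
```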
-def test_straightforward_flow(env, config, make_repo, users): - # TODO: ~all relevant data in users when creating partners - # get reviewer's name - reviewer_name = env['res.partner'].search([ - ('github_login', '=', users['reviewer']) - ]).name - - prod, other = make_basic(env, config, make_repo) - other_user = config['role_other'] - other_user_repo = prod.fork(token=other_user['token']) - - project = env['runbot_merge.project'].search([]) - b_head = prod.commit('b') - c_head = prod.commit('c') - with prod, other_user_repo: - # create PR as a user with no access to prod (or other) - [_, p_1] = other_user_repo.make_commits( - 'a', - Commit('p_0', tree={'x': '0'}), - Commit('p_1', tree={'x': '1'}), - ref='heads/hugechange' - ) - pr = prod.make_pr( - target='a', title="super important change", - head=other_user['user'] + ':hugechange', - token=other_user['token'] - ) - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - # use rebase-ff (instead of rebase-merge) so we don't have to dig in - # parents of the merge commit to find the cherrypicks - pr.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token']) - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', prod.name), - ('number', '=', pr.number), - ]) - assert not pr_id.merge_date,\ - "PR obviously shouldn't have a merge date before being merged" - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - # should merge the staging then create the FP PR - env.run_crons() - - assert datetime.now() - datetime.strptime(pr_id.merge_date, FMT) <= timedelta(minutes=1),\ - "check if merge date was set about now (within a minute as crons and " \ - "RPC calls yield various delays before we're back)" - - p_1_merged = prod.commit('a') - - assert p_1_merged.id != p_1 - assert p_1_merged.message == MESSAGE_TEMPLATE.format( - message='p_1', - repo=prod.name, - number=pr.number, - headers='', - name=reviewer_name, - email=config['role_reviewer']['email'], - ) - assert prod.read_tree(p_1_merged) == { - 'f': 'e', - 'x': '1', - }, "ensure p_1_merged has ~ the same contents as p_1 but is a different commit" - [p_0_merged] = p_1_merged.parents - - # wait a bit for PR webhook... ? - time.sleep(5) - env.run_crons() - - pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') - assert pr0.number == pr.number - # 50 lines in, we can start checking the forward port... - assert pr1.parent_id == pr0 - assert pr1.source_id == pr0 - other_owner = other.name.split('/')[0] - assert re.match(other_owner + ':' + REF_PATTERN.format(target='b', source='hugechange'), pr1.label), \ - "check that FP PR was created in FP target repo" - c = prod.commit(pr1.head) - # TODO: add original committer (if !author) as co-author in commit message? 
- assert c.author['name'] == other_user['user'], "author should still be original's probably" - assert c.committer['name'] == other_user['user'], "committer should also still be the original's, really" - assert prod.read_tree(c) == { - 'f': 'c', - 'g': 'b', - 'x': '1' - } - with prod: - prod.post_status(pr1.head, 'success', 'ci/runbot') - prod.post_status(pr1.head, 'success', 'legal/cla') - - env.run_crons() - env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) - - pr0_, pr1_, pr2 = env['runbot_merge.pull_requests'].search([], order='number') - - assert pr.comments == [ - (users['reviewer'], 'hansen r+ rebase-ff'), - seen(env, pr, users), - (users['user'], 'Merge method set to rebase and fast-forward.'), - (users['user'], '@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % ( - users['other'], users['reviewer'], - '\n- '.join((pr1 | pr2).mapped('display_name')) - )), - ] - - assert pr0_ == pr0 - assert pr1_ == pr1 - assert pr1.parent_id == pr1.source_id == pr0 - assert pr2.parent_id == pr1 - assert pr2.source_id == pr0 - assert not pr0.squash, "original PR has >1 commit" - assert not (pr1.squash or pr2.squash), "forward ports should also have >1 commit" - assert re.match(REF_PATTERN.format(target='c', source='hugechange'), pr2.refname), \ - "check that FP PR was created in FP target repo" - assert prod.read_tree(prod.commit(pr2.head)) == { - 'f': 'c', - 'g': 'a', - 'h': 'a', - 'x': '1' - } - pr2_remote = prod.get_pr(pr2.number) - assert pr2_remote.comments == [ - seen(env, pr2_remote, users), - (users['user'], """\ -@%s @%s this PR targets c and is the last of the forward-port chain containing: -* %s - -To merge the full chain, say -> @%s r+ - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -""" % ( - users['other'], users['reviewer'], - pr1.display_name, - project.fp_github_name - )), - ] - with prod: - prod.post_status(pr2.head, 'success', 'ci/runbot') - prod.post_status(pr2.head, 'success', 'legal/cla') - - pr2_remote.post_comment('%s r+' % project.fp_github_name, config['role_reviewer']['token']) - - env.run_crons() - - assert pr1.staging_id - assert pr2.staging_id - # two branches so should have two stagings - assert pr1.staging_id != pr2.staging_id - # validate - with prod: - prod.post_status('staging.b', 'success', 'ci/runbot') - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.c', 'success', 'ci/runbot') - prod.post_status('staging.c', 'success', 'legal/cla') - - # and trigger merge - env.run_crons() - - # apparently github strips out trailing newlines when fetching through the - # API... 
- message_template = MESSAGE_TEMPLATE.format( - message='p_1', - repo=prod.name, - number='%s', - headers='X-original-commit: {}\n'.format(p_1_merged.id), - name=reviewer_name, - email=config['role_reviewer']['email'], - ) - - old_b = prod.read_tree(b_head) - head_b = prod.commit('b') - assert head_b.message == message_template % pr1.number - assert prod.commit(head_b.parents[0]).message == part_of(f'p_0\n\nX-original-commit: {p_0_merged}', pr1, separator='\n') - b_tree = prod.read_tree(head_b) - assert b_tree == { - **old_b, - 'x': '1', - } - old_c = prod.read_tree(c_head) - head_c = prod.commit('c') - assert head_c.message == message_template % pr2.number - assert prod.commit(head_c.parents[0]).message == part_of(f'p_0\n\nX-original-commit: {p_0_merged}', pr2, separator='\n') - c_tree = prod.read_tree(head_c) - assert c_tree == { - **old_c, - 'x': '1', - } - # check that we didn't just smash the original trees - assert prod.read_tree(prod.commit('a')) != b_tree != c_tree - - prs = env['forwardport.branch_remover'].search([]).mapped('pr_id') - assert prs == pr0 | pr1 | pr2, "pr1 and pr2 should be slated for branch deletion" - env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK}) - - # should not have deleted the base branch (wrong repo) - assert other_user_repo.get_ref(pr.ref) == p_1 - - # should have deleted all PR branches - pr1_ref = prod.get_pr(pr1.number).ref - with pytest.raises(AssertionError, match='Not Found'): - other.get_ref(pr1_ref) - - pr2_ref = pr2_remote.ref - with pytest.raises(AssertionError, match="Not Found"): - other.get_ref(pr2_ref) - -def test_empty(env, config, make_repo, users): - """ Cherrypick of an already cherrypicked (or separately implemented) - commit -> conflicting pr. - """ - prod, other = make_basic(env, config, make_repo) - # merge change to b - with prod: - [p_0] = prod.make_commits( - 'b', Commit('p', tree={'x': '0'}), - ref='heads/early' - ) - pr0 = prod.make_pr(target='b', head='early') - prod.post_status(p_0, 'success', 'legal/cla') - prod.post_status(p_0, 'success', 'ci/runbot') - pr0.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.b', 'success', 'ci/runbot') - - # merge same change to a afterwards - with prod: - [p_1] = prod.make_commits( - 'a', Commit('p_0', tree={'x': '0'}), - ref='heads/late' - ) - pr1 = prod.make_pr(target='a', head='late') - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - env.run_crons() - assert prod.read_tree(prod.commit('a')) == { - 'f': 'e', - 'x': '0', - } - assert prod.read_tree(prod.commit('b')) == { - 'f': 'c', - 'g': 'b', - 'x': '0', - } - # should have 4 PRs: - # PR 0 - # FP of PR 0 to C - # PR 1 - # failed FP of PR1 to B - prs = env['runbot_merge.pull_requests'].search([], order='number') - assert len(prs) == 4 - pr0_id = prs.filtered(lambda p: p.number == pr0.number) - pr1_id = prs.filtered(lambda p: p.number == pr1.number) - fp_id = prs.filtered(lambda p: p.parent_id == pr0_id) - fail_id = prs - (pr0_id | pr1_id | fp_id) - assert fp_id - assert fail_id - # unlinked from parent since cherrypick failed - assert not fail_id.parent_id - # the tree should be clean... 
- assert prod.read_tree(prod.commit(fail_id.head)) == { - 'f': 'c', - 'g': 'b', - 'x': '0', - } - - with prod: - validate_all([prod], [fp_id.head, fail_id.head]) - env.run_crons() - - # should not have created any new PR - assert env['runbot_merge.pull_requests'].search([], order='number') == prs - # change FP token to see if the feedback comes from the proper user - project = env['runbot_merge.project'].search([]) - project.fp_github_token = config['role_other']['token'] - assert project.fp_github_name == users['other'] - - # check reminder - env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) - env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) - - awaiting = ( - users['other'], - '@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % ( - users['user'], users['reviewer'], - fail_id.display_name - ) - ) - assert pr1.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr1, users), - awaiting, - awaiting, - ], "each cron run should trigger a new message on the ancestor" - # check that this stops if we close the PR - with prod: - prod.get_pr(fail_id.number).close() - env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) - assert pr1.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr1, users), - awaiting, - awaiting, - ] - -def test_partially_empty(env, config, make_repo): - """ Check what happens when only some commits of the PR are now empty - """ - prod, other = make_basic(env, config, make_repo) - # merge change to b - with prod: - [p_0] = prod.make_commits( - 'b', Commit('p', tree={'x': '0'}), - ref='heads/early' - ) - pr0 = prod.make_pr(target='b', head='early') - prod.post_status(p_0, 'success', 'legal/cla') - prod.post_status(p_0, 'success', 'ci/runbot') - pr0.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.b', 'success', 'ci/runbot') - - # merge same change to a afterwards - with prod: - [*_, p_1] = prod.make_commits( - 'a', - Commit('p_0', tree={'w': '0'}), - Commit('p_1', tree={'x': '0'}), - Commit('p_2', tree={'y': '0'}), - ref='heads/late' - ) - pr1 = prod.make_pr(target='a', head='late') - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - pr1.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - env.run_crons() - assert prod.read_tree(prod.commit('a')) == { - 'f': 'e', - 'w': '0', - 'x': '0', - 'y': '0', - } - assert prod.read_tree(prod.commit('b')) == { - 'f': 'c', - 'g': 'b', - 'x': '0', - } - - fail_id = env['runbot_merge.pull_requests'].search([ - ('number', 'not in', [pr0.number, pr1.number]), - ('parent_id', '=', False), - ], order='number') - assert fail_id - # unlinked from parent since cherrypick failed - assert not fail_id.parent_id - # the tree should be clean... 
- assert prod.read_tree(prod.commit(fail_id.head)) == { - 'f': 'c', - 'g': 'b', - 'w': '0', - 'x': '0', - 'y': '0', - } - -Case = collections.namedtuple('Case', 'author reviewer delegate success') -ACL = [ - Case('reviewer', 'reviewer', None, True), - Case('reviewer', 'self_reviewer', None, False), - Case('reviewer', 'other', None, False), - Case('reviewer', 'other', 'other', True), - - Case('self_reviewer', 'reviewer', None, True), - Case('self_reviewer', 'self_reviewer', None, True), - Case('self_reviewer', 'other', None, False), - Case('self_reviewer', 'other', 'other', True), - - Case('other', 'reviewer', None, True), - Case('other', 'self_reviewer', None, False), - Case('other', 'other', None, True), - Case('other', 'other', 'other', True), -] -@pytest.mark.parametrize(Case._fields, ACL) -def test_access_rights(env, config, make_repo, users, author, reviewer, delegate, success): - """Validates the review rights *for the forward-port sequence*, the original - PR is always reviewed by `user`. - """ - prod, other = make_basic(env, config, make_repo) - project = env['runbot_merge.project'].search([]) - - # create a partner for `user` - c = env['res.partner'].create({ - 'name': users['user'], - 'github_login': users['user'], - 'email': 'user@example.org', - }) - c.write({ - 'review_rights': [ - (0, 0, {'repository_id': repo.id, 'review': True}) - for repo in project.repo_ids - ] - }) - # create a partner for `other` so we can put an email on it - env['res.partner'].create({ - 'name': users['other'], - 'github_login': users['other'], - 'email': 'other@example.org', - }) - - author_token = config['role_' + author]['token'] - fork = prod.fork(token=author_token) - with prod, fork: - [c] = fork.make_commits('a', Commit('c_0', tree={'y': '0'}), ref='heads/accessrights') - pr = prod.make_pr( - target='a', title='my change', - head=users[author] + ':accessrights', - token=author_token, - ) - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', token=config['github']['token']) - if delegate: - pr.post_comment('hansen delegate=%s' % users[delegate], token=config['github']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') - assert pr0.state == 'merged' - with prod: - prod.post_status(pr1.head, 'success', 'ci/runbot') - prod.post_status(pr1.head, 'success', 'legal/cla') - env.run_crons() - - _, _, pr2 = env['runbot_merge.pull_requests'].search([], order='number') - with prod: - prod.post_status(pr2.head, 'success', 'ci/runbot') - prod.post_status(pr2.head, 'success', 'legal/cla') - prod.get_pr(pr2.number).post_comment( - '%s r+' % project.fp_github_name, - token=config['role_' + reviewer]['token'] - ) - env.run_crons() - if success: - assert pr1.staging_id and pr2.staging_id,\ - "%s should have approved FP of PRs by %s" % (reviewer, author) - st = prod.commit('staging.b') - # Should be signed-off by both original reviewer and forward port reviewer - original_signoff = signoff(config['role_user'], st.message) - forward_signoff = signoff(config['role_' + reviewer], st.message) - assert st.message.index(original_signoff) <= st.message.index(forward_signoff),\ - "Check that FP approver is after original PR approver as that's " \ - "the review path for the PR" - else: - assert not (pr1.staging_id or pr2.staging_id),\ - "%s should *not* have 
approved FP of PRs by %s" % (reviewer, author) -def signoff(conf, message): - for n in filter(None, [conf.get('name'), conf.get('user')]): - signoff = 'Signed-off-by: ' + n - if signoff in message: - return signoff - raise AssertionError("Failed to find signoff by %s in %s" % (conf, message)) - - -def test_delegate_fw(env, config, make_repo, users): - """If a user is delegated *on a forward port* they should be able to approve - *the followup*. - """ - prod, _ = make_basic(env, config, make_repo) - # create a partner for `other` so we can put an email on it - env['res.partner'].create({ - 'name': users['other'], - 'github_login': users['other'], - 'email': 'other@example.org', - }) - author_token = config['role_self_reviewer']['token'] - fork = prod.fork(token=author_token) - with prod, fork: - [c] = fork.make_commits('a', Commit('c_0', tree={'y': '0'}), ref='heads/accessrights') - pr = prod.make_pr( - target='a', title='my change', - head=users['self_reviewer'] + ':accessrights', - token=author_token, - ) - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', token=config['role_reviewer']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - # ensure pr1 has to be approved to be forward-ported - _, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - # detatch from source - pr1_id.parent_id = False - with prod: - prod.post_status(pr1_id.head, 'success', 'legal/cla') - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - env.run_crons() - pr1 = prod.get_pr(pr1_id.number) - # delegate review to "other" consider PR fixed, and have "other" approve it - with prod: - pr1.post_comment('hansen delegate=' + users['other'], - token=config['role_reviewer']['token']) - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - pr1.post_comment('hansen r+', token=config['role_other']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.b', 'success', 'ci/runbot') - env.run_crons() - - _, _, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - pr2 = prod.get_pr(pr2_id.number) - # make "other" also approve this one - with prod: - prod.post_status(pr2_id.head, 'success', 'ci/runbot') - prod.post_status(pr2_id.head, 'success', 'legal/cla') - pr2.post_comment('hansen r+', token=config['role_other']['token']) - env.run_crons() - - assert pr2.comments == [ - seen(env, pr2, users), - (users['user'], '''@{self_reviewer} @{reviewer} this PR targets c and is the last of the forward-port chain. - -To merge the full chain, say -> @{user} r+ - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -'''.format_map(users)), - (users['other'], 'hansen r+') - ] - - -def test_redundant_approval(env, config, make_repo, users): - """If a forward port sequence has been partially approved, fw-bot r+ should - not perform redundant approval as that triggers warning messages. 
- """ - prod, _ = make_basic(env, config, make_repo) - [project] = env['runbot_merge.project'].search([]) - with prod: - prod.make_commits( - 'a', Commit('p', tree={'x': '0'}), - ref='heads/early' - ) - pr0 = prod.make_pr(target='a', head='early') - prod.post_status('heads/early', 'success', 'legal/cla') - prod.post_status('heads/early', 'success', 'ci/runbot') - pr0.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number asc') - with prod: - prod.post_status(pr1_id.head, 'success', 'legal/cla') - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - env.run_crons() - - _, _, pr2_id = env['runbot_merge.pull_requests'].search([], order='number asc') - assert pr2_id.parent_id == pr1_id - assert pr1_id.parent_id == pr0_id - - pr1 = prod.get_pr(pr1_id.number) - pr2 = prod.get_pr(pr2_id.number) - with prod: - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - with prod: - pr2.post_comment(f'{project.fp_github_name} r+', config['role_reviewer']['token']) - env.run_crons() - - assert pr1.comments == [ - seen(env, pr1, users), - (users['user'], 'This PR targets b and is part of the forward-port chain. ' - 'Further PRs will be created up to c.\n\n' - 'More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'), - (users['reviewer'], 'hansen r+'), - ] - - -def test_batched(env, config, make_repo, users): - """ Tests for projects with multiple repos & sync'd branches. Batches - should be FP'd to batches - """ - main1, _ = make_basic(env, config, make_repo, reponame='main1') - main2, _ = make_basic(env, config, make_repo, reponame='main2') - main1.unsubscribe(config['role_reviewer']['token']) - main2.unsubscribe(config['role_reviewer']['token']) - - friendo = config['role_other'] - other1 = main1.fork(token=friendo['token']) - other2 = main2.fork(token=friendo['token']) - - with main1, other1: - [c1] = other1.make_commits( - 'a', Commit('commit repo 1', tree={'1': 'a'}), - ref='heads/contribution' - ) - pr1 = main1.make_pr( - target='a', title="My contribution", - head=friendo['user'] + ':contribution', - token=friendo['token'] - ) - # we can ack it directly as it should not be taken in account until - # we run crons - validate_all([main1], [c1]) - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - with main2, other2: - [c2] = other2.make_commits( - 'a', Commit('commit repo 2', tree={'2': 'a'}), - ref='heads/contribution' # use same ref / label as pr1 - ) - pr2 = main2.make_pr( - target='a', title="Main2 part of my contribution", - head=friendo['user'] + ':contribution', - token=friendo['token'] - ) - validate_all([main2], [c2]) - pr2.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - - # sanity check: this should have created a staging with 1 batch with pr1 and pr2 - stagings = env['runbot_merge.stagings'].search([]) - assert len(stagings) == 1 - assert stagings.target.name == 'a' - assert len(stagings.batch_ids) == 1 - assert stagings.mapped('batch_ids.prs.number') == [pr1.number, pr2.number] - - with main1, main2: - validate_all([main1, main2], ['staging.a']) - env.run_crons() - - PullRequests = env['runbot_merge.pull_requests'] - # created the first forward port, need to validate it so the second one is - # triggered (FP only goes forward on CI+) (?) 
- pr1b = PullRequests.search([ - ('source_id', '!=', False), - ('repository.name', '=', main1.name), - ]) - pr2b = PullRequests.search([ - ('source_id', '!=', False), - ('repository.name', '=', main2.name), - ]) - # check that relevant users were pinged - ping = (users['user'], """\ -This PR targets b and is part of the forward-port chain. Further PRs will be created up to c. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -""") - pr_remote_1b = main1.get_pr(pr1b.number) - pr_remote_2b = main2.get_pr(pr2b.number) - assert pr_remote_1b.comments == [seen(env, pr_remote_1b, users), ping] - assert pr_remote_2b.comments == [seen(env, pr_remote_2b, users), ping] - - with main1, main2: - validate_all([main1], [pr1b.head]) - validate_all([main2], [pr2b.head]) - env.run_crons() # process updated statuses -> generate followup FP - - # should have created two PRs whose source is p1 and two whose source is p2 - pr1a, pr1b, pr1c = PullRequests.search([ - ('repository.name', '=', main1.name), - ], order='number') - pr2a, pr2b, pr2c = PullRequests.search([ - ('repository.name', '=', main2.name), - ], order='number') - - assert pr1a.number == pr1.number - assert pr2a.number == pr2.number - assert pr1a.state == pr2a.state == 'merged' - - assert pr1b.label == pr2b.label, "batched source should yield batched FP" - assert pr1c.label == pr2c.label, "batched source should yield batched FP" - assert pr1b.label != pr1c.label - - project = env['runbot_merge.project'].search([]) - # ok main1 PRs - with main1: - validate_all([main1], [pr1c.head]) - main1.get_pr(pr1c.number).post_comment('%s r+' % project.fp_github_name, config['role_reviewer']['token']) - env.run_crons() - - # check that the main1 PRs are ready but blocked on the main2 PRs - assert pr1b.state == 'ready' - assert pr1c.state == 'ready' - assert pr1b.blocked - assert pr1c.blocked - - # ok main2 PRs - with main2: - validate_all([main2], [pr2c.head]) - main2.get_pr(pr2c.number).post_comment('%s r+' % project.fp_github_name, config['role_reviewer']['token']) - env.run_crons() - - stb, stc = env['runbot_merge.stagings'].search([], order='target') - assert stb.target.name == 'b' - assert stc.target.name == 'c' - - with main1, main2: - validate_all([main1, main2], ['staging.b', 'staging.c']) - -class TestClosing: - def test_closing_before_fp(self, env, config, make_repo, users): - """ Closing a PR should preclude its forward port - """ - prod, other = make_basic(env, config, make_repo) - with prod: - [p_1] = prod.make_commits( - 'a', - Commit('p_0', tree={'x': '0'}), - ref='heads/hugechange' - ) - pr = prod.make_pr(target='a', head='hugechange') - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - # should merge the staging then create the FP PR - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - # close the FP PR then have CI validate it - pr1 = prod.get_pr(pr1_id.number) - with prod: - pr1.close() - assert pr1_id.state == 'closed' - assert not pr1_id.parent_id, "closed PR should should be detached from its parent" - with prod: - prod.post_status(pr1_id.head, 'success', 'legal/cla') - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - env.run_crons() - env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron') - - assert 
env['runbot_merge.pull_requests'].search([], order='number') == pr0_id | pr1_id,\ - "closing the PR should suppress the FP sequence" - assert pr1.comments == [ - seen(env, pr1, users), - (users['user'], """\ -This PR targets b and is part of the forward-port chain. Further PRs will be created up to c. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -""") - ] - - def test_closing_after_fp(self, env, config, make_repo, users): - """ Closing a PR which has been forward-ported should not touch the - followups - """ - prod, other = make_basic(env, config, make_repo) - project = env['runbot_merge.project'].search([]) - with prod: - [p_1] = prod.make_commits( - 'a', - Commit('p_0', tree={'x': '0'}), - ref='heads/hugechange' - ) - pr = prod.make_pr(target='a', head='hugechange') - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - # should merge the staging then create the FP PR - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - with prod: - prod.post_status(pr1_id.head, 'success', 'legal/cla') - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - # should create the second staging - env.run_crons() - - pr0_id2, pr1_id2, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr0_id2 == pr0_id - assert pr1_id2 == pr1_id - - pr1 = prod.get_pr(pr1_id.number) - with prod: - pr1.close() - - assert pr1_id.state == 'closed' - assert not pr1_id.parent_id - assert pr2_id.state == 'opened' - assert not pr2_id.parent_id, \ - "the descendant of a closed PR doesn't really make sense, maybe?" - - with prod: - pr1.open() - assert pr1_id.state == 'validated' - env.run_crons() - assert pr1.comments[-1] == ( - users['user'], - "@{} @{} this PR was closed then reopened. 
" - "It should be merged the normal way (via @{})".format( - users['user'], - users['reviewer'], - project.github_prefix, - ) - ) - - with prod: - pr1.post_comment(f'{project.fp_github_name} r+', config['role_reviewer']['token']) - env.run_crons() - assert pr1.comments[-1] == ( - users['user'], - "@{} I can only do this on unmodified forward-port PRs, ask {}.".format( - users['reviewer'], - project.github_prefix, - ), - ) - -class TestBranchDeletion: - def test_delete_normal(self, env, config, make_repo): - """ Regular PRs should get their branch deleted as long as they're - created in the fp repository - """ - prod, other = make_basic(env, config, make_repo) - with prod, other: - [c] = other.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/abranch') - pr = prod.make_pr( - target='a', head='%s:abranch' % other.owner, - title="a pr", - ) - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', prod.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'merged' - removers = env['forwardport.branch_remover'].search([]) - to_delete_branch = removers.mapped('pr_id') - assert to_delete_branch == pr_id - - env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK}) - with pytest.raises(AssertionError, match="Not Found"): - other.get_ref('heads/abranch') - - def test_not_merged(self, env, config, make_repo): - """ The branches of PRs which are still open or have been closed (rather - than merged) should not get deleted - """ - prod, other = make_basic(env, config, make_repo) - with prod, other: - [c] = other.make_commits('a', Commit('c1', tree={'1': '0'}), ref='heads/abranch') - pr1 = prod.make_pr(target='a', head='%s:abranch' % other.owner, title='a') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - - other.make_commits('a', Commit('c2', tree={'2': '0'}), ref='heads/bbranch') - pr2 = prod.make_pr(target='a', head='%s:bbranch' % other.owner, title='b') - pr2.close() - - [c] = other.make_commits('a', Commit('c3', tree={'3': '0'}), ref='heads/cbranch') - pr3 = prod.make_pr(target='a', head='%s:cbranch' % other.owner, title='c') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - - other.make_commits('a', Commit('c3', tree={'4': '0'}), ref='heads/dbranch') - pr4 = prod.make_pr(target='a', head='%s:dbranch' % other.owner, title='d') - pr4.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - PR = env['runbot_merge.pull_requests'] - # check PRs are in states we expect - pr_heads = [] - for p, st in [(pr1, 'ready'), (pr2, 'closed'), (pr3, 'validated'), (pr4, 'approved')]: - p_id = PR.search([ - ('repository.name', '=', prod.name), - ('number', '=', p.number), - ]) - assert p_id.state == st - pr_heads.append(p_id.head) - - env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK}) - - # check that the branches still exist - assert other.get_ref('heads/abranch') == pr_heads[0] - assert other.get_ref('heads/bbranch') == pr_heads[1] - assert other.get_ref('heads/cbranch') == pr_heads[2] - assert 
other.get_ref('heads/dbranch') == pr_heads[3] - -def sPeNgBaB(s): - return ''.join( - l if i % 2 == 0 else l.upper() - for i, l in enumerate(s) - ) -def test_spengbab(): - assert sPeNgBaB("spongebob") == 'sPoNgEbOb' - -class TestRecognizeCommands: - def make_pr(self, env, config, make_repo): - r, _ = make_basic(env, config, make_repo) - - with r: - r.make_commits('c', Commit('p', tree={'x': '0'}), ref='heads/testbranch') - pr = r.make_pr(target='a', head='testbranch') - - return r, pr, env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', r.name), - ('number', '=', pr.number), - ]) - - def test_botname_casing(self, env, config, make_repo): - """ Test that the botname is case-insensitive as people might write - bot names capitalised or titlecased or uppercased or whatever - """ - repo, pr, pr_id = self.make_pr(env, config, make_repo) - assert pr_id.state == 'opened' - botname = env['runbot_merge.project'].search([]).fp_github_name - [a] = env['runbot_merge.branch'].search([ - ('name', '=', 'a') - ]) - [c] = env['runbot_merge.branch'].search([ - ('name', '=', 'c') - ]) - - names = [ - botname, - botname.upper(), - botname.capitalize(), - sPeNgBaB(botname), - ] - - for n in names: - assert pr_id.limit_id == c - with repo: - pr.post_comment('@%s up to a' % n, config['role_reviewer']['token']) - assert pr_id.limit_id == a - # reset state - pr_id.write({'limit_id': c.id}) - - @pytest.mark.parametrize('indent', ['', '\N{SPACE}', '\N{SPACE}'*4, '\N{TAB}']) - def test_botname_indented(self, env, config, make_repo, indent): - """ matching botname should ignore leading whitespaces - """ - repo, pr, pr_id = self.make_pr(env, config, make_repo) - assert pr_id.state == 'opened' - botname = env['runbot_merge.project'].search([]).fp_github_name - [a] = env['runbot_merge.branch'].search([ - ('name', '=', 'a') - ]) - [c] = env['runbot_merge.branch'].search([ - ('name', '=', 'c') - ]) - - assert pr_id.limit_id == c - with repo: - pr.post_comment('%s@%s up to a' % (indent, botname), config['role_reviewer']['token']) - assert pr_id.limit_id == a diff --git a/forwardport/tests/test_updates.py b/forwardport/tests/test_updates.py deleted file mode 100644 index cc7de7c2..00000000 --- a/forwardport/tests/test_updates.py +++ /dev/null @@ -1,414 +0,0 @@ -""" -Test cases for updating PRs during after the forward-porting process after the -initial merge has succeeded (and forward-porting has started) -""" -import re -import sys - -import pytest - -from utils import seen, re_matches, Commit, make_basic, to_pr - - -def test_update_pr(env, config, make_repo, users): - """ Even for successful cherrypicks, it's possible that e.g. CI doesn't - pass or the reviewer finds out they need to update the code. - - In this case, all following forward ports should... be detached? Or maybe - only this one and its dependent should be updated? 
- """ - prod, _ = make_basic(env, config, make_repo) - with prod: - [p_1] = prod.make_commits( - 'a', - Commit('p_0', tree={'x': '0'}), - ref='heads/hugechange' - ) - pr = prod.make_pr(target='a', head='hugechange') - prod.post_status(p_1, 'success', 'legal/cla') - prod.post_status(p_1, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - # should merge the staging then create the FP PR - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - - fp_intermediate = (users['user'], '''\ -This PR targets b and is part of the forward-port chain. Further PRs will be created up to c. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -''') - ci_warning = (users['user'], '@%(user)s @%(reviewer)s ci/runbot failed on this forward-port PR' % users) - - # oh no CI of the first FP PR failed! - # simulate status being sent multiple times (e.g. on multiple repos) with - # some delivery lag allowing for the cron to run between each delivery - for st, ctx in [('failure', 'ci/runbot'), ('failure', 'ci/runbot'), ('success', 'legal/cla'), ('success', 'legal/cla')]: - with prod: - prod.post_status(pr1_id.head, st, ctx) - env.run_crons() - with prod: # should be ignored because the description doesn't matter - prod.post_status(pr1_id.head, 'failure', 'ci/runbot', description="HAHAHAHAHA") - env.run_crons() - # check that FP did not resume & we have a ping on the PR - assert env['runbot_merge.pull_requests'].search([], order='number') == pr0_id | pr1_id,\ - "forward port should not continue on CI failure" - pr1_remote = prod.get_pr(pr1_id.number) - assert pr1_remote.comments == [seen(env, pr1_remote, users), fp_intermediate, ci_warning] - - # it was a false positive, rebuild... it fails again! - with prod: - prod.post_status(pr1_id.head, 'failure', 'ci/runbot', target_url='http://example.org/4567890') - env.run_crons() - # check that FP did not resume & we have a ping on the PR - assert env['runbot_merge.pull_requests'].search([], order='number') == pr0_id | pr1_id,\ - "ensure it still hasn't restarted" - assert pr1_remote.comments == [seen(env, pr1_remote, users), fp_intermediate, ci_warning, ci_warning] - - # nb: updating the head would detach the PR and not put it in the warning - # path anymore - - # rebuild again, finally passes - with prod: - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - env.run_crons() - - pr0_id, pr1_id, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr1_id.parent_id == pr0_id - assert pr2_id.parent_id == pr1_id - pr1_head = pr1_id.head - pr2_head = pr2_id.head - - # turns out branch b is syntactically but not semantically compatible! It - # needs x to be 5! - pr_repo, pr_ref = prod.get_pr(pr1_id.number).branch - with pr_repo: - # force-push correct commit to PR's branch - [new_c] = pr_repo.make_commits( - pr1_id.target.name, - Commit('whop whop', tree={'x': '5'}), - ref='heads/%s' % pr_ref, - make=False - ) - env.run_crons() - - assert pr1_id.head == new_c != pr1_head, "the FP PR should be updated" - assert not pr1_id.parent_id, "the FP PR should be detached from the original" - assert pr1_remote.comments == [ - seen(env, pr1_remote, users), - fp_intermediate, ci_warning, ci_warning, - (users['user'], "@%s @%s this PR was modified / updated and has become a normal PR. 
" - "It should be merged the normal way (via @%s)" % ( - users['user'], users['reviewer'], - pr1_id.repository.project_id.github_prefix - )), - ], "users should be warned that the PR has become non-FP" - # NOTE: should the followup PR wait for pr1 CI or not? - assert pr2_id.head != pr2_head - assert pr2_id.parent_id == pr1_id, "the followup PR should still be linked" - - assert prod.read_tree(prod.commit(pr1_id.head)) == { - 'f': 'c', - 'g': 'b', - 'x': '5' - }, "the FP PR should have the new code" - assert prod.read_tree(prod.commit(pr2_id.head)) == { - 'f': 'c', - 'g': 'a', - 'h': 'a', - 'x': '5' - }, "the followup FP should also have the update" - -def test_update_merged(env, make_repo, config, users): - """ Strange things happen when an FP gets closed / merged but then its - parent is modified and the forwardport tries to update the (now merged) - child. - - Turns out the issue is the followup: given a PR a and forward port targets - B -> C -> D. When a is merged we get b, c and d. If c gets merged *then* - b gets updated, the fwbot will update c in turn, then it will look for the - head of the updated c in order to create d. - - However it *will not* find that head, as update events don't get propagated - on closed PRs (this is generally a good thing). As a result, the sanity - check when trying to port c to d will fail. - - After checking with nim, the safest behaviour seems to be: - - * stop at the update of the first closed or merged PR - * signal on that PR that something fucky happened - * also maybe disable or exponentially backoff the update job after some - number of attempts? - """ - prod, _ = make_basic(env, config, make_repo) - # add a 4th branch - with prod: - prod.make_ref('heads/d', prod.commit('c').id) - env['runbot_merge.project'].search([]).write({ - 'branch_ids': [(0, 0, { - 'name': 'd', 'fp_sequence': -1, 'fp_target': True, - })] - }) - - with prod: - [c] = prod.make_commits('a', Commit('p_0', tree={'0': '0'}), ref='heads/hugechange') - pr = prod.make_pr(target='a', head='hugechange') - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - _, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - with prod: - prod.post_status(pr1_id.head, 'success', 'legal/cla') - prod.post_status(pr1_id.head, 'success', 'ci/runbot') - env.run_crons() - - pr0_id, pr1_id, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - pr2 = prod.get_pr(pr2_id.number) - with prod: - pr2.post_comment('hansen r+', config['role_reviewer']['token']) - prod.post_status(pr2_id.head, 'success', 'legal/cla') - prod.post_status(pr2_id.head, 'success', 'ci/runbot') - env.run_crons() - - assert pr2_id.staging_id - with prod: - prod.post_status('staging.c', 'success', 'legal/cla') - prod.post_status('staging.c', 'success', 'ci/runbot') - env.run_crons() - assert pr2_id.state == 'merged' - assert pr2.state == 'closed' - - # now we can try updating pr1 and see what happens - repo, ref = prod.get_pr(pr1_id.number).branch - with repo: - repo.make_commits( - pr1_id.target.name, - Commit('2', tree={'0': '0', '1': '1'}), - ref='heads/%s' % ref, - make=False - ) - updates = env['forwardport.updates'].search([]) - assert updates - assert updates.original_root == pr0_id - assert updates.new_root == pr1_id - 
env.run_crons() - assert not pr1_id.parent_id - assert not env['forwardport.updates'].search([]) - - assert pr2.comments == [ - seen(env, pr2, users), - (users['user'], '''This PR targets c and is part of the forward-port chain. Further PRs will be created up to d. - -More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port -'''), - (users['reviewer'], 'hansen r+'), - (users['user'], """@%s @%s ancestor PR %s has been updated but this PR is merged and can't be updated to match. - -You may want or need to manually update any followup PR.""" % ( - users['user'], - users['reviewer'], - pr1_id.display_name, - )) - ] - -def test_duplicate_fw(env, make_repo, setreviewers, config, users): - """ Test for #451 - """ - # 0 - 1 - 2 - 3 - 4 master - # \ - 31 v3 - # \ - 21 v2 - # \ - 11 v1 - repo = make_repo('proj') - with repo: - _, c1, c2, c3, _ = repo.make_commits( - None, - Commit('0', tree={'f': 'a'}), - Commit('1', tree={'f': 'b'}), - Commit('2', tree={'f': 'c'}), - Commit('3', tree={'f': 'd'}), - Commit('4', tree={'f': 'e'}), - ref='heads/master' - ) - repo.make_commits(c1, Commit('11', tree={'g': 'a'}), ref='heads/v1') - repo.make_commits(c2, Commit('21', tree={'h': 'a'}), ref='heads/v2') - repo.make_commits(c3, Commit('31', tree={'i': 'a'}), ref='heads/v3') - - proj = env['runbot_merge.project'].create({ - 'name': 'a project', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'fp_github_token': config['github']['token'], - 'branch_ids': [ - (0, 0, {'name': 'master', 'fp_sequence': 0, 'fp_target': True}), - (0, 0, {'name': 'v3', 'fp_sequence': 1, 'fp_target': True}), - (0, 0, {'name': 'v2', 'fp_sequence': 2, 'fp_target': True}), - (0, 0, {'name': 'v1', 'fp_sequence': 3, 'fp_target': True}), - ], - 'repo_ids': [ - (0, 0, { - 'name': repo.name, - 'required_statuses': 'ci', - 'fp_remote_target': repo.name, - }) - ] - }) - setreviewers(*proj.repo_ids) - - # create a PR in v1, merge it, then create all 3 ports - with repo: - repo.make_commits('v1', Commit('c0', tree={'z': 'a'}), ref='heads/hugechange') - prv1 = repo.make_pr(target='v1', head='hugechange') - repo.post_status('hugechange', 'success', 'ci') - prv1.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - PRs = env['runbot_merge.pull_requests'] - prv1_id = PRs.search([ - ('repository.name', '=', repo.name), - ('number', '=', prv1.number), - ]) - assert prv1_id.state == 'ready' - with repo: - repo.post_status('staging.v1', 'success', 'ci') - env.run_crons() - assert prv1_id.state == 'merged' - - parent = prv1_id - while True: - child = PRs.search([('parent_id', '=', parent.id)]) - if not child: - break - - assert child.state == 'opened' - with repo: - repo.post_status(child.head, 'success', 'ci') - env.run_crons() - parent = child - pr_ids = _, prv2_id, prv3_id, prmaster_id = PRs.search([], order='number') - _, prv2, prv3, prmaster = [repo.get_pr(p.number) for p in pr_ids] - assert pr_ids.mapped('target.name') == ['v1', 'v2', 'v3', 'master'] - assert pr_ids.mapped('state') == ['merged', 'validated', 'validated', 'validated'] - assert repo.read_tree(repo.commit(prmaster_id.head)) == {'f': 'e', 'z': 'a'} - - with repo: - repo.make_commits('v2', Commit('c0', tree={'z': 'b'}), ref=prv2.ref, make=False) - env.run_crons() - assert pr_ids.mapped('state') == ['merged', 'opened', 'validated', 'validated'] - assert repo.read_tree(repo.commit(prv2_id.head)) == {'f': 'c', 'h': 'a', 'z': 'b'} - assert repo.read_tree(repo.commit(prv3_id.head)) == {'f': 'd', 'i': 'a', 'z': 'b'} - assert 
repo.read_tree(repo.commit(prmaster_id.head)) == {'f': 'e', 'z': 'b'} - - assert prv2_id.source_id == prv1_id - assert not prv2_id.parent_id - - env.run_crons() - assert PRs.search([], order='number') == pr_ids - - with repo: - repo.post_status(prv2.head, 'success', 'ci') - prv2.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with repo: - repo.post_status('staging.v2', 'success', 'ci') - env.run_crons() - # env.run_crons() - assert PRs.search([], order='number') == pr_ids - -def test_subsequent_conflict(env, make_repo, config, users): - """ Test for updating an fw PR in the case where it produces a conflict in - the followup. Cf #467. - """ - repo, fork = make_basic(env, config, make_repo) - - # create a PR in branch A which adds a new file - with repo: - repo.make_commits('a', Commit('newfile', tree={'x': '0'}), ref='heads/pr1') - pr_1 = repo.make_pr(target='a', head='pr1') - repo.post_status('pr1', 'success', 'legal/cla') - repo.post_status('pr1', 'success', 'ci/runbot') - pr_1.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with repo: - repo.post_status('staging.a', 'success', 'legal/cla') - repo.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - pr1_id = to_pr(env, pr_1) - assert pr1_id.state == 'merged' - - pr2_id = env['runbot_merge.pull_requests'].search([('source_id', '=', pr1_id.id)]) - assert pr2_id - with repo: - repo.post_status(pr2_id.head, 'success', 'legal/cla') - repo.post_status(pr2_id.head, 'success', 'ci/runbot') - env.run_crons() - - pr3_id = env['runbot_merge.pull_requests'].search([('parent_id', '=', pr2_id.id)]) - assert pr3_id - assert repo.read_tree(repo.commit(pr3_id.head)) == { - 'f': 'c', - 'g': 'a', - 'h': 'a', - 'x': '0', - } - - # update pr2: add a file "h" - pr2 = repo.get_pr(pr2_id.number) - t = {**repo.read_tree(repo.commit(pr2_id.head)), 'h': 'conflict!'} - with fork: - fork.make_commits(pr2_id.target.name, Commit('newfiles', tree=t), ref=pr2.ref, make=False) - env.run_crons() - - assert repo.read_tree(repo.commit(pr3_id.head)) == { - 'f': 'c', - 'g': 'a', - 'h': re_matches(r'''<<<\x3c<<< HEAD -a -|||||||| parent of [\da-f]{7,}.* -======= -conflict! ->>>\x3e>>> [\da-f]{7,}.* -'''), - 'x': '0', - } - # skip comments: - # 1. link to mergebot status page - # 2. "forward port chain" bit - # 3. updated / modified & got detached - assert pr2.comments[3:] == [ - (users['user'], f"@{users['user']} WARNING: the latest change ({pr2_id.head}) triggered " - f"a conflict when updating the next forward-port " - f"({pr3_id.display_name}), and has been ignored.\n\n" - f"You will need to update this pull request " - f"differently, or fix the issue by hand on " - f"{pr3_id.display_name}.") - ] - # skip comments: - # 1. link to status page - # 2. forward-port chain thing - assert repo.get_pr(pr3_id.number).comments[2:] == [ - (users['user'], re_matches(f'''\ -@{users['user']} WARNING: the update of {pr2_id.display_name} to {pr2_id.head} has caused a \ -conflict in this pull request, data may have been lost. - -stdout: -```.*? -CONFLICT \(add/add\): Merge conflict in h.*? -``` - -stderr: -``` -\\d{{2}}:\\d{{2}}:\\d{{2}}.\\d+ .* {pr2_id.head} -error: could not apply [0-9a-f]+\\.\\.\\. 
newfiles -''', re.DOTALL)) - ] diff --git a/forwardport/tests/test_weird.py b/forwardport/tests/test_weird.py deleted file mode 100644 index aa9320c9..00000000 --- a/forwardport/tests/test_weird.py +++ /dev/null @@ -1,814 +0,0 @@ -# -*- coding: utf-8 -*- -import pytest - -from utils import seen, Commit, to_pr - - -def make_basic(env, config, make_repo, *, fp_token, fp_remote): - """ Creates a basic repo with 3 forking branches - - 0 -- 1 -- 2 -- 3 -- 4 : a - | - `-- 11 -- 22 : b - | - `-- 111 : c - each branch just adds and modifies a file (resp. f, g and h) through the - contents sequence a b c d e - """ - Projects = env['runbot_merge.project'] - project = Projects.search([('name', '=', 'myproject')]) - if not project: - project = Projects.create({ - 'name': 'myproject', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'fp_github_token': fp_token and config['github']['token'], - 'branch_ids': [ - (0, 0, {'name': 'a', 'sequence': 2, 'fp_target': True}), - (0, 0, {'name': 'b', 'sequence': 1, 'fp_target': True}), - (0, 0, {'name': 'c', 'sequence': 0, 'fp_target': True}), - ], - }) - - prod = make_repo('proj') - with prod: - a_0, a_1, a_2, a_3, a_4, = prod.make_commits( - None, - Commit("0", tree={'f': 'a'}), - Commit("1", tree={'f': 'b'}), - Commit("2", tree={'f': 'c'}), - Commit("3", tree={'f': 'd'}), - Commit("4", tree={'f': 'e'}), - ref='heads/a', - ) - b_1, b_2 = prod.make_commits( - a_2, - Commit('11', tree={'g': 'a'}), - Commit('22', tree={'g': 'b'}), - ref='heads/b', - ) - prod.make_commits( - b_1, - Commit('111', tree={'h': 'a'}), - ref='heads/c', - ) - other = prod.fork() - repo = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': prod.name, - 'required_statuses': 'legal/cla,ci/runbot', - 'fp_remote_target': fp_remote and other.name, - }) - env['res.partner'].search([ - ('github_login', '=', config['role_reviewer']['user']) - ]).write({ - 'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})] - }) - env['res.partner'].search([ - ('github_login', '=', config['role_self_reviewer']['user']) - ]).write({ - 'review_rights': [(0, 0, {'repository_id': repo.id, 'self_review': True})] - }) - - return project, prod, other - -def test_no_token(env, config, make_repo): - """ if there's no token on the repo, nothing should break though should - log - """ - # create project configured with remotes on the repo but no token - proj, prod, _ = make_basic(env, config, make_repo, fp_token=False, fp_remote=True) - - with prod: - prod.make_commits( - 'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch' - ) - pr = prod.make_pr(target='a', head='abranch') - prod.post_status(pr.head, 'success', 'legal/cla') - prod.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - # wanted to use capfd, however it's not compatible with the subprocess - # being created beforehand and server() depending on capfd() would remove - # all its output from the normal pytest capture (dumped on test failure) - # - # so I'd really have to hand-roll the entire thing by having server() - # pipe stdout/stderr to temp files, yield those temp files, and have the - # tests mess around with reading those files, and finally have the server - # dump the file contents back to the test runner's stdout/stderr on - # fixture teardown... 
- env.run_crons() - assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\ - "should not have created forward port" - -def test_remove_token(env, config, make_repo): - proj, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - proj.fp_github_token = False - - with prod: - prod.make_commits( - 'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch' - ) - pr = prod.make_pr(target='a', head='abranch') - prod.post_status(pr.head, 'success', 'legal/cla') - prod.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - env.run_crons() - assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\ - "should not have created forward port" - -def test_no_target(env, config, make_repo): - proj, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=False) - - with prod: - prod.make_commits( - 'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch' - ) - pr = prod.make_pr(target='a', head='abranch') - prod.post_status(pr.head, 'success', 'legal/cla') - prod.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - - env.run_crons() - assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\ - "should not have created forward port" - -def test_failed_staging(env, config, make_repo): - proj, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - - reviewer = config['role_reviewer']['token'] - with prod: - prod.make_commits('a', Commit('c', tree={'a': '0'}), ref='heads/abranch') - pr1 = prod.make_pr(target='a', head='abranch') - prod.post_status(pr1.head, 'success', 'legal/cla') - prod.post_status(pr1.head, 'success', 'ci/runbot') - pr1.post_comment('hansen r+', reviewer) - env.run_crons() - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr1_id, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr2_id.parent_id == pr2_id.source_id == pr1_id - with prod: - prod.post_status(pr2_id.head, 'success', 'legal/cla') - prod.post_status(pr2_id.head, 'success', 'ci/runbot') - env.run_crons() - - pr1_id, pr2_id, pr3_id = env['runbot_merge.pull_requests'].search([], order='number') - pr3 = prod.get_pr(pr3_id.number) - with prod: - prod.post_status(pr3_id.head, 'success', 'legal/cla') - prod.post_status(pr3_id.head, 'success', 'ci/runbot') - pr3.post_comment('%s r+' % proj.fp_github_name, reviewer) - env.run_crons() - - prod.commit('staging.c') - - with prod: - prod.post_status('staging.b', 'success', 'legal/cla') - prod.post_status('staging.b', 'success', 'ci/runbot') - prod.post_status('staging.c', 'failure', 'ci/runbot') - env.run_crons() - - pr3_head = env['runbot_merge.commit'].search([ - ('sha', '=', pr3_id.head), - ]) - assert len(pr3_head) == 1 - - assert not pr3_id.batch_id, "check that the PR indeed has no batch anymore" - assert not pr3_id.batch_ids.filtered(lambda b: b.active) - - assert len(env['runbot_merge.batch'].search([ - ('prs', 'in', pr3_id.id), - '|', ('active', '=', True), - ('active', '=', False), - ])) == 2, "check that there do exist batches" - - # 
send a new status to the PR, as if somebody had rebuilt it or something - with prod: - pr3.post_comment('hansen retry', reviewer) - prod.post_status(pr3_id.head, 'success', 'foo/bar') - prod.post_status(pr3_id.head, 'success', 'legal/cla') - assert pr3_head.to_check, "check that the commit was updated as to process" - env.run_crons() - assert not pr3_head.to_check, "check that the commit was processed" - -class TestNotAllBranches: - """ Check that forward-ports don't behave completely insanely when not all - branches are supported on all repositories. - - repo A branches a -> b -> c - a0 -> a1 -> a2 branch a - `-> a11 -> a22 branch b - `-> a111 branch c - repo B branches a -> c - b0 -> b1 -> b2 branch a - | - `-> b000 branch c - """ - @pytest.fixture - def repos(self, env, config, make_repo, setreviewers): - a = make_repo('A') - with a: - _, a_, _ = a.make_commits( - None, - Commit('a0', tree={'a': '0'}), - Commit('a1', tree={'a': '1'}), - Commit('a2', tree={'a': '2'}), - ref='heads/a' - ) - b_, _ = a.make_commits( - a_, - Commit('a11', tree={'b': '11'}), - Commit('a22', tree={'b': '22'}), - ref='heads/b' - ) - a.make_commits(b_, Commit('a111', tree={'c': '111'}), ref='heads/c') - a_dev = a.fork() - b = make_repo('B') - with b: - _, _a, _ = b.make_commits( - None, - Commit('b0', tree={'a': 'x'}), - Commit('b1', tree={'a': 'y'}), - Commit('b2', tree={'a': 'z'}), - ref='heads/a' - ) - b.make_commits(_a, Commit('b000', tree={'c': 'x'}), ref='heads/c') - b_dev = b.fork() - - project = env['runbot_merge.project'].create({ - 'name': 'proj', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'fp_github_token': config['github']['token'], - 'branch_ids': [ - (0, 0, {'name': 'a', 'fp_sequence': 2, 'fp_target': True}), - (0, 0, {'name': 'b', 'fp_sequence': 1, 'fp_target': True}), - (0, 0, {'name': 'c', 'fp_sequence': 0, 'fp_target': True}), - ] - }) - repo_a = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': a.name, - 'required_statuses': 'ci/runbot', - 'fp_remote_target': a_dev.name, - }) - repo_b = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': b.name, - 'required_statuses': 'ci/runbot', - 'fp_remote_target': b_dev.name, - 'branch_filter': '[("name", "in", ["a", "c"])]', - }) - setreviewers(repo_a, repo_b) - return project, a, a_dev, b, b_dev - - def test_single_first(self, env, repos, config): - """ A merge in A.a should be forward-ported to A.b and A.c - """ - project, a, a_dev, b, _ = repos - with a, a_dev: - [c] = a_dev.make_commits('a', Commit('pr', tree={'pr': '1'}), ref='heads/change') - pr = a.make_pr(target='a', title="a pr", head=a_dev.owner + ':change') - a.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - p = env['runbot_merge.pull_requests'].search([('repository.name', '=', a.name), ('number', '=', pr.number)]) - env.run_crons() - assert p.staging_id - with a, b: - for repo in a, b: - repo.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - a_head = a.commit('a') - assert a_head.message.startswith('pr\n\n') - assert a.read_tree(a_head) == {'a': '2', 'pr': '1'} - - pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') - with a: - a.post_status(pr1.head, 'success', 'ci/runbot') - env.run_crons() - - pr0, pr1, pr2 = env['runbot_merge.pull_requests'].search([], order='number') - with a: - a.post_status(pr2.head, 'success', 'ci/runbot') - a.get_pr(pr2.number).post_comment( - '%s r+' % project.fp_github_name, - 
config['role_reviewer']['token']) - env.run_crons() - assert pr1.staging_id - assert pr2.staging_id - with a, b: - a.post_status('staging.b', 'success', 'ci/runbot') - a.post_status('staging.c', 'success', 'ci/runbot') - b.post_status('staging.c', 'success', 'ci/runbot') - env.run_crons() - - assert pr0.state == 'merged' - assert pr1.state == 'merged' - assert pr2.state == 'merged' - assert a.read_tree(a.commit('b')) == {'a': '1', 'b': '22', 'pr': '1'} - assert a.read_tree(a.commit('c')) == {'a': '1', 'b': '11', 'c': '111', 'pr': '1'} - - def test_single_second(self, env, repos, config): - """ A merge in B.a should "skip ahead" to B.c - """ - project, a, _, b, b_dev = repos - with b, b_dev: - [c] = b_dev.make_commits('a', Commit('pr', tree={'pr': '1'}), ref='heads/change') - pr = b.make_pr(target='a', title="a pr", head=b_dev.owner + ':change') - b.post_status(c, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with a, b: - a.post_status('staging.a', 'success', 'ci/runbot') - b.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - assert b.read_tree(b.commit('a')) == {'a': 'z', 'pr': '1'} - - pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') - with b: - b.post_status(pr1.head, 'success', 'ci/runbot') - b.get_pr(pr1.number).post_comment( - '%s r+' % project.fp_github_name, - config['role_reviewer']['token']) - env.run_crons() - with a, b: - a.post_status('staging.c', 'success', 'ci/runbot') - b.post_status('staging.c', 'success', 'ci/runbot') - env.run_crons() - - assert pr0.state == 'merged' - assert pr1.state == 'merged' - assert b.read_tree(b.commit('c')) == {'a': 'y', 'c': 'x', 'pr': '1'} - - def test_both_first(self, env, repos, config, users): - """ A merge in A.a, B.a should... not be forward-ported at all? - """ - project, a, a_dev, b, b_dev = repos - with a, a_dev: - [c_a] = a_dev.make_commits('a', Commit('pr a', tree={'pr': 'a'}), ref='heads/change') - pr_a = a.make_pr(target='a', title='a pr', head=a_dev.owner + ':change') - a.post_status(c_a, 'success', 'ci/runbot') - pr_a.post_comment('hansen r+', config['role_reviewer']['token']) - with b, b_dev: - [c_b] = b_dev.make_commits('a', Commit('pr b', tree={'pr': 'b'}), ref='heads/change') - pr_b = b.make_pr(target='a', title='b pr', head=b_dev.owner + ':change') - b.post_status(c_b, 'success', 'ci/runbot') - pr_b.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with a, b: - for repo in a, b: - repo.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr_a_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', a.name), - ('number', '=', pr_a.number), - ]) - pr_b_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', b.name), - ('number', '=', pr_b.number) - ]) - assert pr_a_id.state == pr_b_id.state == 'merged' - assert env['runbot_merge.pull_requests'].search([]) == pr_a_id | pr_b_id - # should have refused to create a forward port because the PRs have - # different next target - assert pr_a.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_a, users), - (users['user'], "@%s @%s this pull request can not be forward ported:" - " next branch is 'b' but linked pull request %s " - "has a next branch 'c'." 
% ( - users['user'], users['reviewer'], pr_b_id.display_name, - )), - ] - assert pr_b.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_b, users), - (users['user'], "@%s @%s this pull request can not be forward ported:" - " next branch is 'c' but linked pull request %s " - "has a next branch 'b'." % ( - users['user'], users['reviewer'], pr_a_id.display_name, - )), - ] - -def test_new_intermediate_branch(env, config, make_repo): - """ In the case of a freeze / release a new intermediate branch appears in - the sequence. New or ongoing forward ports should pick it up just fine (as - the "next target" is decided when a PR is ported forward) however this is - an issue for existing yet-to-be-merged sequences e.g. given the branches - 1.0, 2.0 and master, if a branch 3.0 is forked off from master and inserted - before it, we need to create a new *intermediate* forward port PR - """ - def validate(commit): - prod.post_status(commit, 'success', 'ci/runbot') - prod.post_status(commit, 'success', 'legal/cla') - project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - original_c_tree = prod.read_tree(prod.commit('c')) - prs = [] - with prod: - for i in ['0', '1', '2']: - prod.make_commits('a', Commit(i, tree={i:i}), ref='heads/branch%s' % i) - pr = prod.make_pr(target='a', head='branch%s' % i) - prs.append(pr) - validate(pr.head) - pr.post_comment('hansen r+', config['role_reviewer']['token']) - # cancel validation of PR2 - prod.post_status(prs[2].head, 'failure', 'ci/runbot') - # also add a PR targeting b forward-ported to c, in order to check - # for an insertion right after the source - prod.make_commits('b', Commit('x', tree={'x': 'x'}), ref='heads/branchx') - prx = prod.make_pr(target='b', head='branchx') - validate(prx.head) - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - validate('staging.a') - validate('staging.b') - env.run_crons() - - # should have merged pr1, pr2 and prx and created their forward ports, now - # validate pr0's FP so the c-targeted FP is created - PRs = env['runbot_merge.pull_requests'] - pr0_id = PRs.search([ - ('repository.name', '=', prod.name), - ('number', '=', prs[0].number), - ]) - pr0_fp_id = PRs.search([ - ('source_id', '=', pr0_id.id), - ]) - assert pr0_fp_id - assert pr0_fp_id.target.name == 'b' - with prod: - validate(pr0_fp_id.head) - env.run_crons() - original0 = PRs.search([('parent_id', '=', pr0_fp_id.id)]) - assert original0, "Could not find FP of PR0 to C" - assert original0.target.name == 'c' - - # also check prx's fp - prx_id = PRs.search([('repository.name', '=', prod.name), ('number', '=', prx.number)]) - prx_fp_id = PRs.search([('source_id', '=', prx_id.id)]) - assert prx_fp_id - assert prx_fp_id.target.name == 'c' - - # NOTE: the branch must be created on git(hub) first, probably - # create new branch forked from the "current master" (c) - c = prod.commit('c').id - with prod: - prod.make_ref('heads/new', c) - currents = {branch.name: branch.id for branch in project.branch_ids} - # insert a branch between "b" and "c" - project.write({ - 'branch_ids': [ - (1, currents['a'], {'fp_sequence': 3}), - (1, currents['b'], {'fp_sequence': 2}), - (0, False, {'name': 'new', 'fp_sequence': 1, 'fp_target': True}), - (1, currents['c'], {'fp_sequence': 0}) - ] - }) - env.run_crons() - descendants = PRs.search([('source_id', '=', pr0_id.id)]) - new0 = descendants - pr0_fp_id - original0 - assert len(new0) == 1 - assert new0.parent_id == pr0_fp_id - assert 
original0.parent_id == new0 - - descx = PRs.search([('source_id', '=', prx_id.id)]) - newx = descx - prx_fp_id - assert len(newx) == 1 - assert newx.parent_id == prx_id - assert prx_fp_id.parent_id == newx - - # finish up: merge pr1 and pr2, ensure all the content is present in both - # "new" (the newly inserted branch) and "c" (the tippity tip) - with prod: # validate pr2 - prod.post_status(prs[2].head, 'success', 'ci/runbot') - env.run_crons() - # merge pr2 - with prod: - validate('staging.a') - env.run_crons() - # ci on pr1/pr2 fp to b - sources = [ - env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', prod.name), - ('number', '=', pr.number), - ]).id - for pr in prs - ] - sources.append(prx_id.id) - # CI all the forward port PRs (shouldn't hurt to re-ci the forward port of - # prs[0] to b aka pr0_fp_id - for target in ['b', 'new', 'c']: - fps = PRs.search([('source_id', 'in', sources), ('target.name', '=', target)]) - with prod: - for fp in fps: - validate(fp.head) - env.run_crons() - # now fps should be the last PR of each sequence, and thus r+-able - with prod: - for pr in fps: - assert pr.target.name == 'c' - prod.get_pr(pr.number).post_comment( - '%s r+' % project.fp_github_name, - config['role_reviewer']['token']) - assert all(p.state == 'merged' for p in PRs.browse(sources)), \ - "all sources should be merged" - assert all(p.state == 'ready' for p in PRs.search([('id', 'not in', sources)])),\ - "All PRs except sources should be ready" - env.run_crons() - with prod: - for target in ['b', 'new', 'c']: - validate('staging.' + target) - env.run_crons() - assert all(p.state == 'merged' for p in PRs.search([])), \ - "All PRs should be merged now" - - assert prod.read_tree(prod.commit('c')) == { - **original_c_tree, - '0': '0', '1': '1', '2': '2', # updates from PRs - 'x': 'x', - }, "check that C got all the updates" - assert prod.read_tree(prod.commit('new')) == { - **original_c_tree, - '0': '0', '1': '1', '2': '2', # updates from PRs - 'x': 'x', - }, "check that new got all the updates (should be in the same state as c really)" - -def test_author_can_close_via_fwbot(env, config, make_repo): - project, prod, xxx = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - other_user = config['role_other'] - other_token = other_user['token'] - other = prod.fork(token=other_token) - - with prod, other: - [c] = other.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/change') - pr = prod.make_pr( - target='a', title='my change', - head=other_user['user'] + ':change', - token=other_token - ) - # should be able to close and open own PR - pr.close(other_token) - pr.open(other_token) - prod.post_status(c, 'success', 'legal/cla') - prod.post_status(c, 'success', 'ci/runbot') - pr.post_comment('%s close' % project.fp_github_name, other_token) - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - assert pr.state == 'open' - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr0_id.number == pr.number - pr1 = prod.get_pr(pr1_id.number) - # `other` can't close fw PR directly, because that requires triage (and even - # write depending on account type) access to the repo, which an external - # contributor probably does not have - with prod, pytest.raises(Exception): - pr1.close(other_token) - # use can close via fwbot - with prod: - pr1.post_comment('%s 
close' % project.fp_github_name, other_token) - env.run_crons() - assert pr1.state == 'closed' - assert pr1_id.state == 'closed' - -def test_skip_ci_all(env, config, make_repo): - project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - - with prod: - prod.make_commits('a', Commit('x', tree={'x': '0'}), ref='heads/change') - pr = prod.make_pr(target='a', head='change') - prod.post_status(pr.head, 'success', 'legal/cla') - prod.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('%s skipci' % project.fp_github_name, config['role_reviewer']['token']) - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', prod.name), - ('number', '=', pr.number) - ]).fw_policy == 'skipci' - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - # run cron a few more times for the fps - env.run_crons() - env.run_crons() - env.run_crons() - - pr0_id, pr1_id, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr1_id.state == 'opened' - assert pr1_id.source_id == pr0_id - assert pr2_id.state == 'opened' - assert pr2_id.source_id == pr0_id - -def test_skip_ci_next(env, config, make_repo): - project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - - with prod: - prod.make_commits('a', Commit('x', tree={'x': '0'}), ref='heads/change') - pr = prod.make_pr(target='a', head='change') - prod.post_status(pr.head, 'success', 'legal/cla') - prod.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with prod: - prod.post_status('staging.a', 'success', 'legal/cla') - prod.post_status('staging.a', 'success', 'ci/runbot') - env.run_crons() - - pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') - with prod: - prod.get_pr(pr1_id.number).post_comment( - '%s skipci' % project.fp_github_name, - config['role_user']['token'] - ) - assert pr0_id.fw_policy == 'skipci' - env.run_crons() - - _, _, pr2_id = env['runbot_merge.pull_requests'].search([], order='number') - assert pr1_id.state == 'opened' - assert pr2_id.state == 'opened' - -def test_retarget_after_freeze(env, config, make_repo, users): - """Turns out it was possible to trip the forwardbot if you're a bit of a - dick: the forward port cron was not resilient to forward port failure in - case of filling in new branches (forward ports existing across a branch - insertion so the fwbot would have to "fill in" for the new branch). - - But it turns out causing such failure is possible by e.g. regargeting the - latter port. In that case the reinsertion task should just do nothing, and - the retargeted PR should be forward-ported normally once merged. 
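    Concretely, as exercised below: a PR merged into b gets a forward port
    targeting c; a branch bprime is then inserted between b and c, and the
    existing c-targeted port is retargeted to bprime before the fill-in job
    runs. That job must complete without creating anything, and once the
    retargeted PR is merged it is forward-ported to c as usual.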
- """ - project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - with prod: - [c] = prod.make_commits('b', Commit('thing', tree={'x': '1'}), ref='heads/mypr') - pr = prod.make_pr(target='b', head='mypr') - prod.post_status(c, 'success', 'ci/runbot') - prod.post_status(c, 'success', 'legal/cla') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - original_pr_id = to_pr(env, pr) - assert original_pr_id.state == 'ready' - assert original_pr_id.staging_id - - with prod: - prod.post_status('staging.b', 'success', 'ci/runbot') - prod.post_status('staging.b', 'success', 'legal/cla') - env.run_crons() - # should have created a pr targeted to C - port_id = env['runbot_merge.pull_requests'].search([('state', 'not in', ('merged', 'closed'))]) - assert len(port_id) == 1 - assert port_id.target.name == 'c' - assert port_id.source_id == original_pr_id - assert port_id.parent_id == original_pr_id - - # because the module doesn't update the ordering of `branch_ids` to take - # `fp_sequence` in account so it's misleading - branch_c, branch_b, branch_a = branches_before = project.branch_ids.sorted('fp_sequence') - assert [branch_a.name, branch_b.name, branch_c.name] == ['a', 'b', 'c'] - # create branch so cron runs correctly - with prod: prod.make_ref('heads/bprime', prod.get_ref('c')) - project.write({ - 'branch_ids': [ - (1, branch_c.id, {'sequence': 1, 'fp_sequence': 20}), - (0, 0, {'name': 'bprime', 'sequence': 2, 'fp_sequence': 20, 'fp_target': True}), - (1, branch_b.id, {'sequence': 3, 'fp_sequence': 20}), - (1, branch_a.id, {'sequence': 4, 'fp_sequence': 20}), - ] - }) - new_branch = project.branch_ids - branches_before - assert new_branch.name == 'bprime' - - # should have added a job for the new fp - job = env['forwardport.batches'].search([]) - assert job - - # fuck up yo life: retarget the existing FP PR to the new branch - port_id.target = new_branch.id - - env.run_crons('forwardport.port_forward') - assert not job.exists(), "job should have succeeded and apoptosed" - - # since the PR was "already forward-ported" to the new branch it should not - # be touched - assert env['runbot_merge.pull_requests'].search([('state', 'not in', ('merged', 'closed'))]) == port_id - - # merge the retargered PR - port_pr = prod.get_pr(port_id.number) - with prod: - prod.post_status(port_pr.head, 'success', 'ci/runbot') - prod.post_status(port_pr.head, 'success', 'legal/cla') - port_pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - with prod: - prod.post_status('staging.bprime', 'success', 'ci/runbot') - prod.post_status('staging.bprime', 'success', 'legal/cla') - env.run_crons() - - new_pr_id = env['runbot_merge.pull_requests'].search([('state', 'not in', ('merged', 'closed'))]) - assert len(new_pr_id) == 1 - assert new_pr_id.parent_id == port_id - assert new_pr_id.target == branch_c - -def test_approve_draft(env, config, make_repo, users): - _, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - - with prod: - prod.make_commits('a', Commit('x', tree={'x': '0'}), ref='heads/change') - pr = prod.make_pr(target='a', head='change', draft=True) - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.state == 'opened' - assert pr.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr, users), - (users['user'], f"I'm sorry, @{users['reviewer']}: draft PRs can not be approved."), - ] - - with prod: - pr.draft = False - 
assert pr.draft is False - with prod: - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - assert pr_id.state == 'approved' - -def test_freeze(env, config, make_repo, users): - """Freeze: - - - should not forward-port the freeze PRs themselves - """ - project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True) - # branches here are "a" (older), "b", and "c" (master) - with prod: - [root, _] = prod.make_commits( - None, - Commit('base', tree={'version': '', 'f': '0'}), - Commit('release 1.0', tree={'version': '1.0'}), - ref='heads/b' - ) - prod.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/c') - with prod: - prod.make_commits( - 'c', - Commit('Release 1.1', tree={'version': '1.1'}), - ref='heads/release-1.1' - ) - release = prod.make_pr(target='c', head='release-1.1') - env.run_crons() - - w = project.action_prepare_freeze() - assert w['res_model'] == 'runbot_merge.project.freeze' - w_id = env[w['res_model']].browse([w['res_id']]) - assert w_id.release_pr_ids.repository_id.name == prod.name - release_id = to_pr(env, release) - w_id.release_pr_ids.pr_id = release_id.id - - assert not w_id.errors - w_id.action_freeze() - # run crons to process the feedback, run a second time in case of e.g. - # forward porting - env.run_crons() - env.run_crons() - - assert release_id.state == 'merged' - assert not env['runbot_merge.pull_requests'].search([ - ('state', '!=', 'merged') - ]), "the release PRs should not be forward-ported" diff --git a/mergebot_test_utils/utils.py b/mergebot_test_utils/utils.py deleted file mode 100644 index 092a1241..00000000 --- a/mergebot_test_utils/utils.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -import itertools -import re - -from lxml import html - -MESSAGE_TEMPLATE = """{message} - -closes {repo}#{number} - -{headers}Signed-off-by: {name} <{email}>""" -# target branch '-' source branch '-' base64 unique '-fw' -REF_PATTERN = r'{target}-{source}-[a-zA-Z0-9_-]{{4}}-fw' - -class Commit: - def __init__(self, message, *, author=None, committer=None, tree, reset=False): - self.id = None - self.message = message - self.author = author - self.committer = committer - self.tree = tree - self.reset = reset - -def validate_all(repos, refs, contexts=('ci/runbot', 'legal/cla')): - """ Post a "success" status for each context on each ref of each repo - """ - for repo, branch, context in itertools.product(repos, refs, contexts): - repo.post_status(branch, 'success', context) - -def get_partner(env, gh_login): - return env['res.partner'].search([('github_login', '=', gh_login)]) - -def _simple_init(repo): - """ Creates a very simple initialisation: a master branch with a commit, - and a PR by 'user' with two commits, targeted to the master branch - """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - return prx - -class re_matches: - def __init__(self, pattern, flags=0): - self._r = re.compile(pattern, flags) - - def __eq__(self, text): - return self._r.match(text) - - def __repr__(self): - return self._r.pattern + '...' - -def seen(env, pr, users): - return users['user'], f'[Pull request status dashboard]({to_pr(env, pr).url}).' 
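For orientation, the helpers in this module are typically combined as below. This is a minimal hypothetical sketch, not part of the removed module: the env, config, make_repo and users fixtures come from the test suite's conftest, and the branch/file names are illustrative only.

def test_helpers_sketch(env, config, make_repo, users):
    # set up the three-branch project and its repository, then open a PR on 'a'
    prod, _ = make_basic(env, config, make_repo)
    with prod:
        [c] = prod.make_commits('a', Commit('feature', tree={'f': 'x'}), ref='heads/feature')
        pr = prod.make_pr(target='a', head='feature')
        validate_all([prod], [c])  # posts 'success' for ci/runbot and legal/cla
        pr.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()

    # to_pr() resolves the GitHub PR to its runbot_merge.pull_requests record
    assert to_pr(env, pr).state == 'ready'
    # the bot's first comment is the status-dashboard link matched via seen()
    assert pr.comments == [
        (users['reviewer'], 'hansen r+'),
        seen(env, pr, users),
    ]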
- -def make_basic(env, config, make_repo, *, reponame='proj', project_name='myproject'): - """ Creates a basic repo with 3 forking branches - - f = 0 -- 1 -- 2 -- 3 -- 4 : a - | - g = `-- 11 -- 22 : b - | - h = `-- 111 : c - each branch just adds and modifies a file (resp. f, g and h) through the - contents sequence a b c d e - """ - Projects = env['runbot_merge.project'] - project = Projects.search([('name', '=', project_name)]) - if not project: - project = env['runbot_merge.project'].create({ - 'name': project_name, - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'fp_github_token': config['github']['token'], - 'branch_ids': [ - (0, 0, {'name': 'a', 'fp_sequence': 10, 'fp_target': True}), - (0, 0, {'name': 'b', 'fp_sequence': 8, 'fp_target': True}), - (0, 0, {'name': 'c', 'fp_sequence': 6, 'fp_target': True}), - ], - }) - - prod = make_repo(reponame) - with prod: - a_0, a_1, a_2, a_3, a_4, = prod.make_commits( - None, - Commit("0", tree={'f': 'a'}), - Commit("1", tree={'f': 'b'}), - Commit("2", tree={'f': 'c'}), - Commit("3", tree={'f': 'd'}), - Commit("4", tree={'f': 'e'}), - ref='heads/a', - ) - b_1, b_2 = prod.make_commits( - a_2, - Commit('11', tree={'g': 'a'}), - Commit('22', tree={'g': 'b'}), - ref='heads/b', - ) - prod.make_commits( - b_1, - Commit('111', tree={'h': 'a'}), - ref='heads/c', - ) - other = prod.fork() - repo = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': prod.name, - 'required_statuses': 'legal/cla,ci/runbot', - 'fp_remote_target': other.name, - }) - env['res.partner'].search([ - ('github_login', '=', config['role_reviewer']['user']) - ]).write({ - 'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})] - }) - env['res.partner'].search([ - ('github_login', '=', config['role_self_reviewer']['user']) - ]).write({ - 'review_rights': [(0, 0, {'repository_id': repo.id, 'self_review': True})] - }) - - return prod, other - -def pr_page(page, pr): - return html.fromstring(page(f'/{pr.repo.name}/pull/{pr.number}')) - -def to_pr(env, pr): - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', pr.repo.name), - ('number', '=', pr.number), - ]) - assert len(pr) == 1, f"Expected to find {pr.repo.name}#{pr.number}, got {pr}." - return pr - -def part_of(label, pr_id, *, separator='\n\n'): - """ Adds the "part-of" pseudo-header in the footer. - """ - return f'{label}{separator}Part-of: {pr_id.display_name}' diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index c0654147..00000000 --- a/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -matplotlib==3.5.0 -unidiff -docker==4.1.0; python_version < '3.10' -docker==5.0.3; python_version >= '3.10' # (Jammy) diff --git a/runbot/__init__.py b/runbot/__init__.py deleted file mode 100644 index 718604bc..00000000 --- a/runbot/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- - -from . import controllers -from . import models -from . import common -from . import container -from . 
import wizards diff --git a/runbot/__manifest__.py b/runbot/__manifest__.py deleted file mode 100644 index 2b8c49b1..00000000 --- a/runbot/__manifest__.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -{ - 'name': "runbot", - 'summary': "Runbot", - 'description': "Runbot for Odoo 15.0", - 'author': "Odoo SA", - 'website': "http://runbot.odoo.com", - 'category': 'Website', - 'version': '5.1', - 'application': True, - 'depends': ['base', 'base_automation', 'website'], - 'data': [ - 'templates/dockerfile.xml', - 'data/dockerfile_data.xml', - 'data/build_parse.xml', - 'data/error_link.xml', - 'data/runbot_build_config_data.xml', - 'data/runbot_data.xml', - 'data/runbot_error_regex_data.xml', - 'data/website_data.xml', - - 'security/runbot_security.xml', - 'security/ir.model.access.csv', - 'security/ir.rule.csv', - - 'templates/utils.xml', - 'templates/badge.xml', - 'templates/batch.xml', - 'templates/branch.xml', - 'templates/build.xml', - 'templates/build_stats.xml', - 'templates/bundle.xml', - 'templates/commit.xml', - 'templates/dashboard.xml', - 'templates/frontend.xml', - 'templates/git.xml', - 'templates/nginx.xml', - 'templates/build_error.xml', - - 'views/branch_views.xml', - 'views/build_error_views.xml', - 'views/build_views.xml', - 'views/bundle_views.xml', - 'views/codeowner_views.xml', - 'views/commit_views.xml', - 'views/config_views.xml', - 'views/dashboard_views.xml', - 'views/dockerfile_views.xml', - 'views/error_log_views.xml', - 'views/host_views.xml', - 'views/repo_views.xml', - 'views/res_config_settings_views.xml', - 'views/stat_views.xml', - 'views/upgrade.xml', - 'views/warning_views.xml', - 'views/custom_trigger_wizard_views.xml', - 'wizards/stat_regex_wizard_views.xml', - 'views/menus.xml', - ], - 'license': 'LGPL-3', - - 'assets': { - 'web.assets_backend': [ - 'runbot/static/src/js/json_field.js', - ], - } - -} diff --git a/runbot/common.py b/runbot/common.py deleted file mode 100644 index 20292b96..00000000 --- a/runbot/common.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- - -import contextlib -import itertools -import logging -import psycopg2 -import re -import socket -import time -import os - -from collections import OrderedDict -from datetime import timedelta -from babel.dates import format_timedelta -from markupsafe import Markup - -from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT, html_escape - -_logger = logging.getLogger(__name__) - -dest_reg = re.compile(r'^\d{5,}-.+$') - - -class RunbotException(Exception): - pass - - -def fqdn(): - return socket.getfqdn() - - -def time2str(t): - return time.strftime(DEFAULT_SERVER_DATETIME_FORMAT, t) - - -def dt2time(datetime): - """Convert datetime to time""" - return time.mktime(datetime.timetuple()) - - -def now(): - return time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) - - -def findall(filename, pattern): - return set(re.findall(pattern, open(filename).read())) - - -def grep(filename, string): - if os.path.isfile(filename): - return find(filename, string) != -1 - return False - - -def find(filename, string): - return open(filename).read().find(string) - - -def uniq_list(l): - return OrderedDict.fromkeys(l).keys() - - -def flatten(list_of_lists): - return list(itertools.chain.from_iterable(list_of_lists)) - - -def rfind(filename, pattern): - """Determine in something in filename matches the pattern""" - if os.path.isfile(filename): - regexp = re.compile(pattern, re.M) - with open(filename, 'r') as f: - if regexp.findall(f.read()): - return True - return False - - -def time_delta(time): - 
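    # helper for s2human/s2human_long below: a bare number of seconds is
    # negated, presumably so that an "age in seconds" renders as a past delta
    # ('... ago') once add_direction is enabled in format_timedelta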
if isinstance(time, timedelta): - return time - return timedelta(seconds=-time) - - -def s2human(time): - """Convert a time in second into an human readable string""" - return format_timedelta( - time_delta(time), - format="narrow", - threshold=2.1, - ) - - -def s2human_long(time): - return format_timedelta( - time_delta(time), - threshold=2.1, - add_direction=True, locale='en' - ) - - -@contextlib.contextmanager -def local_pgadmin_cursor(): - cnx = None - try: - cnx = psycopg2.connect("dbname=postgres") - cnx.autocommit = True # required for admin commands - yield cnx.cursor() - finally: - if cnx: - cnx.close() - - -def list_local_dbs(additionnal_conditions=None): - additionnal_condition_str = '' - if additionnal_conditions: - additionnal_condition_str = 'AND (%s)' % ' OR '.join(additionnal_conditions) - with local_pgadmin_cursor() as local_cr: - local_cr.execute(""" - SELECT datname - FROM pg_database - WHERE pg_get_userbyid(datdba) = current_user - %s - """ % additionnal_condition_str) - return [d[0] for d in local_cr.fetchall()] - - -def pseudo_markdown(text): - text = html_escape(text) - - # first, extract code blocs: - codes = [] - def code_remove(match): - codes.append(match.group(1)) - return f'<code>{len(codes)-1}</code>' - - patterns = { - r'`(.+?)`': code_remove, - r'\*\*(.+?)\*\*': '<strong>\\g<1></strong>', - r'~~(.+?)~~': '<del>\\g<1></del>', # it's not official markdown but who cares - r'__(.+?)__': '<ins>\\g<1></ins>', # same here, maybe we should change the method name - r'\r?\n': '<br/>', - } - - for p, b in patterns.items(): - text = re.sub(p, b, text, flags=re.DOTALL) - - # icons - re_icon = re.compile(r'@icon-([a-z0-9-]+)') - text = re_icon.sub('<i class="fa fa-\\g<1>"></i>', text) - - # links - re_links = re.compile(r'\[(.+?)\]\((.+?)\)') - text = re_links.sub('<a href="\\g<2>">\\g<1></a>', text) - - def code_replace(match): - return f'<code>{codes[int(match.group(1))]}</code>' - - text = Markup(re.sub(r'<code>(\d+)</code>', code_replace, text, flags=re.DOTALL)) - return text diff --git a/runbot/container.py b/runbot/container.py deleted file mode 100644 index 7796e607..00000000 --- a/runbot/container.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- -"""Containerize builds - -The docker image used for the build is always tagged like this: - odoo:runbot_tests -This file contains helpers to containerize builds with Docker. -When testing this file: - the first parameter should be a directory containing Odoo. 
- The second parameter is the exposed port -""" -import configparser -import io -import logging -import os -import re -import subprocess -import warnings - -# unsolved issue https://github.com/docker/docker-py/issues/2928 -with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", - message="The distutils package is deprecated.*", - category=DeprecationWarning - ) - import docker - - -_logger = logging.getLogger(__name__) - - -class Command(): - - def __init__(self, pres, cmd, posts, finals=None, config_tuples=None, cmd_checker=None): - """ Command object that represent commands to run in Docker container - :param pres: list of pre-commands - :param cmd: list of main command only run if the pres commands succeed (&&) - :param posts: list of post commands posts only run if the cmd command succedd (&&) - :param finals: list of finals commands always executed - :param config_tuples: list of key,value tuples to write in config file - :param cmd_checker: a checker object that must have a `_cmd_check` method that will be called at build - returns a string of the full command line to run - """ - self.pres = pres or [] - self.cmd = cmd - self.posts = posts or [] - self.finals = finals or [] - self.config_tuples = config_tuples or [] - self.cmd_checker = cmd_checker - - def __getattr__(self, name): - return getattr(self.cmd, name) - - def __getitem__(self, key): - return self.cmd[key] - - def __add__(self, l): - return Command(self.pres, self.cmd + l, self.posts, self.finals, self.config_tuples, self.cmd_checker) - - def __str__(self): - return ' '.join(self) - - def __repr__(self): - return self.build().replace('&& ', '&&\n').replace('|| ', '||\n\t').replace(';', ';\n') - - def build(self): - if self.cmd_checker: - self.cmd_checker._cmd_check(self) - cmd_chain = [] - cmd_chain += [' '.join(pre) for pre in self.pres if pre] - cmd_chain.append(' '.join(self)) - cmd_chain += [' '.join(post) for post in self.posts if post] - cmd_chain = [' && '.join(cmd_chain)] - cmd_chain += [' '.join(final) for final in self.finals if final] - return ' ; '.join(cmd_chain) - - def add_config_tuple(self, option, value): - assert '-' not in option - self.config_tuples.append((option, value)) - - def get_config(self, starting_config=''): - """ returns a config file content based on config tuples and - and eventually update the starting config - """ - config = configparser.ConfigParser() - config.read_string(starting_config) - if self.config_tuples and not config.has_section('options'): - config.add_section('options') - for option, value in self.config_tuples: - config.set('options', option, value) - res = io.StringIO() - config.write(res) - res.seek(0) - return res.read() - - -def docker_build(build_dir, image_tag): - return _docker_build(build_dir, image_tag) - - -def _docker_build(build_dir, image_tag): - """Build the docker image - :param build_dir: the build directory that contains Dockerfile. 
- :param image_tag: name used to tag the resulting docker image - :return: tuple(success, msg) where success is a boolean and msg is the error message or None - """ - docker_client = docker.from_env() - try: - docker_client.images.build(path=build_dir, tag=image_tag, rm=True) - except docker.errors.APIError as e: - _logger.error('Build of image %s failed with this API error:', image_tag) - return (False, e.explanation) - except docker.errors.BuildError as e: - _logger.error('Build of image %s failed with this BUILD error:', image_tag) - msg = f"{e.msg}\n{''.join(l.get('stream') or '' for l in e.build_log)}" - return (False, msg) - _logger.info('Dockerfile %s finished build', image_tag) - return (True, None) - - -def docker_run(*args, **kwargs): - return _docker_run(*args, **kwargs) - - -def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False, image_tag=False, exposed_ports=None, cpu_limit=None, memory=None, preexec_fn=None, ro_volumes=None, env_variables=None): - """Run tests in a docker container - :param run_cmd: command string to run in container - :param log_path: path to the logfile that will contain odoo stdout and stderr - :param build_dir: the build directory that contains the Odoo sources to build. - This directory is shared as a volume with the container - :param container_name: used to give a name to the container for later reference - :param image_tag: Docker image tag name to select which docker image to use - :param exposed_ports: if not None, starting at 8069, ports will be exposed as exposed_ports numbers - :param memory: memory limit in bytes for the container - :params ro_volumes: dict of dest:source volumes to mount readonly in builddir - :params env_variables: list of environment variables - """ - assert cmd and log_path and build_dir and container_name - run_cmd = cmd - image_tag = image_tag or 'odoo:DockerDefault' - container_name = sanitize_container_name(container_name) - if isinstance(run_cmd, Command): - cmd_object = run_cmd - run_cmd = cmd_object.build() - else: - cmd_object = Command([], run_cmd.split(' '), []) - _logger.info('Docker run command: %s', run_cmd) - run_cmd = 'cd /data/build;touch start-%s;%s;cd /data/build;touch end-%s' % (container_name, run_cmd, container_name) - docker_clear_state(container_name, build_dir) # ensure that no state are remaining - open(os.path.join(build_dir, 'exist-%s' % container_name), 'w+').close() - logs = open(log_path, 'w') - logs.write("Docker command:\n%s\n=================================================\n" % cmd_object) - # create start script - volumes = { - '/var/run/postgresql': {'bind': '/var/run/postgresql', 'mode': 'rw'}, - f'{build_dir}': {'bind': '/data/build', 'mode': 'rw'}, - f'{log_path}': {'bind': '/data/buildlogs.txt', 'mode': 'rw'} - } - - if ro_volumes: - for dest, source in ro_volumes.items(): - logs.write("Adding readonly volume '%s' pointing to %s \n" % (dest, source)) - volumes[source] = {'bind': dest, 'mode': 'ro'} - logs.close() - - ports = {} - if exposed_ports: - for dp, hp in enumerate(exposed_ports, start=8069): - ports[f'{dp}/tcp'] = ('127.0.0.1', hp) - - ulimits = [docker.types.Ulimit(name='core', soft=0, hard=0)] # avoid core dump in containers - if cpu_limit: - ulimits.append(docker.types.Ulimit(name='cpu', soft=cpu_limit, hard=cpu_limit)) - - docker_client = docker.from_env() - container = docker_client.containers.run( - image_tag, - name=container_name, - volumes=volumes, - shm_size='128m', - mem_limit=memory, - ports=ports, - ulimits=ulimits, - 
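        # remaining run() options: the build's environment variables, init as
        # PID 1, a bash -c wrapper that redirects the shell's stdout/stderr
        # into /data/buildlogs.txt before running the command, auto-removal of
        # the container on exit, and detach so the caller does not block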
environment=env_variables, - init=True, - command=['/bin/bash', '-c', - f'exec &>> /data/buildlogs.txt ;{run_cmd}'], - auto_remove=True, - detach=True - ) - if container.status not in ('running', 'created') : - _logger.error('Container %s started but status is not running or created: %s', container_name, container.status) # TODO cleanup - else: - _logger.info('Started Docker container %s', container_name) - return - - -def docker_stop(container_name, build_dir=None): - return _docker_stop(container_name, build_dir) - - -def _docker_stop(container_name, build_dir): - """Stops the container named container_name""" - container_name = sanitize_container_name(container_name) - _logger.info('Stopping container %s', container_name) - docker_client = docker.from_env() - if build_dir: - end_file = os.path.join(build_dir, 'end-%s' % container_name) - subprocess.run(['touch', end_file]) - else: - _logger.info('Stopping docker without defined build_dir') - try: - container = docker_client.containers.get(container_name) - container.stop(timeout=1) - except docker.errors.NotFound: - _logger.error('Cannnot stop container %s. Container not found', container_name) - except docker.errors.APIError as e: - _logger.error('Cannnot stop container %s. API Error "%s"', container_name, e) - -def docker_state(container_name, build_dir): - container_name = sanitize_container_name(container_name) - exist = os.path.exists(os.path.join(build_dir, 'exist-%s' % container_name)) - started = os.path.exists(os.path.join(build_dir, 'start-%s' % container_name)) - - if not exist: - return 'VOID' - - if os.path.exists(os.path.join(build_dir, f'end-{container_name}')): - return 'END' - - state = 'UNKNOWN' - if started: - docker_client = docker.from_env() - try: - container = docker_client.containers.get(container_name) - # possible statuses: created, restarting, running, removing, paused, exited, or dead - state = 'RUNNING' if container.status in ('created', 'running', 'paused') else 'GHOST' - except docker.errors.NotFound: - state = 'GHOST' - # check if the end- file has been written in between time - if state == 'GHOST' and os.path.exists(os.path.join(build_dir, f'end-{container_name}')): - state = 'END' - return state - - -def docker_clear_state(container_name, build_dir): - """Return True if container is still running""" - container_name = sanitize_container_name(container_name) - if os.path.exists(os.path.join(build_dir, 'start-%s' % container_name)): - os.remove(os.path.join(build_dir, 'start-%s' % container_name)) - if os.path.exists(os.path.join(build_dir, 'end-%s' % container_name)): - os.remove(os.path.join(build_dir, 'end-%s' % container_name)) - if os.path.exists(os.path.join(build_dir, 'exist-%s' % container_name)): - os.remove(os.path.join(build_dir, 'exist-%s' % container_name)) - - -def docker_get_gateway_ip(): - """Return the host ip of the docker default bridge gateway""" - docker_client = docker.from_env() - try: - bridge_net = docker_client.networks.get([n.id for n in docker_client.networks.list('bridge')][0]) - return bridge_net.attrs['IPAM']['Config'][0]['Gateway'] - except (KeyError, IndexError): - return None - - -def docker_ps(): - return _docker_ps() - - -def _docker_ps(): - """Return a list of running containers names""" - docker_client = docker.client.from_env() - return [ c.name for c in docker_client.containers.list()] - -def sanitize_container_name(name): - """Returns a container name with unallowed characters removed""" - name = re.sub('^[^a-zA-Z0-9]+', '', name) - return re.sub('[^a-zA-Z0-9_.-]', 
'', name) - - -############################################################################## -# Ugly monkey patch to set runbot in set runbot in testing mode -# No Docker will be started, instead a fake docker_run function will be used -############################################################################## - -if os.environ.get('RUNBOT_MODE') == 'test': - _logger.warning('Using Fake Docker') - - def fake_docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None, env_variables=None, *args, **kwargs): - _logger.info('Docker Fake Run: %s', run_cmd) - open(os.path.join(build_dir, 'exist-%s' % container_name), 'w').write('fake end') - open(os.path.join(build_dir, 'start-%s' % container_name), 'w').write('fake start\n') - open(os.path.join(build_dir, 'end-%s' % container_name), 'w').write('fake end') - with open(log_path, 'w') as log_file: - log_file.write('Fake docker_run started\n') - log_file.write('run_cmd: %s\n' % run_cmd) - log_file.write('build_dir: %s\n' % container_name) - log_file.write('container_name: %s\n' % container_name) - log_file.write('.modules.loading: Modules loaded.\n') - log_file.write('Initiating shutdown\n') - - docker_run = fake_docker_run diff --git a/runbot/controllers/__init__.py b/runbot/controllers/__init__.py deleted file mode 100644 index 96d149ab..00000000 --- a/runbot/controllers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- - -from . import frontend -from . import hook -from . import badge diff --git a/runbot/controllers/badge.py b/runbot/controllers/badge.py deleted file mode 100644 index b5de642d..00000000 --- a/runbot/controllers/badge.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -import hashlib - -import werkzeug -from matplotlib.font_manager import FontProperties -from matplotlib.textpath import TextToPath - -from odoo.http import request, route, Controller - - -class RunbotBadge(Controller): - - @route([ - '/runbot/badge/<int:repo_id>/<name>.svg', - '/runbot/badge/trigger/<int:trigger_id>/<name>.svg', - '/runbot/badge/<any(default,flat):theme>/<int:repo_id>/<name>.svg', - '/runbot/badge/trigger/<any(default,flat):theme>/<int:trigger_id>/<name>.svg', - ], type="http", auth="public", methods=['GET', 'HEAD'], sitemap=False) - def badge(self, name, repo_id=False, trigger_id=False, theme='default'): - # Sudo is used here to allow the badge to be returned for projects - # which have restricted permissions. - Trigger = request.env['runbot.trigger'].sudo() - Repo = request.env['runbot.repo'].sudo() - Batch = request.env['runbot.batch'].sudo() - Bundle = request.env['runbot.bundle'].sudo() - if trigger_id: - triggers = Trigger.browse(trigger_id) - project = triggers.project_id - else: - triggers = Trigger.search([('repo_ids', 'in', repo_id)]) - project = Repo.browse(repo_id).project_id - # -> hack to use repo. 
Would be better to change logic and use a trigger_id in params - bundle = Bundle.search([('name', '=', name), - ('project_id', '=', project.id)]) - if not bundle or not triggers: - return request.not_found() - batch = Batch.search([ - ('bundle_id', '=', bundle.id), - ('state', '=', 'done'), - ('category_id', '=', request.env.ref('runbot.default_category').id) - ], order='id desc', limit=1) - - builds = batch.slot_ids.filtered(lambda s: s.trigger_id in triggers).mapped('build_id') - if not builds: - state = 'testing' - else: - result = builds.result_multi() - if result == 'ok': - state = 'success' - elif result == 'warn': - state = 'warning' - else: - state = 'failed' - - etag = request.httprequest.headers.get('If-None-Match') - retag = hashlib.md5(state.encode()).hexdigest() - if etag == retag: - return werkzeug.wrappers.Response(status=304) - - # from https://github.com/badges/shields/blob/master/colorscheme.json - color = { - 'testing': "#dfb317", - 'success': "#4c1", - 'failed': "#e05d44", - 'warning': "#fe7d37", - }[state] - - def text_width(s): - fp = FontProperties(family='DejaVu Sans', size=11) - w, h, d = TextToPath().get_text_width_height_descent(s, fp, False) - return int(w + 1) - - class Text(object): - __slot__ = ['text', 'color', 'width'] - - def __init__(self, text, color): - self.text = text - self.color = color - self.width = text_width(text) + 10 - - data = { - 'left': Text(name, '#555'), - 'right': Text(state, color), - } - headers = [ - ('Content-Type', 'image/svg+xml'), - ('Cache-Control', 'max-age=%d' % (10*60,)), - ('ETag', retag), - ] - return request.render("runbot.badge_" + theme, data, headers=headers) diff --git a/runbot/controllers/frontend.py b/runbot/controllers/frontend.py deleted file mode 100644 index 18ca8e70..00000000 --- a/runbot/controllers/frontend.py +++ /dev/null @@ -1,570 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import werkzeug -import logging -import functools - -import werkzeug.utils -import werkzeug.urls - -from collections import defaultdict, OrderedDict -from werkzeug.exceptions import NotFound, Forbidden - -from odoo.addons.http_routing.models.ir_http import slug -from odoo.addons.website.controllers.main import QueryURL - -from odoo.http import Controller, Response, request, route as o_route -from odoo.osv import expression - -_logger = logging.getLogger(__name__) - - -def route(routes, **kw): - def decorator(f): - @o_route(routes, **kw) - @functools.wraps(f) - def response_wrap(*args, **kwargs): - projects = request.env['runbot.project'].search([]) - more = request.httprequest.cookies.get('more', False) == '1' - filter_mode = request.httprequest.cookies.get('filter_mode', 'all') - keep_search = request.httprequest.cookies.get('keep_search', False) == '1' - cookie_search = request.httprequest.cookies.get('search', '') - refresh = kwargs.get('refresh', False) - nb_build_errors = request.env['runbot.build.error'].search_count([('random', '=', True), ('parent_id', '=', False)]) - nb_assigned_errors = request.env['runbot.build.error'].search_count([('responsible', '=', request.env.user.id)]) - kwargs['more'] = more - kwargs['projects'] = projects - - response = f(*args, **kwargs) - if isinstance(response, Response): - if keep_search and cookie_search and 'search' not in kwargs: - search = cookie_search - else: - search = kwargs.get('search', '') - if keep_search and cookie_search != search: - response.set_cookie('search', search) - - project = response.qcontext.get('project') or projects and projects[0] - - response.qcontext['projects'] 
= projects - response.qcontext['more'] = more - response.qcontext['keep_search'] = keep_search - response.qcontext['search'] = search - response.qcontext['current_path'] = request.httprequest.full_path - response.qcontext['refresh'] = refresh - response.qcontext['filter_mode'] = filter_mode - response.qcontext['default_category'] = request.env['ir.model.data']._xmlid_to_res_id('runbot.default_category') - - response.qcontext['qu'] = QueryURL('/runbot/%s' % (slug(project) if project else ''), path_args=['search'], search=search, refresh=refresh) - if 'title' not in response.qcontext: - response.qcontext['title'] = 'Runbot %s' % project.name or '' - response.qcontext['nb_build_errors'] = nb_build_errors - response.qcontext['nb_assigned_errors'] = nb_assigned_errors - return response - return response_wrap - return decorator - - -class Runbot(Controller): - - def _pending(self): - ICP = request.env['ir.config_parameter'].sudo().get_param - warn = int(ICP('runbot.pending.warning', 5)) - crit = int(ICP('runbot.pending.critical', 12)) - pending_count = request.env['runbot.build'].search_count([('local_state', '=', 'pending'), ('build_type', '!=', 'scheduled')]) - scheduled_count = request.env['runbot.build'].search_count([('local_state', '=', 'pending'), ('build_type', '=', 'scheduled')]) - level = ['info', 'warning', 'danger'][int(pending_count > warn) + int(pending_count > crit)] - return pending_count, level, scheduled_count - - @o_route([ - '/runbot/submit' - ], type='http', auth="public", methods=['GET', 'POST'], csrf=False) - def submit(self, more=False, redirect='/', keep_search=False, category=False, filter_mode=False, update_triggers=False, **kwargs): - response = werkzeug.utils.redirect(redirect) - response.set_cookie('more', '1' if more else '0') - response.set_cookie('keep_search', '1' if keep_search else '0') - response.set_cookie('filter_mode', filter_mode or 'all') - response.set_cookie('category', category or '0') - if update_triggers: - enabled_triggers = [] - project_id = int(update_triggers) - for key in kwargs.keys(): - if key.startswith('trigger_'): - enabled_triggers.append(key.replace('trigger_', '')) - - key = 'trigger_display_%s' % project_id - if len(request.env['runbot.trigger'].search([('project_id', '=', project_id)])) == len(enabled_triggers): - response.delete_cookie(key) - else: - response.set_cookie(key, '-'.join(enabled_triggers)) - return response - - @route(['/', - '/runbot', - '/runbot/<model("runbot.project"):project>', - '/runbot/<model("runbot.project"):project>/search/<search>'], website=True, auth='public', type='http') - def bundles(self, project=None, search='', projects=False, refresh=False, **kwargs): - search = search if len(search) < 60 else search[:60] - env = request.env - categories = env['runbot.category'].search([]) - if not project and projects: - project = projects[0] - - pending_count, level, scheduled_count = self._pending() - context = { - 'categories': categories, - 'search': search, - 'message': request.env['ir.config_parameter'].sudo().get_param('runbot.runbot_message'), - 'pending_total': pending_count, - 'pending_level': level, - 'scheduled_count': scheduled_count, - 'hosts_data': request.env['runbot.host'].search([('assigned_only', '=', False)]), - } - if project: - domain = [('last_batch', '!=', False), ('project_id', '=', project.id), ('no_build', '=', False)] - - filter_mode = request.httprequest.cookies.get('filter_mode', False) - if filter_mode == 'sticky': - domain.append(('sticky', '=', True)) - elif filter_mode == 
'nosticky': - domain.append(('sticky', '=', False)) - - if search: - search_domains = [] - pr_numbers = [] - for search_elem in search.split("|"): - if search_elem.isnumeric(): - pr_numbers.append(int(search_elem)) - search_domains.append([('name', 'like', search_elem)]) - if pr_numbers: - res = request.env['runbot.branch'].search([('name', 'in', pr_numbers)]) - if res: - search_domains.append([('id', 'in', res.mapped('bundle_id').ids)]) - search_domain = expression.OR(search_domains) - domain = expression.AND([domain, search_domain]) - - e = expression.expression(domain, request.env['runbot.bundle']) - query = e.query - query.order = """ - (case when "runbot_bundle".sticky then 1 when "runbot_bundle".sticky is null then 2 else 2 end), - case when "runbot_bundle".sticky then "runbot_bundle".version_number end collate "C" desc, - "runbot_bundle".last_batch desc - """ - query.limit=40 - bundles = env['runbot.bundle'].browse(query) - - category_id = int(request.httprequest.cookies.get('category') or 0) or request.env['ir.model.data']._xmlid_to_res_id('runbot.default_category') - - trigger_display = request.httprequest.cookies.get('trigger_display_%s' % project.id, None) - if trigger_display is not None: - trigger_display = [int(td) for td in trigger_display.split('-') if td] - bundles = bundles.with_context(category_id=category_id) - - triggers = env['runbot.trigger'].search([('project_id', '=', project.id)]) - context.update({ - 'active_category_id': category_id, - 'bundles': bundles, - 'project': project, - 'triggers': triggers, - 'trigger_display': trigger_display, - }) - - context.update({'message': request.env['ir.config_parameter'].sudo().get_param('runbot.runbot_message')}) - res = request.render('runbot.bundles', context) - return res - - @route([ - '/runbot/bundle/<model("runbot.bundle"):bundle>', - '/runbot/bundle/<model("runbot.bundle"):bundle>/page/<int:page>' - ], website=True, auth='public', type='http', sitemap=False) - def bundle(self, bundle=None, page=1, limit=50, **kwargs): - domain = [('bundle_id', '=', bundle.id), ('hidden', '=', False)] - batch_count = request.env['runbot.batch'].search_count(domain) - pager = request.website.pager( - url='/runbot/bundle/%s' % bundle.id, - total=batch_count, - page=page, - step=50, - ) - batchs = request.env['runbot.batch'].search(domain, limit=limit, offset=pager.get('offset', 0), order='id desc') - - context = { - 'bundle': bundle, - 'batchs': batchs, - 'pager': pager, - 'project': bundle.project_id, - 'title': 'Bundle %s' % bundle.name - } - - return request.render('runbot.bundle', context) - - @o_route([ - '/runbot/bundle/<model("runbot.bundle"):bundle>/force', - '/runbot/bundle/<model("runbot.bundle"):bundle>/force/<int:auto_rebase>', - ], type='http', auth="user", methods=['GET', 'POST'], csrf=False) - def force_bundle(self, bundle, auto_rebase=False, **_post): - _logger.info('user %s forcing bundle %s', request.env.user.name, bundle.name) # user must be able to read bundle - batch = bundle.sudo()._force() - batch._log('Batch forced by %s', request.env.user.name) - batch._prepare(auto_rebase) - return werkzeug.utils.redirect('/runbot/batch/%s' % batch.id) - - @route(['/runbot/batch/<int:batch_id>'], website=True, auth='public', type='http', sitemap=False) - def batch(self, batch_id=None, **kwargs): - batch = request.env['runbot.batch'].browse(batch_id) - context = { - 'batch': batch, - 'project': batch.bundle_id.project_id, - 'title': 'Batch %s (%s)' % (batch.id, batch.bundle_id.name) - } - return request.render('runbot.batch', 
context) - - @o_route(['/runbot/batch/slot/<model("runbot.batch.slot"):slot>/build'], auth='user', type='http') - def slot_create_build(self, slot=None, **kwargs): - build = slot.sudo()._create_missing_build() - return werkzeug.utils.redirect('/runbot/build/%s' % build.id) - - @route(['/runbot/commit/<model("runbot.commit"):commit>'], website=True, auth='public', type='http', sitemap=False) - def commit(self, commit=None, **kwargs): - status_list = request.env['runbot.commit.status'].search([('commit_id', '=', commit.id)], order='id desc') - last_status_by_context = dict() - for status in status_list: - if status.context in last_status_by_context: - continue - last_status_by_context[status.context] = status - context = { - 'commit': commit, - 'project': commit.repo_id.project_id, - 'reflogs': request.env['runbot.ref.log'].search([('commit_id', '=', commit.id)]), - 'status_list': status_list, - 'last_status_by_context': last_status_by_context, - 'title': 'Commit %s' % commit.name[:8] - } - return request.render('runbot.commit', context) - - @o_route(['/runbot/commit/resend/<int:status_id>'], website=True, auth='user', type='http') - def resend_status(self, status_id=None, **kwargs): - CommitStatus = request.env['runbot.commit.status'] - status = CommitStatus.browse(status_id) - if not status.exists(): - raise NotFound() - last_status = CommitStatus.search([('commit_id', '=', status.commit_id.id), ('context', '=', status.context)], order='id desc', limit=1) - if status != last_status: - raise Forbidden("Only the last status can be resent") - if not last_status.sent_date or (datetime.datetime.now() - last_status.sent_date).seconds > 60: # ensure at least 60sec between two resend - new_status = status.sudo().copy() - new_status.description = 'Status resent by %s' % request.env.user.name - new_status._send() - _logger.info('github status %s resent by %s', status_id, request.env.user.name) - return werkzeug.utils.redirect('/runbot/commit/%s' % status.commit_id.id) - - @o_route([ - '/runbot/build/<int:build_id>/<operation>', - ], type='http', auth="public", methods=['POST'], csrf=False) - def build_operations(self, build_id, operation, **post): - build = request.env['runbot.build'].sudo().browse(build_id) - if operation == 'rebuild': - build = build._rebuild() - elif operation == 'kill': - build._ask_kill() - elif operation == 'wakeup': - build._wake_up() - - return str(build.id) - - @route([ - '/runbot/build/<int:build_id>', - '/runbot/batch/<int:from_batch>/build/<int:build_id>' - ], type='http', auth="public", website=True, sitemap=False) - def build(self, build_id, search=None, from_batch=None, **post): - """Events/Logs""" - - if from_batch: - from_batch = request.env['runbot.batch'].browse(int(from_batch)) - if build_id not in from_batch.with_context(active_test=False).slot_ids.build_id.ids: - # the url may have been forged replacing the build id, redirect to hide the batch - return werkzeug.utils.redirect('/runbot/build/%s' % build_id) - - from_batch = from_batch.with_context(batch=from_batch) - Build = request.env['runbot.build'].with_context(batch=from_batch) - - build = Build.browse([build_id])[0] - if not build.exists(): - return request.not_found() - siblings = (build.parent_id.children_ids if build.parent_id else from_batch.slot_ids.build_id if from_batch else build).sorted('id') - context = { - 'build': build, - 'from_batch': from_batch, - 'project': build.params_id.trigger_id.project_id, - 'title': 'Build %s' % build.id, - 'siblings': siblings, - # following logic is not the most 
efficient but good enough - 'prev_ko': next((b for b in reversed(siblings) if b.id < build.id and b.global_result != 'ok'), Build), - 'prev_bu': next((b for b in reversed(siblings) if b.id < build.id), Build), - 'next_bu': next((b for b in siblings if b.id > build.id), Build), - 'next_ko': next((b for b in siblings if b.id > build.id and b.global_result != 'ok'), Build), - } - return request.render("runbot.build", context) - - @route([ - '/runbot/build/search', - ], website=True, auth='public', type='http', sitemap=False) - def builds(self, **kwargs): - domain = [] - for key in ('config_id', 'version_id', 'project_id', 'trigger_id', 'create_batch_id.bundle_id', 'create_batch_id'): # allowed params - value = kwargs.get(key) - if value: - domain.append((f'params_id.{key}', '=', int(value))) - - for key in ('global_state', 'local_state', 'global_result', 'local_result'): - value = kwargs.get(key) - if value: - domain.append((f'{key}', '=', value)) - - for key in ('description',): - if key in kwargs: - domain.append((f'{key}', 'ilike', kwargs.get(key))) - - context = { - 'builds': request.env['runbot.build'].search(domain, limit=100), - } - - return request.render('runbot.build_search', context) - - @route([ - '/runbot/branch/<model("runbot.branch"):branch>', - ], website=True, auth='public', type='http', sitemap=False) - def branch(self, branch=None, **kwargs): - pr_branch = branch.bundle_id.branch_ids.filtered(lambda rec: not rec.is_pr and rec.id != branch.id and rec.remote_id.repo_id == branch.remote_id.repo_id)[:1] - branch_pr = branch.bundle_id.branch_ids.filtered(lambda rec: rec.is_pr and rec.id != branch.id and rec.remote_id.repo_id == branch.remote_id.repo_id)[:1] - context = { - 'branch': branch, - 'project': branch.remote_id.repo_id.project_id, - 'title': 'Branch %s' % branch.name, - 'pr_branch': pr_branch, - 'branch_pr': branch_pr - } - - return request.render('runbot.branch', context) - - @route([ - '/runbot/glances', - '/runbot/glances/<int:project_id>' - ], type='http', auth='public', website=True, sitemap=False) - def glances(self, project_id=None, **kwargs): - project_ids = [project_id] if project_id else request.env['runbot.project'].search([]).ids # search for access rights - bundles = request.env['runbot.bundle'].search([('sticky', '=', True), ('project_id', 'in', project_ids)]) - pending = self._pending() - qctx = { - 'pending_total': pending[0], - 'pending_level': pending[1], - 'bundles': bundles, - 'title': 'Glances' - } - return request.render("runbot.glances", qctx) - - @route(['/runbot/monitoring', - '/runbot/monitoring/<int:category_id>', - '/runbot/monitoring/<int:category_id>/<int:view_id>'], type='http', auth='user', website=True, sitemap=False) - def monitoring(self, category_id=None, view_id=None, **kwargs): - pending = self._pending() - hosts_data = request.env['runbot.host'].search([]) - if category_id: - category = request.env['runbot.category'].browse(category_id) - assert category.exists() - else: - category = request.env.ref('runbot.nightly_category') - category_id = category.id - bundles = request.env['runbot.bundle'].search([('sticky', '=', True)]) # NOTE we dont filter on project - qctx = { - 'category': category, - 'pending_total': pending[0], - 'pending_level': pending[1], - 'scheduled_count': pending[2], - 'bundles': bundles, - 'hosts_data': hosts_data, - 'auto_tags': request.env['runbot.build.error'].disabling_tags(), - 'build_errors': request.env['runbot.build.error'].search([('random', '=', True)]), - 'kwargs': kwargs, - 'title': 'monitoring' - } - 
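        # an optional view_id in the URL lets a logged-in user render a custom
        # dashboard view with the same context instead of runbot.monitoring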
return request.render(view_id if view_id else "runbot.monitoring", qctx) - - @route(['/runbot/errors', - '/runbot/errors/page/<int:page>' - ], type='http', auth='user', website=True, sitemap=False) - def build_errors(self, sort=None, page=1, limit=20, **kwargs): - sort_order_choices = { - 'last_seen_date desc': 'Last seen date: Newer First', - 'last_seen_date asc': 'Last seen date: Older First', - 'build_count desc': 'Number seen: High to Low', - 'build_count asc': 'Number seen: Low to High', - 'responsible asc': 'Assignee: A - Z', - 'responsible desc': 'Assignee: Z - A', - 'module_name asc': 'Module name: A - Z', - 'module_name desc': 'Module name: Z -A' - } - - sort_order = sort if sort in sort_order_choices else 'last_seen_date desc' - - current_user_errors = request.env['runbot.build.error'].search([ - ('responsible', '=', request.env.user.id), - ('parent_id', '=', False), - ], order='last_seen_date desc, build_count desc') - - domain = [('parent_id', '=', False), ('responsible', '!=', request.env.user.id), ('build_count', '>', 1)] - build_errors_count = request.env['runbot.build.error'].search_count(domain) - url_args = {} - url_args['sort'] = sort - pager = request.website.pager(url='/runbot/errors/', url_args=url_args, total=build_errors_count, page=page, step=limit) - - build_errors = request.env['runbot.build.error'].search(domain, order=sort_order, limit=limit, offset=pager.get('offset', 0)) - - qctx = { - 'current_user_errors': current_user_errors, - 'build_errors': build_errors, - 'title': 'Build Errors', - 'sort_order_choices': sort_order_choices, - 'pager': pager - } - return request.render('runbot.build_error', qctx) - - @route(['/runbot/teams', '/runbot/teams/<model("runbot.team"):team>',], type='http', auth='user', website=True, sitemap=False) - def team_dashboards(self, team=None, hide_empty=False, **kwargs): - teams = request.env['runbot.team'].search([]) if not team else None - domain = [('id', 'in', team.build_error_ids.ids)] if team else [] - - # Sort & Filter - sortby = kwargs.get('sortby', 'count') - filterby = kwargs.get('filterby', 'not_one') - searchbar_sortings = { - 'date': {'label': 'Recently Seen', 'order': 'last_seen_date desc'}, - 'count': {'label': 'Nb Seen', 'order': 'build_count desc'}, - } - order = searchbar_sortings[sortby]['order'] - searchbar_filters = { - 'all': {'label': 'All', 'domain': []}, - 'unassigned': {'label': 'Unassigned', 'domain': [('responsible', '=', False)]}, - 'not_one': {'label': 'Seen more than once', 'domain': [('build_count', '>', 1)]}, - } - domain = expression.AND([domain, searchbar_filters[filterby]['domain']]) - - qctx = { - 'team': team, - 'teams': teams, - 'build_error_ids': request.env['runbot.build.error'].search(domain, order=order), - 'hide_empty': bool(hide_empty), - 'searchbar_sortings': searchbar_sortings, - 'sortby': sortby, - 'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())), - 'filterby': filterby, - 'default_url': request.httprequest.path, - } - return request.render('runbot.team', qctx) - - @route(['/runbot/dashboards/<model("runbot.dashboard"):dashboard>',], type='http', auth='user', website=True, sitemap=False) - def dashboards(self, dashboard=None, hide_empty=False, **kwargs): - qctx = { - 'dashboard': dashboard, - 'hide_empty': bool(hide_empty), - } - return request.render('runbot.dashboard_page', qctx) - - @route(['/runbot/build/stats/<int:build_id>'], type='http', auth="public", website=True, sitemap=False) - def build_stats(self, build_id, search=None, **post): - """Build statistics""" 
- - Build = request.env['runbot.build'] - - build = Build.browse([build_id])[0] - if not build.exists(): - return request.not_found() - - build_stats = defaultdict(dict) - for stat in build.stat_ids: - for module, value in sorted(stat.values.items(), key=lambda item: item[1], reverse=True): - build_stats[stat.category][module] = value - - context = { - 'build': build, - 'build_stats': build_stats, - 'project': build.params_id.trigger_id.project_id, - 'title': 'Build %s statistics' % build.id - } - return request.render("runbot.build_stats", context) - - - @route(['/runbot/stats/'], type='json', auth="public", website=False, sitemap=False) - def stats_json(self, bundle_id=False, trigger_id=False, key_category='', center_build_id=False, limit=100, search=None, **post): - """ Json stats """ - trigger_id = trigger_id and int(trigger_id) - bundle_id = bundle_id and int(bundle_id) - center_build_id = center_build_id and int(center_build_id) - limit = min(int(limit), 1000) - - trigger = request.env['runbot.trigger'].browse(trigger_id) - bundle = request.env['runbot.bundle'].browse(bundle_id) - if not trigger_id or not bundle_id or not trigger.exists() or not bundle.exists(): - return request.not_found() - - builds_domain = [ - ('global_state', 'in', ('running', 'done')), - ('slot_ids.batch_id.bundle_id', '=', bundle_id), - ('params_id.trigger_id', '=', trigger.id), - ] - builds = request.env['runbot.build'].with_context(active_test=False) - if center_build_id: - builds = builds.search( - expression.AND([builds_domain, [('id', '>=', center_build_id)]]), - order='id', limit=limit/2) - builds_domain = expression.AND([builds_domain, [('id', '<=', center_build_id)]]) - limit -= len(builds) - - builds |= builds.search(builds_domain, order='id desc', limit=limit) - if not builds: - return {} - - builds = builds.search([('id', 'child_of', builds.ids)]) - - parents = {b.id: b.top_parent.id for b in builds.with_context(prefetch_fields=False)} - request.env.cr.execute("SELECT build_id, values FROM runbot_build_stat WHERE build_id IN %s AND category = %s", [tuple(builds.ids), key_category]) # read manually is way faster than using orm - res = {} - for (build_id, values) in request.env.cr.fetchall(): - if values: - res.setdefault(parents[build_id], {}).update(values) - # we need to update here to manage the post install case: we want to combine stats from all post_install childrens. 
- return res - - @route(['/runbot/stats/<model("runbot.bundle"):bundle>/<model("runbot.trigger"):trigger>'], type='http', auth="public", website=True, sitemap=False) - def modules_stats(self, bundle, trigger, search=None, **post): - """Modules statistics""" - - categories = request.env['runbot.build.stat.regex'].search([]).mapped('name') - - context = { - 'stats_categories': categories, - 'bundle': bundle, - 'trigger': trigger, - } - - return request.render("runbot.modules_stats", context) - - @route(['/runbot/load_info'], type='http', auth="user", website=True, sitemap=False) - def load_infos(self, **post): - build_by_bundle = {} - - for build in request.env['runbot.build'].search([('local_state', 'in', ('pending', 'testing'))], order='id'): - build_by_bundle.setdefault(build.params_id.create_batch_id.bundle_id, []).append(build) - - build_by_bundle = list(build_by_bundle.items()) - build_by_bundle.sort(key=lambda x: -len(x[1])) - pending_count, level, scheduled_count = self._pending() - context = { - 'build_by_bundle': build_by_bundle, - 'pending_total': pending_count, - 'pending_level': level, - 'scheduled_count': scheduled_count, - 'hosts_data': request.env['runbot.host'].search([('assigned_only', '=', False)]), - } - - return request.render("runbot.load_info", context) diff --git a/runbot/controllers/hook.py b/runbot/controllers/hook.py deleted file mode 100644 index fb82c7ff..00000000 --- a/runbot/controllers/hook.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- - -import time -import json -import logging - -from odoo import http -from odoo.http import request - -_logger = logging.getLogger(__name__) - - -class Hook(http.Controller): - - @http.route(['/runbot/hook', '/runbot/hook/<int:remote_id>'], type='http', auth="public", website=True, csrf=False) - def hook(self, remote_id=None, **_post): - event = request.httprequest.headers.get("X-Github-Event") - payload = json.loads(request.params.get('payload', '{}')) - if remote_id is None: - repo_data = payload.get('repository') - if repo_data: - remote_domain = [ - '|', '|', '|', - ('name', '=', repo_data['ssh_url']), - ('name', '=', repo_data['ssh_url'].replace('.git', '')), - ('name', '=', repo_data['clone_url']), - ('name', '=', repo_data['clone_url'].replace('.git', '')), - ] - remote = request.env['runbot.remote'].sudo().search( - remote_domain, limit=1) - remote_id = remote.id - if not remote_id: - _logger.error("Remote %s not found", repo_data['ssh_url']) - remote = request.env['runbot.remote'].sudo().browse(remote_id) - _logger.info('Remote found %s', remote) - - # force update of dependencies too in case a hook is lost - if not payload or event == 'push': - remote.repo_id.set_hook_time(time.time()) - elif event == 'pull_request': - pr_number = payload.get('pull_request', {}).get('number', '') - branch = request.env['runbot.branch'].sudo().search([('remote_id', '=', remote.id), ('name', '=', pr_number)]) - branch.recompute_infos(payload.get('pull_request', {})) - if payload.get('action') in ('synchronize', 'opened', 'reopened'): - remote.repo_id.set_hook_time(time.time()) - # remaining recurrent actions: labeled, review_requested, review_request_removed - elif event == 'delete': - if payload.get('ref_type') == 'branch': - branch_ref = payload.get('ref') - _logger.info('Branch %s in repo %s was deleted', branch_ref, remote.repo_id.name) - branch = request.env['runbot.branch'].sudo().search([('remote_id', '=', remote.id), ('name', '=', branch_ref)]) - branch.alive = False - return "" diff --git 
a/runbot/data/build_parse.xml b/runbot/data/build_parse.xml deleted file mode 100644 index 9514aa6c..00000000 --- a/runbot/data/build_parse.xml +++ /dev/null @@ -1,22 +0,0 @@ -<odoo> - <record model="ir.actions.server" id="action_parse_build_logs"> - <field name="name">Parse build logs</field> - <field name="model_id" ref="runbot.model_runbot_build" /> - <field name="binding_model_id" ref="runbot.model_runbot_build" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - action = records._parse_logs() - </field> - </record> - <record model="ir.actions.server" id="action_parse_log"> - <field name="name">Parse log entry</field> - <field name="model_id" ref="runbot.model_runbot_error_log" /> - <field name="binding_model_id" ref="runbot.model_runbot_error_log" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - action = records._parse_logs() - </field> - </record> -</odoo> diff --git a/runbot/data/dockerfile_data.xml b/runbot/data/dockerfile_data.xml deleted file mode 100644 index d28cba53..00000000 --- a/runbot/data/dockerfile_data.xml +++ /dev/null @@ -1,9 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <record model="runbot.dockerfile" id="runbot.docker_default"> - <field name="name">Docker Default</field> - <field name="template_id" ref="runbot.docker_base"/> - <field name="to_build">True</field> - <field name="description">Default Dockerfile for latest Odoo versions.</field> - </record> -</odoo> diff --git a/runbot/data/error_link.xml b/runbot/data/error_link.xml deleted file mode 100644 index 3917f0e6..00000000 --- a/runbot/data/error_link.xml +++ /dev/null @@ -1,22 +0,0 @@ -<odoo> - <record model="ir.actions.server" id="action_link_build_errors"> - <field name="name">Link build errors</field> - <field name="model_id" ref="runbot.model_runbot_build_error" /> - <field name="binding_model_id" ref="runbot.model_runbot_build_error" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - records.link_errors() - </field> - </record> - <record model="ir.actions.server" id="action_clean_build_errors"> - <field name="name">Re-clean build errors</field> - <field name="model_id" ref="runbot.model_runbot_build_error" /> - <field name="binding_model_id" ref="runbot.model_runbot_build_error" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - records.clean_content() - </field> - </record> -</odoo> diff --git a/runbot/data/runbot_build_config_data.xml b/runbot/data/runbot_build_config_data.xml deleted file mode 100644 index e320c92b..00000000 --- a/runbot/data/runbot_build_config_data.xml +++ /dev/null @@ -1,160 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data noupdate="1"> - <record id="runbot_build_config_step_test_base" model="runbot.build.config.step"> - <field name="name">base</field> - <field name="install_modules">-*,base</field> - <field name="cpu_limit">600</field> - <field name="test_enable" eval="False"/> - <field name="protected" eval="True"/> - <field name="default_sequence">10</field> - </record> - - <record id="runbot_build_config_step_test_all" model="runbot.build.config.step"> - <field name="name">all</field> - <field name="install_modules"></field> - <field name="test_enable" eval="True"/> - <field name="protected" eval="True"/> - <field name="default_sequence">20</field> - </record> - - <record id="runbot_build_config_step_run" 
model="runbot.build.config.step"> - <field name="name">run</field> - <field name="job_type">run_odoo</field> - <field name="protected" eval="True"/> - <field name="default_sequence">1000</field> - </record> - - <record id="runbot_build_config_default" model="runbot.build.config"> - <field name="name">Default</field> - <field name="step_order_ids" eval="[(5,0,0), - (0, 0, {'step_id': ref('runbot_build_config_step_test_base')}), - (0, 0, {'step_id': ref('runbot_build_config_step_test_all')}), - (0, 0, {'step_id': ref('runbot_build_config_step_run')})]"/> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_default_no_run" model="runbot.build.config"> - <field name="name">Default no run</field> - <field name="step_order_ids" eval="[(5,0,0), - (0, 0, {'step_id': ref('runbot_build_config_step_test_base')}), - (0, 0, {'step_id': ref('runbot_build_config_step_test_all')})]"/> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_light_test" model="runbot.build.config"> - <field name="name">All only</field> - <field name="description">Test all only, usefull for multibuild</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_test_all')})]"/> - <field name="protected" eval="True"/> - </record> - - <!-- Coverage--> - <record id="runbot_build_config_step_test_coverage" model="runbot.build.config.step"> - <field name="name">coverage</field> - <field name="install_modules"></field> - <field name="cpu_limit">7000</field> - <field name="test_enable" eval="True"/> - <field name="coverage" eval="True"/> - <field name="protected" eval="True"/> - <field name="default_sequence">30</field> - </record> - - <record id="runbot_build_config_test_coverage" model="runbot.build.config"> - <field name="name">Coverage</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_test_coverage')})]"/> - <field name="protected" eval="True"/> - </record> - - <!-- Multi build--> - <record id="runbot_build_config_step_create_light_multi" model="runbot.build.config.step"> - <field name="name">create_light_multi</field> - <field name="job_type">create_build</field> - <field name="create_config_ids" eval="[(4, ref('runbot_build_config_light_test'))]"/> - <field name="number_builds">20</field> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_multibuild" model="runbot.build.config"> - <field name="name">Multi build</field> - <field name="description">Run 20 children build with the same hash and dependencies. 
Use to detect undeterministic issues</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_create_light_multi')})]"/> - <field name="protected" eval="True"/> - </record> - <!-- l10n --> - <record id="runbot_build_config_step_test_l10n" model="runbot.build.config.step"> - <field name="name">l10n</field> - <field name="install_modules"></field> - <field name="test_enable" eval="True"/> - <field name="protected" eval="True"/> - <field name="default_sequence">30</field> - <field name="test_tags">l10nall</field> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_l10n" model="runbot.build.config"> - <field name="name">L10n</field> - <field name="description">A simple test_all with a l10n test_tags</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_test_l10n')})]"/> - <field name="protected" eval="True"/> - </record> - <!-- Click all--> - <record id="runbot_build_config_step_test_click_all" model="runbot.build.config.step"> - <field name="name">clickall</field> - <field name="install_modules"></field> - <field name="cpu_limit">5400</field> - <field name="test_enable" eval="True"/> - <field name="protected" eval="True"/> - <field name="default_sequence">40</field> - <field name="test_tags">click_all</field> - <field name="protected" eval="True"/> - </record> - <record id="runbot_build_config_click_all" model="runbot.build.config"> - <field name="name">Click All</field> - <field name="description">Used for nightly click all, test all filters and menus.</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_test_click_all')})]"/> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_step_restore" model="runbot.build.config.step"> - <field name="name">restore</field> - <field name="job_type">restore</field> - <field name="default_sequence">2</field> - </record> - - <record id="runbot_build_config_step_test_only" model="runbot.build.config.step"> - <field name="name">test_only</field> - <field name="custom_db_name">all</field> - <field name="create_db" eval="False"/> - <field name="install_modules">-*</field> - <field name="test_enable" eval="True"/> - <field name="protected" eval="True"/> - <field name="default_sequence">30</field> - </record> - - <record id="runbot_build_config_restore_and_test" model="runbot.build.config"> - <field name="name">Restore and Test</field> - <field name="step_order_ids" eval="[(5,0,0), - (0, 0, {'step_id': ref('runbot_build_config_step_restore')}), - (0, 0, {'step_id': ref('runbot_build_config_step_test_only')})]"/> - <field name="protected" eval="True"/> - </record> - - <!-- Multi build custom--> - <record id="runbot_build_config_step_custom_multi_create" model="runbot.build.config.step"> - <field name="name">custom_create_multi</field> - <field name="job_type">create_build</field> - <field name="create_config_ids" eval="[(4, ref('runbot_build_config_restore_and_test'))]"/> - <field name="number_builds">1</field> - <field name="protected" eval="True"/> - </record> - - <record id="runbot_build_config_custom_multi" model="runbot.build.config"> - <field name="name">Custom Multi</field> - <field name="description">Generic multibuild to use with custom trigger wizard</field> - <field name="step_order_ids" eval="[(5,0,0), (0, 0, {'step_id': ref('runbot_build_config_step_create_light_multi')})]"/> - <field name="protected" eval="True"/> - </record> - - 
</data> -</odoo> diff --git a/runbot/data/runbot_data.xml b/runbot/data/runbot_data.xml deleted file mode 100644 index fae57653..00000000 --- a/runbot/data/runbot_data.xml +++ /dev/null @@ -1,111 +0,0 @@ -<odoo> - <record model="runbot.category" id="runbot.default_category"> - <field name="name">Default</field> - <field name="icon">gear</field> - </record> - <record model="runbot.category" id="runbot.nightly_category"> - <field name="name">Nightly</field> - <field name="icon">moon-o</field> - </record> - <record model="runbot.category" id="runbot.weekly_category"> - <field name="name">Weekly</field> - <field name="icon">tasks</field> - </record> - - <record model="runbot.project" id="runbot.main_project"> - <field name="name">R&D</field> - </record> - - <data noupdate="1"> - <record model="runbot.bundle" id="runbot.bundle_master" > - <field name="name">master</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record model="runbot.bundle" id="runbot.bundle_dummy"> - <field name="name">Dummy</field> - <field name="no_build">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - - <record model="ir.config_parameter" id="runbot.runbot_upgrade_exception_message"> - <field name="key">runbot.runbot_upgrade_exception_message</field> - <field name="value">Upgrade exception [#{exception.id}]({base_url}/web/#id={exception.id}&view_type=form&model=runbot.upgrade.exception) added\ - {exception.elements} - </field> - </record> - - <record model="ir.config_parameter" id="runbot.runbot_default_odoorc"> - <field name="key">runbot.runbot_default_odoorc</field> - <field name="value">[options]\nadmin_passwd=running_master_password</field> - </record> - - </data> - - <record model="ir.config_parameter" id="runbot.runbot_is_base_regex"> - <field name="key">runbot.runbot_is_base_regex</field> - <field name="value">^((master)|(saas-)?\d+\.\d+)$</field> - </record> - - <record model="ir.actions.server" id="action_toggle_is_base"> - <field name="name">Mark is base</field> - <field name="model_id" ref="runbot.model_runbot_bundle" /> - <field name="binding_model_id" ref="runbot.model_runbot_bundle" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - records.write({'is_base': True}) - </field> - </record> - <record model="ir.actions.server" id="action_mark_no_build"> - <field name="name">Mark no build</field> - <field name="model_id" ref="runbot.model_runbot_bundle" /> - <field name="binding_model_id" ref="runbot.model_runbot_bundle" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - records.write({'no_build': True}) - </field> - </record> - <record model="ir.actions.server" id="action_mark_build"> - <field name="name">Mark build</field> - <field name="model_id" ref="runbot.model_runbot_bundle" /> - <field name="binding_model_id" ref="runbot.model_runbot_bundle" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - records.write({'no_build': False}) - </field> - </record> - - <record id="ir_cron_runbot" model="ir.cron"> - <field name="name">Runbot</field> - <field name="active" eval="False"/> - <field name="interval_number">10</field> - <field name="interval_type">seconds</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - <field name="model_id" ref="model_runbot_runbot"/> - <field name="code">model._cron()</field> - <field 
name="state">code</field> - </record> - - - <record id="bundle_create" model="base.automation"> - <field name="name">Base, staging and tmp management</field> - <field name="model_id" ref="runbot.model_runbot_bundle"/> - <field name="trigger">on_create</field> - <field name="active" eval="True"/> - <field name="state">code</field> - <field name="code"> -if record.name.startswith('tmp.'): - record['no_build'] = True -elif record.name.startswith('staging.'): - name = record.name.replace('staging.', '') - base = record.env['runbot.bundle'].search([('name', '=', name), ('project_id', '=', record.project_id.id), ('is_base', '=', True)], limit=1) - record['build_all'] = True - if base: - record['defined_base_id'] = base - </field> - </record> -</odoo> diff --git a/runbot/data/runbot_error_regex_data.xml b/runbot/data/runbot_error_regex_data.xml deleted file mode 100644 index 79bf8614..00000000 --- a/runbot/data/runbot_error_regex_data.xml +++ /dev/null @@ -1,17 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data noupdate="1"> - <record id="runbot_error_regex_clean_numbers" model="runbot.error.regex"> - <field name="regex">, line \d+,</field> - <field name="re_type">cleaning</field> - </record> - <record id="runbot_error_regex_filter_failures" model="runbot.error.regex"> - <field name="regex">Module .+: \d+ failures, \d+ errors</field> - <field name="re_type">filter</field> - </record> - <record id="runbot_error_regex_filter_failed" model="runbot.error.regex"> - <field name="regex">At least one test failed when loading the modules.</field> - <field name="re_type">filter</field> - </record> - </data> -</odoo> diff --git a/runbot/data/website_data.xml b/runbot/data/website_data.xml deleted file mode 100644 index 104c9acf..00000000 --- a/runbot/data/website_data.xml +++ /dev/null @@ -1,5 +0,0 @@ -<odoo> - <record id="website.homepage_page" model="website.page"> - <field name="url">/home</field> - </record> -</odoo> diff --git a/runbot/documentation/images/Screenshot from 2020-09-23 12-20-40.png b/runbot/documentation/images/Screenshot from 2020-09-23 12-20-40.png deleted file mode 100644 index ac7ae73543308af51f7448fe7692074c0f30a253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 22177 zcmcG$Wmr_-8#SzwB2p4VOGyX<0@5Mf-3>!Xcf$}O3@zOu%}~<a10&ra@T0q%p?L@Y z*ZbjppU)2;fWrmn?6dd1?|ZFvuT7Y;qSRXq5{wrwUc8l&7FT`o;-xR*`%!dM#OJFK z>jK0VWEW8xb#%msFS;2RaZT(hq3x>XVBzXv;%xrH(%!+&oW%v`Y;JDv^3B2Z7^y?( z#f$eZWW>L!duAN8S;f=##}>kiQ*Q{g3^k_FNMM)&1i#QFu1y;6{qA)>-b>F_Cd^SW z_GD)EVg-C%UUW0FAOock3m4zux^Xs>Mpann3kvEa5)-ekuKw^+Fi^PEz@?4rH~XCG zuoC)HM#uL7XB+RPV%z+&+hYXr(;=l`=Id1X59cC(g0d06hW-D&6xL~9K>dC9F;n70 ze7~9DAhrSE=1{`OS;qo#?MwQISCYW(`?MZMb1(lP{zmg&S@c0{=E9%5dc_iP@0dO# zrYrxs+<0y~j#T+A`W~ZtW3#SN#ADG!37`MM>;3<Q%X+Ej7t5wyUgYI?144}R{u{dR z&_>y@N96xrH7`zz%;Ga*Kzip7)c37c>Hp_@|No}p|H5Vb`rTWVekA4RanSdB5IvBs zK2I4E)xRMN;QVg}|Bp+pUBtT`|9CIH_A#d8sA|*ZvqQ}DTW#^vpx^oj?X?WtbBFnD z9mb;wBNoUZJhu~U0zma%ewy2sGL1f4rlyuQjN?chf}6_kGt>AGT+G65bq<z4@pdDY zrBZ^>KbqOx_uddk92pnv1ge8(ta;7;i;R(!T*p+36z*?g-qOfl*-U?rsN$oH@J6DW z4&C3Pdw6(UJfu$w@(!P`$Exum#zWAjFn7^I`)39K1!TDKY!~l^Lzo!#1KqHIn0+Z$ zUqy^NmR$rWcqw8s6zN;xPyp44Ufc34CiD5?zvl($I2#cnztq6M)f^7bWndT#%4IG| zPlJ^Go`H;!c7=b128{op_8cv-;XR5u!1lp{Ar-BMnE=$Ui@1kh=X8A0IpIjf%&{VQ zA9ljrj@qRyw@S9Km{iTo7!j8{R=lJWQAILM$_}u>eMw`%SBRYjp!QpK=~zBr>vg%h zL+TlO2c!-DdG-_wSf}0^jeWwF8F^kHj*SAI1%l{8tK!mdi@I(Clo8!Bg<aAf(d6}u zHn7kqYMynRz+7eq3@Hr)0iUbLhewfRH&;I;T9FK*w2Ymb)&A#g7Wcbv7Y+B9D@ui< zy17i7?)VhJ3}2*56%jjt!+zxxa#nF{8+2EjdYxPx`h~zvjiyhT2lnV_+}K8U;q>3T 
zk^98wITBDdyG7bclO?dS`C&2ryXCJcApy0rrpA12VhO&Q5fgwW?Y-m#^$Tez__6Fb zZ<=RDH^ar2`(wo?ci+oPwN}c;jt+WEowKL3=Uqs_fcv()&KNYugEGUn*PP2F7l|v5 zX|7^i?zhRKcF``s|GVrW%u<6t77P!BRJ<^$(K+uzJ9hE!*ghZa$T@uUh1qZ2#UkR) zt5du@N}xyg7xNp$eHbus97j}=di6DG%qDKx-H=Q<JAjc%&T$h$E@76P;F<ko^FVgJ z@amN*ikr2FB1_8@jvHaJ=j9tp-HO14H0M@))L#Wt(RQ((f;=Bynh~16g#JwX&HQQe z4u-D`UI`5ZAen2Jl$ij$Be`t3^^T5@c*$CR3|Wpp{rWdgj)u{5<#lCRA2s*6ez-sZ zbzAKfJGZmw4!$>`Jky7IHkX6ikp#$M=z)1~%$|UnB<DKQ9iP2<v0$}@3D7Ms)kgR! zWg23e4Iyp0#pxyzlwPK3Od4m31t4N_;EXvg8yl}keN?*#+c?rVvX{Uf4+O-d5)bL( z@QiWMJ)0|9)-dl~b=;V%*(Sp->X_dK75~e7mbPC=XZE?=3`&Kg*%K-{`6A@J;K_Nn zDYkf~n8?RrQ2e>{sT}jV{7GHY$P{!u8ck$+R7Q+}YO}?%#+K&wNsC^_*P!Tgf1c-T zft`|kh(FN%d(m#r_6w)NcNZ*6@*j1&{lLYk^p40&8XQwvPo-rKD8Ny1|56IbwM}L^ zN&X$cEP9L<1jiq#f>=j7+0NSRL4^2W%(udr|5hOQBSw8XOX;*Vq=mJtM+eDpkUI)c zhHk;sOU6R{stfyK1m~XNI&H#f#w0@`QUa>n6x7XY_}@|>9N%0*P4j(jl%YoIkH<=p z=X}p$61DjNU$$&ncvVtjeV@sN9>jra$=R0Yb+hq{!+kMYLA#`9hHb{WunOGwzOP!C z3|(lfp5;f)l*a2mN4KW++r9ARhlS(idADmly5~fhs$H(xe_WW-dZ<9V`CnJKvTlT& z;Wc#-7pi5{C-lC2YchgIC-$-F7b5R<m`UG<be_8AGJT8`nRTx%B0PT3Ysd3X|F;zH zS{9QE?HM9OX`FCv#Zf4JqZ4+;e$g|pm6d+vll*Y^qe{#}9~}ySTFcVoFiv*jTh{u_ znlfYp7t6F~hp7u_tfa$1**A~>ZpO4V@_XTsVGvJi$e=EdAUZ7Q>%-9-_P?}Se3%j} zaZv#>RunP@^)uQ~YZ9rl=`ihA?PXJ}!kuGqZz&U&h0C-{i?-i4vHcPpgAYcM&ohT4 zsa-iM;C&&@Z95nQ8p9xYiOIC38M$>7RZy$Bf3TQaCEKZOfC<GI*$px-r@3MNM*0ai zEVkK7`>kN87!(Z~eS}w2LChfOparYHr9l|*FFQ&G7naN&eo}pCqk~Fgenm0jOaw;t z>Xd4H$sKQ2$h!Vu_cxl6+~(~}Gbe3t;Iy<q^D3v1a$^t#LX4_3>lO^Cno&+7(9kGZ z$xdZvj-30vocdggP+!=Px#gK2mvs?S0luSTIN_TzludUS1d@JqkbdNaZlva!r_X%b zIF@|!wdo?^+PW|-k7r4c=u%FC;#^cvBbVXsHTV6u*f#T<1zYF|^lwfBqN!Z_MvxT7 zAQ`5SJ5jyFb}1y>X23`n#`Zq~EFTrbaB-o9RiQIDy*NN=v7tQ8N5D7}CCvppo^K~u zS&QJ+HNaWn5nCKFn_DkG7fGf(R_upM8}IN9*TDj6_--I2OtRbPpfTbG#`Z4}R+<)N zZQMIAT=GWJB~~3P-iPCe(n8VMU%U)U8{}pzs$9A$F45Idc%!rx`dqQk1Hi1k{p(6@ z2ImXQ_aQq-aCif!Tjt(Axq^Xn)uJi^9jl>$oN4v|XRr0^tql)Ws8iW5+(-_@CQ;$> zMZb8+G0o2PF@2A!gc%5!3WcG?0K_zszyJ*3?!#}Mh2)|mi*}Z+QDHg+nqgTNziu&l z2!~S$uZ|CMm0Bb+0jW_PXOuPQGz<z8{HC60Z<dO%^djXsY`4esYrSCravwY-s(h2p zuWI}zfVjWdu0=`f4_T9^=<E|yTpiapqjjwBVR!!Q|4;xFK#WSxqK%K0n*@xeOV0mA zmzEs!Mo?+`^s~>3T(v}fiI6`4wG|r^WV~~W-&=YD!vWl8{o4PgM1OQ_@8#qH+nqX# zh!I-ze~5XC#TL3Je%khb9KBEWzokFAx^qqbJhQotveIIVa<jaAC4;~-4N@<nLj_AO z5eT7Jmg~z$MDa}02A+a)j`<aOBYOrUT&_Pz!1prqNNZ<wFtn8Mv1FQjgfW>Ofwv#T zn=1<Tbwf;6Tm>9`JYb85f<dd_;leA&{+Aw868%0(OWSb&YorE`hHC&}e<t(w@{f#( zelrT3{!NS1nsAZXdf$g>tpm7EE4Fc8wQM<rmkj&AD7v`8fs9_Kx2Qm5{;dnO&FFTh zjd+LjW&oBPYj{wuYpva2O=UJ-NMOg^oYcL;)A>`k|MkiC)YnD`$z##-eUipv#^&+1 z)5lZ?F+~Jq!%ivi3v`q5Eh-jR@e`4UVp9lXPH9La(AKIH>I<>sNpPErfsHwXu$O44 z)iCJA$&)@~39t?ZNkai*vu+54<epA`yezP0(bi>Bg4)>8wtcde<Y)m&%DX$3eNGVH z!N06XAkBS+CiFs{K~=_NeI%woxovE{^?L>I#ArzfJj?sSEXN;s#(Z0WJei9BEiMDg zH^<ju{~df{hh^FGsOSt@X8R5tKVBN`F1(BSNK88c7fZT7mj2&{fx`-erPDMC5TK~@ zXIiGL)=qJ^dWaP?pXBzO<T2T0mtEqP9a)<9K1Ey@IR-9N;Qh*O;j}2`37TvKl2NPl zytu|~v54Y#BZJdJCQj#yvd=DhXFYS>uM079cOM?ZXZIY!o-6G44gDOX*t<I)tUJL& z_wO}?Ho-jMEnM0MYXrFB<e$H)55by=DRIL9c_m~tAz;Zns^E?EyexR=dMpvE;mPsS zBmFICWy>O}Drvh&5N_8gw*m5BS!We87m)z%_}t6kW%E0AkpIhnU1&!~M~!VQ>t|=8 zQkc3$Ue`EN52{-d)WOm^7&Cmduf5QldETP}saLj!7aIRED5_O9ug8uGKV=BtKVpMB z9sJL2X41*;da{IT1<h4lyqtQ4A3L5BpRVrw#o9PI{9pfj47=pjo!|^0q`fHW<v@7* z@wcb~S1e1Gr91o^sc6`{%-V#Cl#onPo8J<Fb*MCalV9+{gt+vq1`shK;Qtt;B=nd9 z+PT?e_3t0`Kb3o-Xs4vY)tI}s`#}D5=J1s1XItcwh`$?y>lb~{A^7i!eSnIJisDk; z(B!-bkp=f)lNIo<ijQUBdFC>v6Q=e8H!yp%3jfRk+zHZvMsQ;?rVBb2gsq1u2|wT; z3wAtSR^Cjr`b$hs{kOEJhK~|MDO{*yca7M^PuI!I579KrY{{{G$?cz)qyOc5<<<Kh z%BRlL#-b7%{j(ZIyO&*RgmfA@IAX>cvwW{VcFQoEz7H0ey{xs9Td>(KiwV?NT*&|e zP@CJ86g25uDX%Xp{fAloFF$IQ5+G1~J0-(FT({1A{bsz9u(>~<d<$C8m+;gJ__a3S 
zgF`5oP{ABQA^&_6vv{kXSZM0Lz}0cP$?Emg{|=H%dmYH;nG4T|mim(ZHqnI#xZTBo zfS)K-l7_8~<|jxBHKr|#zYQx(AQ%Dd`=_8Wcy08d`=CV)DsFTr%}bM-In#)^<)>4n z`=X}{|C5-fvr$^4k(y#q83D$I^wYvWsw;->C~(PHw_Y=veYP|H*6mzQ)~2arAG|bK zM{Y3dbhn10pi#46%Mea|bKvRyVti`&)HBD1Kjhc8E8jThTZKvG=Ac~LVn{fInKAqo zZk`X)jH>qFuIEPl@>A^7ng5fp@9msrn&F!MSWTn9@H4t2R_(yo(3ph7@hgnmbXBya zd~A8$1-~O^>Xm6eME27s*2e$rtEkChSVAANx_mydeqy6WWT~-B=|yE-m_?mw+tSXC z*!I*$*SEzexUwdJ-|~5={}avAtnlNzr`zQxG?-dwFPr@9j3J}{k@F08H=Y|Gr7TC6 zb1=ldI6UsxK3K?Df?f@m#!YQ<ieTX|$*pG1ljo#~vT=F2X7K<__3g2Px#{m=V`HPS zM^#=0X=J`^=88rikQORpLKbSW5+5VH)$uUszggN7HqRsfsqen)AK5F!?|*b$$%_H? ztxG8t4}?P)*od_7XC|@~ClgA$iQOc=h}KwZm#hED7X0`ccczqJzYsw#5DT0>9X>$7 zH*HP!iy??PgVm2DWUZ}e|88S8z7=l7H+wyhIyk44A(%$|k9MJp@FUMt;rQn0aPerp z2x%^1j9m5d4dIa79v$GB)kMojP=xwYf|kQ1#72m0W-fC6sj&DP0+~wW*P0$m{K7ZG z<{49!G;Q7MUAJ|zMo8%V<hOSeujl{aAMN=KmBC3ah$@;4tJtrT375t+&iKVFM{a5_ zZn>yiK0%BYhpvo|(sD>B&7p!|4(yW3-%%t6KgKsGV($8hRy%{ixGcT;;@9iE?S>~c zwf?04P(5Mlz-L&<hnGiipO7vft@$VrN+PF`bZwHt98JSCW9RegI6hF<tYHC9EX9x% zDQzHZod>Fu6@?pwmOIb*E)PqX$j_eAHCCIX?rX)wP5VDIMY3t5n^mz7rZUc)oc9pM zS<|O`2YfHd;U|nK<~)IaqCymmLsk4I;^pN+Feib;gDce7ROCa%9nXUh-s`}UJ`m$Y z)A-(9{IigPol-+9eb$7#(t|C$(w_&zm-q?@!4LF7Xb&V}tDI|<0~nSpl0#;{x6lxv z(bZ0F8BWr#m-l@rYFu?_TsV89h4R${8;&TH0L$`?qMaNjMe+xSC=X25bA@#*m$>bW zXJv%gIJyuKKu1^6Y744_65;h+o7CUvq7<PMFE{NXJtyf+>cQwEI=;dy-(hH_l~FHA zhc_~a#X5ar;7Ch%GDBLW1jijuLQ2`f^x|$awlg;Ew|h1So-%$%f#jJq-m$^_*zjKJ znc-P5{v1K43Zhb-C5WkT(B_JYW|aRf*UeT;rHh{C85aBNc(<A2Sddy7pKgQ~Cu${% z1CZA<9EFRM7uz<AH-x}f>05w;H|#s&rR%HUs_686QgLR`38olHVv10pJ=$oc-%W>? z9^bP<<JB4Y`P<k{wHThE=*sE3lr1-Z8EzTZIa`9dBrn!S_;>p2m)}<=l+UV=ISTa! zshO^G88ZR3_bvcbf>Tguco%@+66Y0l_u?Tr74krJA%kogPiqWjn^LZD#Laz?j&C#b zh1~zRVF6j6y5GAO%MJhOFFX3VN$B8fr<PhHm$!M0nDpjj=}T#7BxwAoB4n2wWSJ}s z0E{y%f6Eev7MEn;)Sl6c+cqQZ+yw%D*=t@rbgnu=(HQh4bf&>P)J(TxWFzy{8}bL+ z$JGZPc|#HM2Kd`+Gm|OlDD<NB*!JT(*Uy`?-?7z_Z87<R#)!Xv#L(AyKZ6IxdDRAY zHHdK`9CHda4WbbC%>0#p#DK9q!R}^nuai&~9tY17SUqQL&pi~7WL7W$QYNaqMY+%! 
z2|lEaG+bQ7+c_t}x&H}WFIJHEnzwS9n_B-mxw(~V@K$ubt}UW5SXR^iF4g8JAKV3K zoh>W@0e;taf_FOOD`-@y3Q{GhWUZ?)zM>V8j_+ine+{1C4Z+!Mj$S_n!?UUx7{Y_9 z^{eKR^!4?t77U!7oO)-KI<`uSjGQ;1rKi51R`lZ#D|kb?Do_{uc|Nx(1<<Z@UB)f| z@)gsV);BVGhW(ld*#Dntm@W4{V<iQRuEsk7F?0a!a7~mtdIbY6kjgU`_f;SjDy?*y zTxriZY>U~Fc}54B>OJzCL$;H(UP0EZjvT?PzlLSM*Is<Dp<A%)nZBf1@_!P`67m_) zGV<@A+*Z2Tm>m5k*3d{eX!8mjZRz_@T6hdel_Tt!``O7mI10ML)zGP)t6)Pv+=Xft zy=&(;dCRHr)(@GcL`qA~0#-c?ppMJEY%ZD5*Xvn96Giu&`rsmq=7mi~{e?y86)Kf1 z?XC2(2ZA#IzR#>ijCmkY7nSV6d`2zWOxo-l+nZ3?eNU&g$du4f?=Kif;{>o94Y;2< z5%(YU+7u=;aWn5$2-D1P+G1~XX~diI5n@zCbz&@i^)jTw+@Hs>5uv%wskI*8+>C+> zN$VtkGUyhjPH?hI^05OJs&y;f7-Z2ezBe1n%D3|d5YwmTc@WObo#t1Km}EM32%jD7 za$Mc{3{*qGUGMQVetg96b8Z%6lLV_Y*gW+(_zyaO&h?(B3ziKUA~FIgQZ7i$S!68j zQ~g`Men)$hdj<4dQ)?D#R6U)v=3f;yX_EAcTnY+mvtaZY*OHr(&bvU96SY0_4&l>- zIS%`Z%D=Si^w*qat@M~;X}ScRLyYm&V>b;CY{gT3;}!}}zh~CUdUc8-P}71)P)8RR z1sfVXkSR+BN%Hu6Q_RZA{Ag3E<XsPNX85BuCdW7_tCz}jDoKRy?$x4;X#5U{>K?J< zpP?{+v}XEXcH&*Kpat^x8Wf4u#wIWENzO9OkKt2-E%-oc%!f`!3i?^ubUw?LE*HM~ z+M?+0MD4&8pOE%E2fYwbN{fxy$|EQy-`mI7c}l&e?&ctaou??h+X=vy8*g)bWPfv3 zTJ+!2(z`qakEd3QNzItOAS)_d?Ek*~TlmG^xPQ~4sn9J-`e8^BBK8L>JIws&{d+X@ zulCwA;5Q46bkEiM9@$e3i^8yu5GS~s4*}qah>|%K%D;D&PhWt!E)0}rv9IwnXElCJ zmftFtzboV>#6<o6gW<wpRD||At9_E}ZYLR-2?V?v@f-fhWD_Zq2a~W%<dS0ji(eJP zNRJaaJ$nq7vUfP%@s80B<=7ik!WWg^vde#XRT>cnIO!4b;ZZ?I63n>bgFK7Cv}TjG zImFg^gP6{94vxkfjE)LZe|^c0xs02lXI!(Dxi$JmUS1}ALbFWEX0N%BK`yP?qDDt8 zX>&t{%nurxGk#xH;NXL?U2GQ^Mi?QvE3TR;fM1bcLR!Xi1(yn`=nZ{oQ>$yaGmR}J zKg#Awt;CxZji`Al2+{Ocg>`thN<MzHJMcB&@~gYKSfr!RQ_yI;??BBq{C#w_En7V; z?YRZz6x8lN=4f`+1Qd4Ec_3`eXAS4InDqzrSW)JdO9Xjzbn#M9n<7(`LNJG-`U1FM zAh@fzjy@m7#7pUNlJGY6IX+s#18^g-AET7m0cAW0m3+}2X7ABuU8l;L9?Qr^&^qfA z*naK`Lr`<J0-O-=F)C&VAZn%`d)|HDR>t6q_WrjRkN=jJS9?SZl6ioqLKAQb=y&wT zB^Mw6(3&I&Q&V%Z=qF?w6vj8?!4#c<QUc+=S4k~IXhM%0E|1qmN=<FT)pG|75hUwm z#eUdS#qYM+uKS@1;@?`5uKs?VdTV~)n0kO)O4?q)!jK*`T<PC~&_H+3Ed@IKy3V&p zc{4Z(IA_mOdVkz4;P^a8QfoTxB4c>a*bgmhSUI4~EkUB+-luC_DrT;%;aZ*Ircd6V ztlW7@(j<dbZYn%=yvHy*Kgc1ox9OGT8-$M9Gv>laej<c-pR;XI>x~v%zaJpYOkSYQ zlq~6JE@PmDFClYLa)WWg?(E?MgHigAWD7ke+&$p#%Z5WF%V<nrlGjhb1rhwCexE_V z%e&2V2d!BxhEm{%OS4KQ=3i4}pe6QWm8n%t6=x}z)-1R0X>YRc&1N#0B3iVc95nB| zeko!5Y4<s=cuC>b>P?eGoh9cQC7=+sys2M!BXqk$V;WGuKjkUUpSQK1A1B|y;IIBt zp4T;RnScMpNqU<1Rvj3rOHtI(ut2`#Lv7;ROdIaKjr00DpD_ti-AC<mEn9cJ85;$b zAJX#-if{G(Mjyw{ZvK@3B`+tRP_F1|YaFNd(IzLlYjfEDwKr}$@*xDCVMrHXutadY z-mcoL_;+7Hn0{Wv$U)xg_bP1K@>I5EJs{=Zh7O*fyP%@cY$qR|(3qaN4(JU-Qg+3T zC89I1B%vDZeS-8!;>;kKPZ|XR*;+e`8(gFdT~|rZV$<GcP^Gxtj9N<BTb5!^R^R%j zdKm#8FVSG^<~FT;GsDMn&P;|tQk=3+^G?=EWLQ4U6*QisD`-#=9Rk}gHWif>jqU9l zdwb!{_Ba>O#yhAY%MEru?T>rJMt+F(xkKvHPDA8%>qr*1`kZ}1nA+gt10tn1Xxq%$ zH`spBvv+XVy$xiWKGCbQrDz?fk@-$a=g02vjuRPnP|sh#Ku$EPrT?Gd`Ngi<<26he zFIP#+QU2n-wwK}=86684zB?AD9^pfb!|QD0HTNtxPR{<68=j1c?F3g?ob~dyWYJGi zBi!-uOh|q$7LDjaR$+<8mjcO1E*{GZ9;)g^J4V?$5&H!i4><h7KnlItDI#zhVo_)4 z<WvQ$vqn%^0JWXRnybK?UydUR=me2(&!>}(T>NK)(1)!RR%J!;6*FOJoh5cR_YthT z<S}J4A1MUAhb$WM_Pq@bOrgAj^z2FPlsGhCE-Td}-t;{F5$8xNsuLACfl|oUcwTsE zsm+%K@{YU34rd-A33}v|j<CIle--4<XTBJ5h0GTUla4MfJ$X_w-ymssEn=VF>&c@T z-EQSe=HX2D7{*<Z%5a^OfR+GxGk@5v^OD#h5#Q|%y^pOHqVHOKY)$5DxyxqW?q7!B zAbq)6ror7u2ELWRTLt`}^1NM|bVEXLUhV^4ifFPcsiUJSWCzE3SuS|eP8VODY<tUc zjqpK9X<B-^6k<7oun(}&5Im;5hXAX}x?f-^6CcROWI&|DbajR|{4wy0J8usfrSG3z z*EOXltfDKC&rw@N6r0as2%=Wk=)Jt;($dmW*C?S22MT&6^n;9tG+@2isPU>?X%Y3N zb(7QIjtnpPxMYdtbO^QgckE)|g6y5UuBIKrhz8mC)JIcHb@LzKE&{&)Xz{?glU29v zJ$E9rR)!t?ZW;4|S5uQ&x(HEd_wJ>%70q}!Wi0RER2})@ZZTaVH+Pv<DW^#(%`IEg zHxgi&N+<-33#rbxCprA@(5Ji_JA{EzlL)Nh^J;5Nj<|uFp){`JX>(a^4x!-Oyla57 zo~RwRe<cdIxosb4@e=;|oF>pxH>jKVNpRg;$dlb{&4>SAFF<l~g1O9sJ2E9hMh|>X 
zyTr)C`|=<wp82h*IB5#CCTCe4W2n*;BsJv#Q04^KXP8hCothedcnGwp%VjQg1$h@K z^Az1U2jck2o);^K&U##%<T&_5!?KR13Z$3CIPuNHjEVU!N6})~bxSGz5XJ42dvaOa z-)zMq36i2d-lF&#QQAI{U8X&V$zs{*(!1Nh;=Benb~|+ri>*=qlz1duMdp}F3Kj79 zLs<LvqpMU#FzCNO4_5z(Fulz97t|QbhPIXQVVbn&LQ3y4z;yZs8>7s@a3gQ(l+8u? z%R+(2Z%zUW_&cBMUS!&u*Ew4beFxZtA<tI;{ds=#Qk0O|<yZe`FB&Q3(rsE7d{{Z9 zTJw{3yA7<E{L^|E!bmU)r`X0Y#Zg?0J(@DHlQAI0iKq+FsOi>~IuTmL-DwwH#VF&l zG7`n9DoYE8K#IdvC$)s$s~ii+zlb!BzlNzZ;uCdUGhD}23_^5HYp{j#_jQ|<bQODI zyZKO;pU|I3um3e`TTB0B40bd&uB@%Sq6uj~t|~~&@;aV$&z0BU9yD|N$+LNqhmKe1 zX_V(pt56jV-Epd)g7txg=ZN+8acDB;-!*u6s4EQD|5fJWm*^8xXxh6A1WB*I@uNjP z5z9IBp4UXn!u~F~C??E?pVdy<*s;9sXS|5NS4Qm1gvNDU8)_m>&cVf<Ed)G6a-OL+ zEN2W?CCXOZ-s~&(A)*cno|5O^yr5fr$luS5CPqS_bP;33#9IpAfq^~<Y9OrWA$vOH zwXM;9)8G*CFDb}J1|C`r9+^t%Bw&_|GY0>3zG5W7HjbLyVw4{D6Un6;Xfz=OJkZf) zdHf71epi^`hF^fOaxH|8h)IoUOtDc!{+;0!J&7kym$8ad*{EZD6GAmsCkO)$o{l*Q zTp=*uvQ5JAU0{2MmP>sFaM8A8KMjZ4kEsEFD%8?-knHZ2tF@-|njK?iI_5I+2`+l^ zA9|_xFKugO=C;W|w07U}IzE3z@0VhbUD-NIJIXV1<T>zihi|$bChg{Azj~H9JxEs1 z*5V=-Q7T+9(FpFV)F+8UyG$Z?;HHJDL!Gu7WahTDB>c8wd-c;NT4Tjuf_YLCe&l49 zvwNG46?ALMEnu~JIz>>TDyBy_S1=CZZD-uBVT(cj>{>0n3+)Mfc<kJD%(vmcHKcJk zrh)Z@wNK99F1jZlnKB~^l3(St6yLSJn$+hc^=PD7HLa?eqzM9^eKv4m9I6t)R~K_q zl<kTn^5!l@7qWGQWFV5;CGHB4L)Ib`0Nn>Kv=AZW_s`Jv{l}7}n^#%?TNIt7gHIk) z3gx(V5d3l1P=r#18HD_uU6V%j!Yi%nXF?^L6^TZ*6_}(cx&%$;xM2EInvcUm(t}A1 zrFVFB&c2WP!dvR-db`sl7i+z}LX$S#8pFyup{J?wOTHL)#GYDSF8$p=-jkEaiD*q5 z5@E3kc-@1C-9y0z``)79c^r$0ZCUq0_RkVmdB8&OH=$*MpA#w%Y-3rv_bcpbQ<d!P zxcsjLCX4+0-*H5`-O88LR5N%zJz#rYtr2JYJ|3e#7QLIN|5sDl_Ak>07Yi_sN-}Nn zLQKZS156nPg2rG8Nk*O<&=lthCM#Vpt3sfjy1=yC=M^ZO6X|cwL9&OO+$C(m8H^c{ z@Ff*OEZ`5s%XvKpU3w1>4|BCF{>aLPXDE^*bn5lfd=35*dXY#fEF>fUhlKh=ue0md zTC=kNOhgez7uC^lT?_yhI7;B%rO2w}*-wQR!(wU>Nv|eaI^*6QA-aoyvo6g&&M?el zSHfoOj<Gs-XCBl)T9YgnuMWa@bn`+;?T;?)*Jtq#oOM20&+1`oo^<e{y_61JO%+%% z^enJ~6pYGJ<*LaIRNE-4yJ9MH$A8Fojt~3`nC=U}&cYEG(IuBj5naw@uu3C|EoP)C zjrE(Y!%QUk<yBddzJ8hzl9SL_P3sw#=mZ59FmtgUhvv9l^oY!2>l^4hxtycZB(3j? 
zFI=jdzc&4eKNk}*^H+t<o=Ferx{*uj^Wwc{oyLw@-I!;xaVG!rP_)Vqil~kC+OLm% z!uK~VxeR~pJoF6>PcJ7&m#5q2xc`CD(r<Vs_E%(TY_~1xrBVqM`N!Vq%JZ*Sk-DUy z!<#9lU1$LtRPnZirjRkRW6VDMKg=Lh(&>z0up<veAXr*L1%Jv7X%{o9iZRM1*RG{M z@!XZqiH9uyH@Ci_0U{p@Ba4o`)c|T#=bZ-soy_7xI0uxLe6qU}Gj#1HREy-=-VnEh zuIJXPmpZw+%;dYqD0WZuRpY!`3bh*TD)`0vWlrkCPx#P4oXYF4bBE-%8nnLyb+yDp z*~TPVAlD+A74W-7S^t|*b2jUd^1N&Gfd2^u^CsI*xR+`s+0Kw6ivhaTf0<U5xP0;I zcvpAe$Xplc<cg6(6vJLk$t_f*_EUYZavSISJPvHi**da!E~pFJv(o3K*!)id&h8^0 zw~^Ks{@>nCQYteUSoF6e?|f-dFH5^hyV94^&`U2vs+sH92Yt%w)J6p4DMYGU1tu56 z(tNX|4t_F4<Q}O3Xd1hB4Fb3>dOBAg)SbiGwPWXQDHf*;F&gA${HDw~Fn*N$t?)ok z`g0PGbBlYvosDqjfjl<~Sp9690<R-tKwo#6L@ABmE95)#FgmIl2O0N%7~ihr3K|51 z@O*HGHwW=sOlj?`NlFbWRKO~f4HYT2*ce*loTQPB8=bbejObls^+=(9gAa?blO@0% z0VffZrTtS8zk99Ww9-=2$Z3Unc7yNi;;{*7vXrLyPp2HjA3{fWdq&DpJS)9GK$Z#y zwi!0$%ahZtyifVo4>;)O-RutpxVc#Ccx`=ONNm{v4#o2QcihJ=b!{@a!pZI<y%on6 zS@h4+sotvo&S(d>di_RT_JcmX;iVvQmt;iGDpV!q)ob8le>Z_kLbgQ1AvL2@R)??= zvFlon6Q3@I80P%f{j#wL7&elH#&4bE0EOMeZ~&U=#r4;nyUyJl0)eGa&hLb-5!yCe znWcYi-)Ih}P40@%VyhSGPQ8iR%>SIdioaT<L><u^IsQddO-Z-wyEh5}7OmHrYx&4u z0u%JMqj%u<l5A92ZNt9PTl0SJR9({R{?lJP*%<c`=<pd4fv5Q|c*H;J%*W`snFYFk zgAiy{-CWSzoEBr94Vy-@oS={BB~(Oz$Rx4Ank8@jG$xr-CR-RI$Hb7fZ&<C=0*LS_ znbhg3B@lp9(`!v>QSIs{@SB%p<i(^_nPB0+m_9bQZ+i+@Uq&OmZzM{J8yc$#I+xqn zxEedjU&eBJ^dgHdMY%DrS9U`K*LVM{2ukQECs~a(57}&->AnX30_g?>J?h~XZrtwS zM);2HvmWq28$-6Fw)OG1SoG|YnFhlVV@{h?^zy!Ji<N-d7XmYrKZY=Nu^nPDm%S*2 zQ`+Fc#+CDm!`h9=j!ANLIle()6COzBDk`&4pI&{92x`Sf_|z8MGv+y6zF^Gour)8^ z;P}~tUE6dS5oyUl3t+R|5+if5-joz=mA-pAWt=pkcI+(ufa*Q$i*CIhU_X&H{|?a$ z#X-{0fRtC~H2F|}2IT+>QN*Pi7=?B!Tf-lxH@P&!U4sb8Bd+b^5jMdgJ<~Jiu)NV` z(6n1m?-BK|Srp@sU$3$=usAWWnH)`Q#Yy(N)x1%!M@YkicCwh{t*CMHw!X~_&0q=1 zQ#`ECvtuH`?*v18Vsjm*+!Rz2@PEj`W^_G{5iC9yw4+n5S(e5{bzQP&31JGaxnhmu zpLEmvT<GfR^cngMm&V%;UFKd8Hi?Tf(Cfv5d8$KkGbF`P&W^Xe&8^Vm-T?F*f7e^Y zU*?hi92yQ`>;$<x0dHT<>=UFPeh{SJyC_r_0;r)x;f{Cm5@+-_Muv14S)17Mea?GL z3LInQ8-fzJ8jL>XAju%yi3TLv6;ZmeJtm`*mP&KS*w;S?n1>rjJ2}l32~gY&*7d`G znhP9rXD+llvkY=>w<O%aB~+9(2u6yh#V+O}>z0psGj%l;hL4;>Yf$VKoUIo&`d!-j zqOBk}>-qz!;){SDg!&VD+@Z9Z@z}yVdP>f+_*}Q{cZM#>T==A3AYg8dZu!U;lOpNl z6l7uMBdm!J=CYQN<n_CAE}CYtfkXZssycW?Ryj2;bUI`JQS<}&Ueshg`FD6Tt_}GJ zpLnF?m3d%|`#=QR)PpksHRKn4pLA@iLm#8Y;3&Uk8sRom>`#i<le25Ql_gtMBc^eK z+aUi=fJw_jmHiA9@R#zc){fsGant{tE=4Y&uL?}<ZK=Wic7)f^(0J2_xltIEw;;a1 zl}jCC8@cYcVYznYj4m!sNPtq^%4f73HLn5~TDCLH1xi|U>m$QRJaVy9AaZr!d(lYJ z3)u`3nj>K;iez{6_pdCp7CE6%6)$JRlW}q6Lcy20d!sgQ75&o<_0U(5Rc@ovfL${) zn)`)?N+w5^N5&%=++}-8c^mBt4hIBE#ZXo)90Gh1$}PFyf=U8P-jD7Zg^&!EfI;YJ zifl`3Yd*v}`jCQ>$OrqF;TeoJK>w<jR<AKc#6c>YBz}q7VR~j~H4d>y%)f65S`phT z-?YtE>jOWPLCm|Z+XwfC(fiP;(rQBTN=#upAd6Xea88sekkhujfZ@&Mh>{1AX&**b zj@U@ge`8$k1JO<19c<JQ=Zsl0+X;F^i*EuF)V4S2S%UdcZ|n08T{~9VU`h;wanBO% z(UGDZQ5h0V<szx)VFxL&`6F!)PckGjXz69EmxSN`bMtSBX<f2_&*+Q7cNwHEnC^Lv zY~tx$Kawys0NLfSwRktCq}Ay*?U;6i`cZ4dd<h$%CYV!WzhAg^bVL}*hS@`V|KsED z!W@P-rs&Pu3ey~E4cSY&t|qBM37wmp1QI&5mU4D4B0A5B8V=xS$iE=6ii{7`mW22& zdwZKLsgos}H-{{*TA!AD)ZKa`7W5Eo8`lP5<5M>;h|+hsrPtN3;O(AO*G=8p9ZB9@ zB>0~)$dweB!yW>Gd`m-39st0i{sMplgW07z=t4Kck*PN~owt4>zZvbQ)$l8$bmD7! 
zL|1yz*@sN|bO}<5r&&uaP;%9(=PL@%2;4M^NCQ1TbmzT@PS5_M?F4NdXMXwB^Zpt) z{g6-#-wIt$CbFA8+c$_(z(Dx7wRQXLtqsD8(nTNhX!T3(4*OKUexah^m>*#!b<-Gk z6CBMJE~51Q<W7WeTq+`ys@YUh1Fdc6(TK@qy0pb~2zLvPL5YgbshS0s=+$Zluuiwi zu+r&AG#0iM*($@3S`#%}$bh9)Oe4<YPv?mbH?tXS4jB`8li5}*Rn}c4QEFCQuc=nl zAcYVnM8o^;Aq!9&R9wacfryK9yA?sm`Y>```xq;QJ`9L~=V45-xeO(x)`a(vZt&*q zZQ7;vFv9W(`B*3ZjbE`RCpwi8e$l5efo`r+y<k7Jap=0?Do|xjQvRg0EpYbshiIiS z)KtN^ZOORY{A7^%sYT}B$l0_4^IuZ@0M^Z5kkgd(Erm{*^Gm_w4+j%0w8f{@%qsoU z*`L~#tEwveX9{k7!bn!rzo{BDQG7&j%k!jara8L?wT+HN+qpw`zNE2HzZX{1GVe-8 zmTl_A&>4A%p4&6KuF%3WrG1RWK`?EM?~Y#27~JX{K7H`$%uZbCyh`R~U{YRisEcUX znnanj=@j+VI~A=jY@Z}M*pves79egH4gQ;YG|zn}8V-wrOP_Kt*1_<?k~%2sw*%qX zzy2<%%QqkFXoqb2JuGT9$JT!^CbDm>Nnq?1QbDCPl5Fcq_aGBYS!Vq>L9_T_l%rUE ze&L(C9Enk~8|R}!NXRyaX6E{h2r|-hzRl`-@rxZHoxcltnSNy7r?61++0rDKP%OW! zHlpDe4N>Y{1<sayj#ZKpm)atOo(y`01o^|2H4^7J?velXOu$3c0=b%<5;_x8ZzYjC z7(z3QuB{i>E|ep6>+s^mOL-OM8x<Ct2f<lY)-Ec2JfL@zg^+Ks@+;z9CsA})y9k%) zVGE@Fx|v^kgDZ8BwajwSMe+6wMXK_dH2n6(S*!+XZ$Y<>OV}czVcm?~59fN3fOes* zxZ}1X59wz6$n;6e@?N<J)V3|Z%BH(T_nynkX?35Dzpsskn*9g};B%Fh_U;jTb3lKR zpr=WmLHR9*;PHDK_U2$nnKswkk=+2?@^4}BC+%x{m#81Alt@!5IVkUHwz+jbkjfxQ zUbL;J48kdL3uO_WG#x=p;p>{^(2c|RtI+NB!ee@y_58U(+1!467HeJW8*{g8572O} zFdJx>*M`1&KyUdnLs3ex=-uorx9$_`d4LDIi{)wh<FuiH9Zc7E3vbp#z#seBs|a!> zBzB{YEv)!!SI<*AgEGSGGPuhHB=NuHC(wyT`NDX4UYG=`lfHJR`(ER+b)8n*PeSuy z^Cok;i<-pD29yo{Viv_)kTdRcHjqJcUlsDmH7iaS1JL<|dVB5_EkrIwIM~}AV5hI& z+>Vf?9IF-(haN(EocLh7H<cOv`#OBuIiDuh=h*oy6@5N#SKiMYpU%?$!@{{tOl6Jz z#`0sWZ68j|?U};!BJ;t#Id1$;6{#^1oIMldF3#~}a<7XZsX75k?wmm26j|j5Uipel zZ_Lz#$RFw^Q)MNPG`3*;+vr$<Ap`xPEuh0U#kBmpL?f7&iQE&F$|SRs{@6q*QAz5` zckd&QO!vQ7f_6T#<|z(_?~VN6y&b)^G8K2oBZPcli*{WH?@PV^!E_NEf;Ygq3T{pf z4mvMme90{<&B;<j!HJZ?=>Cm2##YI~ORpoX`38^ofx|bFqQ74W&2f;{8ZPD^hsDW~ z(6C?Z`uNqkBem?_HDRicTpYW7O2OOaj60Ipk*vld&7RbAy5XHLf#IiIy%Q0>K<Gu@ zZMjy~M@Pqh%ebu!JPll*hSS#r*{2f-+i-LexfFEP9p~92nF-@Oqz?X>Y4bP+!r@37 zI+Ih@opg*u%<Ga?0!n@vi_xrvK{(rA=Ybp6Rg*8yC7Au)u`1s^B-61*mn4`q<aiLB zvk?Y`x6kT?^veme5L#62$-|O2yNoU?!XmI<%Cz6-nr1umRf5tsg{0=mlWKPbu3zGs zv@Eh+J3Ss8u)4OVJ`8)^%XF`66Bkq#m!?G=q9F47`d>;ynM@D41F9ldgjrP_x?Tba zuV$WBG=><5#RCTJFSM9EknIf5){T#PS(8TfZ-qYz@DwfkEu52cGZ}eb5YV!SG~X~; zdJ7Eh{Usi(-Q^YX@a@XLTeCIS&U(7gmX(O=bDl(IsfECrf_hd(RQ{gg4Zj&I&0a~Y zL?EI8jh-DBYX;l?=}iKKHt*fP#}v=Ozl0Ynt;VHpUP3@;vg9c!XA@a>t>Ph~kX9ob z3B>_~P=tz=QAJa5$<X%5-MAn@%`D$rsSqFIY=sn`9&A0lobXbp0m57-O>t9<Jgz$B z*5GlMCvs7$YFa098iMEbVj0+;^}nN}xJo9quuU?1b>)^3GV*)P&2E^bzIZa6-yu2> z7-@z2l~&q1yv8`nyk=fGk*->wZ0W0WFhVb(!?c#v#FsdOs;u%xY!vyrdEaxnvkc-% z`}G6J{l$d5n_}+<Cb!-${fr##V-MPHz+#nKV3>F#0b+5!@-+m3NIu!C)?!r_g4iNP z8P07KocCxG4fysMGV<*7$iHFnDR23NtV}8=uq|2c@XjbGsNnk_Z8~KB5383h60|HT z`fUI+BndxDoH1DlNu05NsN##5<SYONeR(QYPUHNp39vJrDwa0{mY-Nq%LURD=(o#2 z*X8{Exi4CWY=|PYBE0+Z$5V<;>-A7u5oQ&sS&w*@n!2k4&(E6)?GH`ux3+#yrGp6D z;$?%-$O83q^-21BRL?n#0c55MDH-<PlO!+IRNbQv$?<sgh8ieUiV<TRC)k}bn&t8& z7kV!YD>ct@*&NCCJG_9UFWISlhLUD+Gw$E4f37<qS>#akAqrvls2s)4Tk&Z7Qq#*B z@Wbhp#_hd-!;js9!{8EHPGo<J?yHJ_UWRe7Wp2}gJ%aUs(Gi9PnS!q$RD#$;DT<C^ zfy?X2B{+3GT*#pxUDu}6A5&f(RY8y!oipNCr86M1wB-H*R}7|9;--+&hLO)%+JL-$ z^XmsyzP&m^-1P?M_J9oT(Eil-to&}UwEPk>A*j{KZR6><yWrhJ;QHyGF*tN*_YTh{ zUg^ox$uyXA<6dt$Ge3g&lPy(9lMNA-sq`@ZvTsI%$nzm8VLaBS4u1^|T~;CQl_7HR z$RM-~iC>lPQUTOmp}S3abU}swX6-;@+|3TgY62O?jDYN5VP->aAqVfSrf)IJDL0;# zEm)PMS78yyMlPni9I5Iy!opsVWa3h998vJagUB*A<`M_y?WUh+c~-|S>;t_M=iP}3 z&)db)>3@{y4>I+_p`z426%?TQ2>Op%K`Bx`I#n$V^`wjA0%^(T_N&d{-7nZ(Kn<a# z8a~LKE%h-SXQT|>R6arq^PEg~%T*CMt_yp8>;m0It~22nz-^TL=Bt3u@IB9DWA|y{ z#sOYrwqLAVYPg%K*o>+^Y?ik)y`ks|`mo5Nrm`nT@7Rv^Kr-2klJ`?7RC+m*wW+;* z%h`w5&yaBT$lza^>0qj?bX~UdNHYv*ina6wsmCYjTwFNFP#=jIpCI+Ht$e4)BM!*| 
z?SQMkLWeX2^R`FS5A)*c9MMwHsdi2uc>KRCO5{hW1IsaNnc~%j@3aQl=uHx7FDk{m zYKq@j#U;ormN+fd0MxG83vb%bNr$_cEvikxPKro6HuNnRlxy9u)wqAraTUunj|`%g z%|1j}K`fkbrK;%na82_om>|tTEy5lRXIKC541w3p(`q>YA<dMQew2rhq{_Y-r5+VA z#&6;&HM(WLro9E>`zp=4U-aZqoS+Lqr%pJ1T%%*FC=gbg>-9xs#sRG=OYedz)NQgE z$aEFPqgOPDabg`dKAA1yD(ms*tbaW(q$XoLr6Jio%`7r=QU-@Gy{;JtfLrD=`A7N~ z;WnH~e+Qfp`auEOlZ<ZtbaxT;R9y+aIXl`Se?BU7uC+l&4%3CXHy~iKx8^YZ%b2@b z%VCjNoif-EEM@*Qu;Mt7Cz0yPK<XPw8M{nvyp|6D=ya|+9i=EnTj1z=J<v$mnefDI z`g<lHKRgRgHa42s>Y}Ju$R8=4@<~)ajpqwUftq)Djjhgzg|KKdv;S|#m*v|?4;0;Y z5^+27f$H1}QagDUMbhd%S9Q$2agJpng<1WzeR2M1dZA0Post+2j`h~1XI&FXlF*5< zQ8YAk%58Y}Z<brtUN$3!Q9g@lM$NI@*Vl+3C4V`*KlnalKB;g&7BaOB4d_y|WQ$_S zo_NpLK=BGR*0u6^{#~TdsVNRWw^(n2`@|gG_FEmbBR&?3$&h2fS0%<m-ZdjL>kn&d zpUj3%GY*lUE?q|Ut}t~9tFo1h>6W<KIU8}fyeK}c_y@6!MxK{m243dgs^I?0&`lOh zsKn&#VD>)wzl=vyZB2r3baBZl#eQ%iG*qZ8K4CqK9ThcBxiSk)aKJ`xZeMr&tENFP zqhr@D8=Yh-UZfoLW(crZ_<4xEJBF%oSK(zY&X{DyDVA8@@M6Ll@%g2T&RPOv_EJx% z*`{&I9r_x>o>lPL#p}yYv^XPKgS1yp-bQ{su937?RMDu;FNK3kKF;{WRFMSkbD>*v znM?#R0T~N$)=TS{<4uCTxl+RFL_3i?k;`g$EQ<=<nt|6#Pc}EJPERM;Pbc-GRPi_I z^mtW$XB($L3Sm&UMcvkvyLOeDq9M0Ua&+u^#OS^aBClj(UGk8@FM8M{;w4u9BD@Fn zdIPy;_!M0C)DoK|CX99y(c@<+gOpK&I1?KT5jFN{TrGdo4f36}GxCJi&_;O<d~3e~ zjseD)Lq_zWdxU0v7#Zp^oLQD;@;1X(pSH<>^iH%~hIK5-4SHCiKH=S}8g%=YsM{Gy zo{B!G{mpg|LS6JnX9F1|#h*7=z1m-yjbGjM2%4`qf{x>yC{Fg*?;9GBH_#ETL|h#8 zgaLKL85vNl7O~63Z4cn1(kp@9$j=g!>n@_oq=D=WWVx_RGB-8)u57Goo-a6cCUS2V zEY54K9uHGOjX*E@_`ly2TScXP5g=;87EhuOAW_^m+19KaKQm$8CqBgLs~yhhBk2Vh z8h*g8-Aw;ZGH@w~@E4wXu6Xj{nWnbdudS8j$t+gK?IbRq_o9HAE`S(=Y*%(HS-1A- zMdr`_eB?4GglM-)#x98}OuLvOdemD3o9bl_2Bx9Cj~vevcLT}434v$r<R#g7D_H&! zEf%|_F5vA$_m^#A4VN!eP|-<<?qlEm?2x6nolK#W3Xw^HNpn;GEk34-@b)G42hrJa z`7d$G_}!Adz<Xul+DoRSJcf~{uF7{!aU9C)##Q*kR7B6ED2XNbr>gwmnUbN`a<6wD z<7ZA5@u~ZEKo>P8G&woi<CTsX2I4TPQ5d2T6-o*Ev)M~G!-L#q>dK@JF2O%5AY#cI ztXiYnv#|SjR0nZv;pWgq6fzvc{#&=)6X7XA@EAUuB+h(V<e->lKz_(4)x~Dqyrx^H zC>bv&;M);G9xdbY>6XY^o*%9wX=&`02~#IDNH1Oxl0N@m04~&H;(J?Slc<sQX)NQd z$RX-(%6DSaK?<8m)zVeW4}OnNR~2@=aimRI$dKlAjtylRhcd&9C;yF;c)K4o?%W8= zO)FlgUx{cT<=Ly8W+@^)F;%P#1FZhNW)9b%JgGdXaRUloX|?}N7|u0U@MK3{-^Dyd zDP;?-dfrDOT5vFIY|r~G+?GYmTw9l7e8qw~*2A~|6*uC)V5HA~En+H=oqf{N@$lyH zGSt7XO2A>_@-WO0o2Hu*;Q5m53e6Xxt*B)w&jfkRBcUX*B`UwJ5bc&^g*SWA+v5Bz zG>dIAPWv2S2IAgp%8L>aXf~&LoZvhONR(nS_#kVsV7#E~CYG<7A}2TT-%iMB!GjDZ zFQkYd;sUoIR%QNg2_s0bBJ27SA$uNMK-kZ`+G*PNas$>Boo_c9eb5-aK7DQ8&K>4J zLQmYbFtdd#QvYEz$@<{7$qd>_Uh3aB#(C`Qd}C~&MHVAPhUFpMjz<^-T>`WkrPZ3F z##qa#l$cgYX!V+<X4K3h(Xaeo24-pStyTB!rpN)xH3nZ1tO)X1oOZ!NkRogi+DfX? 
zMJXn}fSKaQqI~QZ`6R?;mOvg8G1=?JgD<DJRc#K%vP^P{Z4>E`6z1q&Y-E%T=UU>M zxhT~OemX#c9d@ji4^!I?FQS|atUCS$rg1ploUvDieZ59PVhz~dw!FV&_3whkEU&FT zzO>5b?1s13b#YY8!C@^8QFq(=NfIqQi3G2@>^rcX)3;wUnuFPrauw=c8Wa~)q?HRR z)koM|#TLA_Y!K>%u6(G5A8*{C`r!e$B)MbMJQ=?URubg&VbTjx8Hue-^XOJhDszip zv~?~8rj?r0_`c?-`$G?g*4RLmG&`UM1XB()_xW)IRyB!>AWI6FEM)k7&jP~;S+FqM z$*aaPqWjq*rAGZvJVKOCPmdI#ixe#aCIgd&6TxY$oEz5ef3E71f>bfkJM9~K5;%~_ z%tMDF-B_qXrPo2bT2}ASATa(ghn0`A4Z+yU7Q#PczopY2LNg^%t+RN+%kWF5QB;cL zOw%Y0fdD}wg3bZv)QHQQ@`!eU|EGoXj%UO9|F~KeqXaeEB1WiDHLA5oP_@<Gt7gsC zUZGlwQWdS@W5lWud&XY1l4`Bksx~E7Z1Pjz-~Dy}-QDB;zUTY(zI)#RW-pVvNBXyH z&)!mGqfEw{#Umc1Q-DD!<Dl~Cxs4KsYlF9M_C*8-caDrOP_$>uhsVd9g`9WwdhK3b zgb0I$R{S2A%fXY&dvbLqj1AV8%H^dtK50nU&0+NI1weH>8dQ-}e0P-{edAWOEyNC^ z5nJpF0qnC<60U{L`o`<S8h8FO-{4^xZ}QSmIZgEfw3hda+<PmvNveEm*QWFxg{aJI z9n3n&gTR16@hlGd*~MK$Yho{{8uCSdR89SEQlBuGuW=2OqyD#reE9p%&x#1$NfCr- zF9rEOR~OR^2G0Ud&!b0n6BgHrV^2|CXf$jsUXoA1R)C0QM7G$3TED(In_It;%a%wd ztM<p;uys4Qsq<2nEdJFt=o~eW>i+TX-NFQKfTNlRDyn|C{#PjVJq)q<x7fw|p(D%P zdr1H029iRgrmdQQ8^;z|4r;GHyfXET!OVYzgF!?H^()BJ6>USup71X|N87go0z!B| z?3P|Hpf<E`c5aeeGs@kVQE2Y%iV3+hch`FY=R;VK%TnOjJ=q5qjj(<hj2jH<cwyCU zst;*1HEy6q%ML9lJz9>=^b2YWD0VAf-_xn$|60l#=pxQEg(cIYcicWiZ$Dzy$ryFp zH8^r*JQoQ*vTeSYYhk`nj2>kJJ&XlBH7K~f%SzNkEXJ474|yIi{jh_gjWb!%Hh%8u zhW2sIn+XmI;V1Piufo!xycw&mof2E3IB}lxhLC_5*%zgQzIlDsgNDtI$ros;n?bE7 z$29Vx`>{b;AB&<dntPk47p{t4u9n~<o3(QuDn>4!3%iXLB)!v!?y|e-J3aqFwOopJ zK`s5(%}8U~B;Eh|HV)0pZC)flTO>-4RFuNru4@&ZteS{jv?y*~XiF#h{PDH&wAV5d zqLJnp%5#4`aF_|eSMn5cm>$p1S#qDg-UB+mV6vLA>H933<X9aB&ivwh-dl7vzTD!; zL3C~AZfNNDDj_emdas8R5w^cmcRZzBR|P?$<@^QX?cyb@*agodnE1-<tt0n;#kR-m zMPTjAdrAE6N&}dP?`oc9T+@69Knb$xlFJ`fHCrlbFuP8METbmp_&o>UZOhYN`@MQU z>8p8Pb2&mp?#2WR5g`l`B{{I+4`~*rgbHBc-Z!ANF04ZPaY0gf6P%3-=;PyEeH?e= z!#kz<)mG6)$V9ncrD`w7?*pTnGp^pz9gtj}*0(LJpNNYrk3M~R!Lx$AhOgklV*M4+ z7VfzY*j5uO-bjnA6@3O|6q$N0E6`Y&DT-lzTk9nZ-iuRh$SXMPGcwfp&b>4$!I(UO z<@O|G^$Gg(pF6N*=~nO*E9NcfO}{zi-YDbI8Z(HL?F66Y|EK#=z=*!*6SXpZ*n<r* zgyBXbWyE)TfFW{)D?eY*4(wREN|nYs0`rL=+=4--In5ou2<Rpg^s0`Gu1rQ!P+Cd; zXZ;{ye_33fBci)FvtGsR=;D7<r-}k?kKFr4OG}$p2mmq0XFanM8gz><m4I6^xp!j( zSw2_rLI9jl?=OzP>X!jWPqAaE-D6^f$>HgH1D_GF<Cn~qCa72r&D4f+X9j$NzN^Ek zYj2E&*MJB`J0hMQtpT&Q&{m_$hgI08tpJ*;NN#sjiFz}MK6nb4;Bd+(-6F{FI$GQ< z2|sy)fEqxK>o}1V#yWlOoo}fQyMK3^BBfWy#oHvWIl>j_xTTM~xR5)Hk<lAqlH-Q2 zrjErpSuydR?i85S8mLo#<T>|JM$v9HgPqI#QVZ8!ViH0CaNg3--4+_^+3P4+<s}<l zh-*1|J7Tair!nTxIa0nTAU&?5bN<U{k0C^Q|7#)j9H!m8;<Gq}@-sfFR)|i3PxqvD zR63oZ$8Rj9?U}tfZ^5ZT{D4YR7wN}|dFAKie-|e(N`LX!PRwfu7;T!p!bw1%nFmwa z7*ZcHEAnM9s%N9$?=7;)%DmWt$3TGM4qq9M9R(Pg&?jpLAWlv%F817dgRY&s^Q<Or z?I(iuqT3k+Eq{ma-Y9Gy`N3$QZ}OFUt+-`-_DV<ID599Qnvk5>uF#&Tg3@=jY`xMk z)f8rJv82=~;uiTr<;&+SLg(_NWayr45b-l3(3?O4QEk=EJMlj{aLoK6lD_3Sf|#hQ z6sS=oB_%zcnf*!@C0ql#!TkNiw`2EpC#p=++$*-*Baz*&8Iy=0%^kH#`RY$K`?4Lj z7aX(kc+waLN!VsiNr#bU_SI_^hGOf4s@;w;3YSEfq^up}9~0zEaUJzAHH(k?+Svdl z`V-v600oQMUr$)Bf*wwNY|%z{9)gDtWAH6gIsIelk*dsZF2>)Qmw)&t6l}=kSjk-J zMGK;1uKcn{qwxom@ol@(E9Ndo;eGW1>asuZedE*gz8t(~Ikx$1>srObs(TwLB8`Y8 zGYghZH~dAj|H!bldb$mgXa5~!Bx^Caqxe9<KYu>QumA6N%;izqx=Y<8!qqsnyCE%1 zY`wI&H0>eRazZAUB8SFR2)8h%fRubC_~LD}<Cu!yxbMEt93$W4Whyy2c`K#tt?;8u zxz@978FWgI1;|!h)k1{1^vcFHAwDfVPum)-pg?&bm#&!wGn)XAsE|!x-L*jV(8#EA zfIV-eITul(Lor<5E5ILnWSl=ezC0hy&NVbMy8od)!=cFxB5o1upD_1L7C3HHAw(rI zNC~Z&#`&0$4x+B|5{Gm!dOsRenpl9wr{_4?nWG<;IxAtAEA$dneBT_jMPrv{fdBZ< z34zYaE?zVRY^=9hYTf7O-;DD!tF&gi@H~|(AT<qpoPpKhxkH9O`WS?$A<4PLQmC3D znpT3<8QU=;-!rOJ>cgNrmIF9QYvSbA!#po`@|61os&yH#FD$TZ@=E`Skuka%SxL+o zli;zAc09kturI9i8(g0Irz`S02#^&GlI;f(zpyIUtYeH$M>~~|R#IUpvN7Yz?aFdN zgGiZR3zmN}D~gUNDTzaLIoi)mPZs3-sMAJbKq0%5F_m{uf@>K7(b(ME(qxwBbq|XD 
z=y)KFRMRo42ag<0(&zc-U|Ya}44>E++{F7_c;mtpGrmF@g7d~~fUoy4&cqP*4%e$Q zO=2@GG0Q=?^co!}TwBLreD5nvZ1}Z)r-IGSP9QC`(|S@<@8Z-trCgjl1^IhS1HT%+ zw|7`wcP#=aNPlmOF79@b(BEh-yFX~!Z8;uQWeRKic7m-EvSnJ*AQIGe+^_Y*bXg`6 znUkres~T(j`f_(b$Y@xiNRzV~WQ~66n^~+B3&P7bf#nouOn1l6Je`61s-*3om#yCp z4bL`h92GEHCdQYQ^4g|5Q?R@>5h}=`sDK&C!~ouCQBqaaAEkyJ{?Zmc{WLnqj==^0 z6MsZ7%6sym?J=|*a2VaoA1O==U;K=QZBlm{4HZJbq1at6QW5izxe+bxD&xe`4{Cj{ zzdk~NzlCz;cSLkg`KHuo=&N!*<%HSJA(~&_0<~G6`*KfCehS~TpDbJ2N^&jFYfO@q z2+L2Xr5J~YLEn@1jsR8%?Vm7iZ}U%1bY^zE*S>{_9@?3?X;v&GHIl`G2q!O>;rt(@ z(c%l2mvV<^{|(S;zaDzqq3Rd9^~C+Kv%;v57>XE5oKVznfbv8v#EGb`4LR<`gw)g% zYtc_Ab1PAEY$fD6gt>)#<(W@X-(vS=Xo~S(JtsTQ<eanYx~YVtAO|hM6j|+tO2==z zfy1P;edMz%=W)tqyS@01B-Qr!$;s@vu<}RnZ5-%WSmF{@4ZTi0h@$)pP*+72z#v9! zd~%b&>mF)%O=}q__c*wN$%2T~C3~vwllp>mOwM4QwTAepA)e^#>wBD1rdoBY_^rYc z$aH7MNaPCdLGfE_b5M4Sgh_>S{6#|x^Yan2iqE$#Sp;0g(`0#LMU8VbbV{Uttp|4Q z?@oqpI=45U_3tq|_WS%W^SEz-I00h*Dhuh)8i89pByYBJo<~M9t2|?kX;P`j2wJE0 z^tF;sin~wdD{5HxkHP)Nvq}p@S2A8Cg<n2^z=pe5;r-?o2&3hbR?4b-fs4KVq7&91 zEC+x`vq0Lb6Q<TitmmBVo2L@mM-Mp3b%5Hoy8F?x%Pg|p@XJ5d1qR--cX!W2ySpwz z(9e}0Qc*4X@_@90hWR<*fcgC8tX2jRedLIvOzUw#py2i9B!0>|ZfVJ_dlVU2A&jkG zlV>_I1>^wZ4qa4h^(Y#PW=bP*KV8tZq$3L6lf4J0h8Dpk`et&n`MFLOyr&c1B!%5x z<K&~&C%<ZkTRqFQjXE8&tz@t<8lw%?It*~LMjHA}RFozlNE5ThbT*)L3>7ieeD6lE z0WJwWeZuu{vQs2SQIC?a*A(IK{}X!emgh5=i3)!p{59x=gFUxFt%V7B|KYUt2Y)}# zozyDgV!)i*m&6(**!NAI+ucU>X#*^WYI}>3t6K$OA2L6_tZDfH#o!63PWhWLsftxG zdA-0uv;;%ChA8V?-Dwdxy=WSP2UTS1O#3%aLVty6-pDCaG7EhxTr8CY_DBf=0@@FH zxkIjXUwk{J8HQtX?SzkUTH7p<SS9d|vyW*v#g0B1UjEQ??k^Fg{$Z5+lb1#@@7ll$ zDP>Y{s-L+5PLfauT9M9vHk;&EoI27%h~J(+TuYOfXllF=+ww_K!|zL|<eWB35$`~d zDZXa${r1vn!SDbx=9WYnPII$#MZ;2a@K+~YB3>RRiel;&+(jK7{?WDmAQK%D-=3Rw z?OyNhRy((pbd5o&+@Ici)LAQMM%rA;kA8f|e=P1sT?9C7E!iUdmRB&BU@v3Eliinh zX*UpNwwl`}j#$@AKkr6qn^$rEd|a3iz$R&RlnzYs`M47=#58EkL2Ct}3d2iFe*@9C zZz7DD1Wq{llIwnUH((0WguM&|Gf2&UbvK&@;tx;4|Itd5a}^NNrGyV0|DiRd65=4F z#$HwBq99*0G&PViq^nCLA#lfo#*Y5kmzphB0ec)$75bpP6PaZw^`izk<9d`UJ>6hs z-<bMU?`t+4pA*!iUvJhEcjJ)@K(o(0mrt_F&@yWVq~sSIp&S`96<?;|(#rDF44N4C z$oi;ik^_pnc@+@PTN{w>y3fDcwZ&`-muc1?P)}tNj|%;*tbSxyVR8stb-sGlgMLLR zY(t266D5T9V1De^(a`a{zU825Y{|pjBDt{zcJaLS3`JlXZHd|Nc%I}c?fpLIb$WOU z>H*m+uaMbZc!Z{<X5gQt6)))2_Am0O$MrOpC=5qQggpTtd=Kw0`Y+89%pJb32>V#g zdkz7ij@F)k;~fkC>EaNo&JvUw1>-Zccc&RVZE_W8Ow}z@z=zer^-6kdW}PCc1lRPV zX>{whfF5lPPU1wK&O&9`GP~cWxy4`S1qXxIw>{I>E>2HQ*W{gm@G%+s>I&&Q<C8z9 zzyHHbc|y>@1n!8=r%}<Q{~eCGUOHTGO>d-?_8X~lw9Ui}XVFyORSB(&1Li@`f5Kza z^6>5{O3B(kE2o)l@^FuLbYREg?3XxcCMU_{S7$A}!HS!!mBj{#_ZIIj!ic;zz(Buf zVWE#pZxCy7E}kpW`E22K9pUxr&jdEk=cWH;q@lczZ~Q$BxpV7gpyPd-qLATHDamsu z@FXM`UVWp^S4D(c3lnE9ldUPum>Z6)XiS`Rx4f&Yi%s$7UT9bFZ6ZGiinXzqm+_Dx zL7l8Tg4o#f;~!S?58&!Gt+7X%m!2rGiNm>2!u_U7O|RUojCVFHS6rpLoMw8(_%E2Z zQX*LX)c(ocl`iSUDYEx8p%>5UP%Rw4?|$iIo2>6DqNx+8c?CZEE}hGU#*&sr%u~zm zHkHFZ?B?{xU81nVbSHjy^;>Yleghw}L-~G=V{58DN6V=abb`aOn%wo(53-uaw#EdW z4=S>z2p68^T_XaQkoYaO%ozG^HN;zg3qGoz@25s5P9%d&(S2Py#m7owbZ-N6%tGo3 z^i;oSm`h0q=427Dx#dqzKkdy@YL9w0SGLs<rUvc3n{jMCgDV%V1v=00C-gOyfNE$g z@>aHk-fjsq-2lp73MU~oGP7=xxO?sHKj^+?q2bG4w!8lHn^z8G{M>DP#Cx<xKm5y7 z&{DmfEnMt!dGBLQfI*WMzEalkQzPj2G{Ewx@I9Mm(%&{%!$s=VClS9#aQQ7FsMUrt zCf}+f4|)=p(fMAz`k)AVnx(~1$xpf>s}-gOa*^$YiR)1fuvbW))2;H~JxcQtgbTtS zT}jcawxiy4m771g?h+UxGGBJJ|7!#QfyCDL#x~CZ`uo)%vqT!II*>AD+o=Bohvzzx diff --git a/runbot/documentation/images/repo_odoo.png b/runbot/documentation/images/repo_odoo.png deleted file mode 100644 index aced03ebf163307fb5c96c92af60695a19996df4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 33221 zcmc$_1yEhlwk?QjfZ#4cgIj=uLxA8MG`PFFySpd2B)A24cZY)pcX#LDbZ(M+`~LUu 
z)M$(nQwU_%n6QiPPi!wE$w+8+Qb$h?Y;s=m{WWq{G&@7~Cm<S@k_OhNwj0#0b&K8P zSuxksa~n5%u*eAygaK`K@yAfU)1HzUnIox{sFN8;XYkUNpH5w6ug$c}HNMHS#lm6y z0)ib|1Qi3FSJolTS|E8Al29Ds)dnpUDdVymf7agcEqQV{Qa7J>=^K5lnvj4EzTb(* z;Z*3%o^HKON7EMg-3e>##E06_z@t&UW(hFX(TA)Qe=YJ)UX#vcVSfI8<TKS%|05_s z6v1vdwfyI{s5opehTsT5$=0(T@DR6w0ccFG5*B%5IT(ss^qm)GMKMF@ZwCOsR4k(E z@UXwg^4qE*km2%5*#7-907JhsK0N~IuRj>%tEGkC+EYH@5dCv%fa>`hCI8n|Z(kv> aAh39A8u^v<!0lfJoh3wMh06tXfBp|&cQ<|j diff --git a/runbot/documentation/images/repo_runbot.png b/runbot/documentation/images/repo_runbot.png deleted file mode 100644 index c174fdc8cceb31a9cf522065a665d00d52eb7837..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 32167 zcmcG#1yG&MvM!7hJV0>w;LgG!1lQp1?(XgoT!MR$KyY{0;BE_dS-9(;{gH3)^Pjs< zoqKNG`&Ln_YHC<(-kF}KyPtkK{EMs@5<D(E7#J9mgt)K*7#I`)3=Ev{9n9|$`7@dj zFfiB{4<$7x1$|dyI|o}6b1P$FCwDtzVq-UR6EHBhrLtt}r0tK$BCj=H)dTvH^|*-F zGZsHF`j+F5{rFXS$iK)(Fowv(q$eSFDuMhM^O|)=#4o>u;V`M<S%#Gjbc_j%aB1VO zsL{HAX%~<<)qQ!Kb_?5Uip)qD_geY!=$zE$c3&lU$;vMf16wyQVl^&0j>O@kzAkrj zWUAxIA$iWuFn&6b?M(K%J$6Y1TD)0&Iyx<dGQ_x%V(!f(*Y*PVN_sq_yzs2WyH)zG zkp-W|Z=M=rttRURBwvFzix#xEQsekTNwuz1zjXBO<0M}gKN)_GnHa|$x)udZM>UtD zr2B-gaQ)1z(Or$w3RgmkQ6wU1tw`atXIL4g3BAy5oO|5IN5Vqt#Wa&eMwp^N1CgmG zA#zop)`aUDMDx(>Vbd3SmQqCVX*j&ACUVGw7=bVLPw>|;;B2>!du)34$?EcRrq?(5 zz=5vv<Pfxm^pddEHp!LMOiC!7lZ@3lSp&n0&-H48={M+ZpmB1yD(2Snu(j1m`JT=- z1rIsevb{b<#AjQj*<N;pRu!2@Ywp_0?-Or}UL^VZ#*%zeWriqukuXEZZ2ukR9nMdJ zlHpR_;OU7p>$UsiCQ-lm07-Yaq8ea%`Dq`_a**|Qv<8VrqwsMlzTrz!*O{3YB+Dy~ z*3D?tHH#w2q}7=z&lYNnNzpe<SxnDq*nS_#imjZooS)D<^+qJ*Uht^&U^0xvF->z% zQZE|gc*GZdP^&kXok?@Xqb;w^c`cFZvG`e0Wt<?%b6R~gx8kz46>CYA>Gi8vOpfO{ z5@3N+k}aRWIQYzNagnndw&R)gA{eDV8$;i_lh0{Fj9KH?$PGd6<TO%F$t5N>#@yl& z^noHE?E|c7I2lZUyA+++>cX?&38Os%#Vb8<Gn)hpzu6bEX?=ok@cHYq?HA3h@{b2F zgX;UtZBw5XQs!I_>#Y>ft9N*n#xkmq%J!#8LZ+m8sXz(csD1w2GLnRzx=`i4_z&uB zVVS{=GLUT?7Fv$DW06Z=a*fT+8rXjR!i}=1e)<xHu6Ema#$ajp6;l_}P?P-|pXsDL z?W9m+(do$L<s|c5v2%3YLn=+-vCdX!l$&y+%4Iu;`PKp;`7_f=b&IJkLwp3*v~5O@ zE7i-cR$b2Ix6Td0ALJlP{mXqUW{A85s~sI{xj2sr+2Qp4bSgcxYLg<vP>+UEz)A!n zzk9qGd+G@p#?l}q6?gtZ3yDF~bys<P_!dJT2p5Nm$5{NMDc-L#jr8Iy9J(8d{<UMO za%iq36^Go|B}-42Pr2M=ToeST<4_eKZn`3ma&|GQQA6`CurJlH8i-g`D>O+5Uk%^Q zjC%NmwujMcTl2mi(aDtd$n8>vsNuu6$Rmsn-cTX|*mjsn3bf6#9lhnvJR|ZW;-<@p zAGZ@aMdIJB`ZGbZv}A+6GVbbdOn8oq6lp~^q0tW6*RC)u;aXsfyP~&e^<=NyoundL z^yH%L&S>N^(&SGq%a_9HO0Mg%v8F~i{<zWxhT7EdRq^75)0j}p)T}qlo7hly1!TOt zyI$H|Hx0e<CRLiO|5oU%ZsQ_OVO{i&$w~-a^_2cfZ9YW_*UXoMmF<_a!adpt12<Y? zyPWpeWZZW{9L9!de2GT1^<~@)NQyipQI<#e7>9%ktwd$!-*wVuL$0V!=Sd@weMt5T zS!SL0PPmy<>i6J3Dv$gMkkQ!Xpa9?!{$jp5butzbnza??(|QG1ZBu&?rBU4vUJ^45 zo*75+gM`!*O-hqn%fEI{$SSp_smy%yRv>#2EHOEE{p#J5O*0EtG^!{2xoUlv-!C(v zTzgsK8^rE~wP&scp;e9$le?g$2fd_@R99_29|F7}p<M1SI$M~PYBp7xK*aA+8RGu( zeFj4`_jj}d&$9N_K)Nh_AqTQ~h7y7VAPu}>(Q)54BZu;X^k@m;&wWUGax46I-Dzfh z)giK{9Q24f*hVA`v&L=p%u|I58O~@Th9Z22oLEJC7za49xA+>h8xh?A6%2@o9Qo?l zITk3*V)_qK*a@<ZlXVzSIK;_8<R*!=s{<eTg{@PUWyODtGiNFx^Y!X6K4c1otGiD; z6ts<*t#psl=6CC1^x=V)_iC(Jb;>giIkdvY)1+l!niE}uiFOgX$7%C1NmJwt_ltd> zppW*qeJtMNmTk+=E+dhk2u^=IV?$E|1PY*rJu7C|<79*h?5s-)Vr+zivc{a)NE2a~ zZaZ_2INdr`^bNoR&Y0ud>kfxW^VR}Q9oTdmerey^U2-uJ)W^TiqTQ3_%g<9Dx9pH2 zLe^ywU$-QLy|Kh~3K@<j`66#7tJke984k+9s&uEuVPVGF*(}H^IWWtWx@EiLtPIP8 zwdF0bm)7_u{_`~Ao!k{?VZ@c1te&}B^>WpDUI$k_4mtv#a?XNCD#tpuGj84&S_W~? 
zbh&Gh{=JkAn;ry_X!c8lZd(n3evh70l(h<(IN$V%U8*fJd0md2;`18k4WiOKGmh#< z=L&(45MBJj2&h#TZ@TY=bdoN2L?2>5Gq>n6u>JPXFa}a$!oNR%Klh(oe1Cty*@>$= zf`KuR{o@bZtw6x#_aLm3gp3I6HXITHG{ANHmK6+)7)(O=lakxg@rtk4%&h<Et17_- z>WQ=>l`&3!YIWv}b0>F|u4sL#N9sDez2!(!r>c-@0DC2FDHOet_ES|DyC{O%JCy)D z13<WD01AO7o(GAQaVz&?L64-+Y-7s7X8vH~VCBI`u+LQ7<3JD*snDIi{?)S=uUE>* z-pIWd|DNOEmi?;ZSn4<dx6}T6(mxN*At}3m*xO^o5{&1dBTS0Q9`H7hfesH4etXEj zQv=_l{`JH~7TSIOGt83Kjr2B(I%HOa3~+%>5hzHUno_4blE(M;R>gCbrrwF()v3$V zIzvlOW&&k_owK^zj#CDpDn@<$YYh(m+)=4`_5q_u2b0)x{DvMJ_0{G@-0goYVKCTY z;R^z{76V$G%bZQ4e}ST?Z>^v&ia+~LxF>EQUgN$EhWzSg_-x4Tm~p+KVc9}e&;9#7 zto?(A&31cm<*gQTGwdht!#29KjeeL)fb{SoCAW*P#AYXZQvh_sJ&yGx<fHWBO8-;Z zyVWVEE~GCv`%Zbc^aG_fg%3x8T|H@Bycb=(X?wfqI$RM+8#``~pRam1dv7$)mFWwo zETyny`xdJo__^yv7F|0<Ut|!}m?%&n!)&c{+quMNp!h4}U9~ALO}5cJ>q8!rx5yh- z`Uqat`w|*FueILxkb&uQR+GLK`?u=ziOb_@sK*!q3pAex)HI!F){_<~^#?x2*443# zq5yI~I!JMmeK&Kz%|+I8#F*)4F9Ina4#D$d$<{f}mGN?lMl52<^-1nI+wr`!6O0+3 z@y`y1e9q$*_rs>q`g-`sdFk*fZ1W16obKmz2v^oe#z0Wr41;HUsBO@VGr=*J)_Ma; zo+|zJP`?T8=_Q(VaZww}LQA-DjO%7fTR?viuDO<m>|Fm2TWdV=X`v_eV~PLcE(Scu z+l#R90h#Q-xJbc88-*oh-<)koJiufv(wf`trb?H>`&)w+Ps$W=-YoJJeZH_}hg z#);CELKc3lN2Z&MA2&k6U6>4oT_L?m>RYW1A{q5PJ`viyI(==oK!Xd}mpSnF?w>;H z^tYW_kIW*e^muZsnHGo{b2eBZGJW0fxfCIyKnH;>Fx)CKmBgH6L4}{ldWd3v*~a0G zSP|P$o@gr*ixK>5rR0!^cpj5<k>}IJPrI0h9A%Vt!O3)bdS_-<mUu`<^%IvqXspor zU`L&k4Vn&>-d*A-bY%yh^7-}_*3b`buc;&oJAqXpnuTQ&9RO&xhp+--IVnYcO*)R! z`dlnPh^`lqcA(d8(WNx>F{bb9D1101B)I}y<!-E_^jftk9{HL~Ugn=oWAN&ct!PAA z8{KL2T@hS&uZ(-K%6uV~5wIzGjG*9sa>BUp>7h%n3tCUrnxs%E#bhea7(&^@)Z=ql zH-M>me_zVw;xlQ#p|Y97wE!1NoJyRETT1qgJPs<I9RZJHe{G=s!vgG4%x$__4dW+m z*|!a-35oj5S{Syt<`56W!O?Pn$VWSnYZMT#^mRq)+6ChS8^UforIfAlA>vQ=zgG!c zAH#lhb)9;+`%LG1F5~_6Mcs6$D=U-y=42hK`6l!oNF4e3`P3QBi*JQ!R6OkLk%89+ zk3$YT%Z-X)CD7a1HZ~fxp2>+UZc1gsl6!;o7i2UEBJn&oU^m*Pg})YX2Lu{QAN$bQ zz*imL6JMj;tf#WX1UKOFUMPhXvJ;lz7b$imTzF!}YOVY#e0YS@>vK0|W-Yp_JaG5! z=c3M%7jCQyx&H*qMYiHb$@vb&m@z34&W15s^(du-Ta?$F>7)hWBM#&j>FZm=nToau zE=oU)+lJEuhdgj>H+W#}^QHF4L79=c46_U0yEm9a__c@p8!N>}iG3~N#G#Uh3qsxy zFs{ZWF73^fgoRmB{{sU4kKpqE-Q<4(@#9i>_Wqsa^#~2U@z`|R-gZD<7YD>f7c0oR z$V8HQ+UI>Q@8tP(>_ThM)YKIE6CgAM(1|$5TvnTyhIIoraotNNSp@tvE$fW-2J>+U zi<Jn679&jlb!AR+zlugILyzG`+D%a(kGGg)ot_XEDrml2byuC%v79I{iGHhE*Ad_{ zbey4m!w6xP!Gln|XYE?P_OG4MoRJq_b{l-|8k<7br+=DxHihnY(cy1J6sPHiyBc}i z^wQI|oR8@ES!$w#Y<u++lfR}xCmOkaVJ-5{`fyf*E7>4p+84-G>RswT7!>TbN7;D# zd7y~Na0X`1d57gP{9EE4M6r&>No&OQvkxXaLoZ<Sd$O(LBXxTGZQT5*p5M-}P@~B! znN(5a%iwu09yl&8q|?dQ&-D=j=_SSOB#Z>je_Z3ByJMQCO^Op=O~#zHJ$~OTU7EIF zY0c&5VP%gj_b}s*{XU%sl+wFT(C*&ghdwi7gq?$u^OMgJE-;j;@_e&M-Xv*Rt3tq& z?-u1KJbE1_gi@9^z2;nnn=$y=zmP<9N{ip+j+r$}>!zHw=w+*gx%LhKKaq}QEo2$K zL=dT8TvTnQtG#?d?+QvjT*9DBgUXbfot&{QferuTs!fXkj$|16%k`X*XY%~Raxd?& zlYydk>SkwdD7mWN^{y2;*t2=bvJVm<f_&<O$JF2`0x$88*M8(&XaMqwuM7ZPu8(2z zk;~?C$#<K{_ASSiw<(UA$J+$HPgA5`H}45vF-^v0WEU_g<Es_fhAyjEX$(5ly4PMd z7d-ezEXxbd$Qv>jaPxWK%FT`MR%8b=zwTiTFI*@9lnE<5;@*}y=<bv@tbleb1*XEo z6?ou(Zt4=vAiX%y?z1i!+kEO&s4JNo%M#ufJ!e4VKC~{!Va4IcngPz-Ep-kcaC%}9 zHZ6T`s0oEH^1+kerz-3@O|pcNE83j`Hu-wHDEyX+$$ya5mDyVRfqfckH3Aj(_)zRm zptCn>9)ic=U?u3A;Z4Z|f#zvIu5VW;jLfwtn3TOtlpem9BEnc7C%D(t0e)NOlhNVj ze>SYi<dVkOt{k@P5YIDsy-v&7m4!#e6}plY_$dgOZlTV0AO9T|F+Jc<3>0^He02k? zY&_F44~5iUN(&5XsGmOby47Q10>cEf+j8b~q_5MW-kiovoF{M}s<=+1;|e{i!>AFj zqLa=LP%tNMC1Y$7Sx#LF;jI*+cdU7j;SIScgA>wTIoss%)Mqb5U(@;KN6&%au8IHL z$ID|jzz6oTRgF=nn9O$Fxth|Z^wy&YJ2g(0>n=V^W&~S1G*5jl_FIM`G7j9e8N4AP z=AgrY2kNG7WZMSC4i^OP;#_>!&Ftfl7xG;-f#7nvoNuC-ir(YWtw~IlNi$~?(<|7j z(gj{qX8++w9YcR(W8wiI?%f=-sXF#vnSjZG5;SruF<SV#L(|Nk_|jo6#Q19V6htz! 
zA~GNlD!%lbh(WvXCyu-?9Xhr%++$SJA%bcNJ#bass5$We`er9yb6zw_1>(Dt)gRv8 zlS`FB0sYTq6sJ^dB>6jjmDvdSGi<~bjQ3|We)5CO-;+iE!({7)FYpvD_0uyz*}F1q z`{~|FZabNe`ZpOYz(ot;`|T8-<zn3xQMU%TMOk>;^1@%IN<DAiGl*K0)fD$`@=Ons z0F}1AqbW0Z_A=#!IjnW2H}Gd5`Ao=#1lNQ}kD)ap@a%KieA)M=eyLN%VYB6e!apbV zU6#*rt4)Vgf{AifeCLAzjNjVl)u09@G3~4uf`rv5BsN|=(rQ<~QY}Vf*OBjDtw?@+ z9VMpjQnBJ4-+&T3ZnqqjKB1xRqjhdR$~|l4QR|v_ikSCE;=w=Cb;R~*zddbLvxsJw zo~(ddoS2bvyAwKwx=vg$qHgv$p8L9@&dx`}5Km6aem3+wvo%CSbY0KV8fw+_j;sgs z?5Qoa$uks7BuCatpjER=7x0!I>zEG=s)1MMb$4pc)>aW+R0h}3Jt*+9%32b819QLJ z5n!Ve{)rV+LwVtD9Ly3&$iDl0$St=3znsTJ>cUcIJQa6X6U`6rC!&uvu9F%qU%v>v zw=|uN;n|mPPie1AD<<o4ItHWbk9B$CG+#L|3O3sF5Yqkly`&@z7>LR}SC7UGb_J_` z9S9R}%0sQ&m~wb@eDz|*g;I6|od-o&X83$V2Y;&b33SU#4wzHCwW{#wgC<?YqBt1r zvnRJ=vEwG_b?IZ8<$bdYmi-x4fT&xshY{+HDFl?RWl>#Crb=hy%y1&ttdex?6gnd6 zz^BC-G$4bW@3NSlL~OJj#5k<JhqRq+riA+^Lh0H|S<{s3)9$_^-X`~{X78*nCqrq} zhOskdT$D$h6TR7+kCU9_s|!MnL|On<1(o%naCc6iZS^UF%1Ar0<|+qsIo&_vT8=`+ zD+5@WuJ>(#ow%Y!yghA$PN<#`P|AEYBIF1ZrC2rno7KoS+FOX0$9(nlQqqgxkEkqT z6<lt9e?(+UTwzZb{z4v0Znws^1p65dJ**9nirIjOQm<hzpSFf`unO|jXJkkLk_aAf zlbLVB8#r%DB-8AI$8of-Dd6YE>ur<1!tZ9oa{{6Jum^}ISJXRE>!F{XL(Iafyh4@A zwcCQw!^=WuL9-)Y$L5R;Lgj$|bG;mTse7XCEWsfd#j0e>^MHnW!_HeB=7hd(d?O_w z&#)W3?Zq;^DtU)-gTieS0{7Ft#qUEN?0oN|2GIQo6qs*7d>XHPNltok;*{W!n~w|0 z++H)pd*sfJm5=%JHVu1v?@3T&ib4m9`GNp8WJ#d@(`th{Kl>eNAI=7_qsg?P$-(;o zy>Vv)@2Z&g4~?*o61BEOO@^6&M^xKkiYFfWb-nM}W<DJmveQ{2PrMlT)z3QD+5q&G z`0tS+Z`pN@i?KIG^&f5q+$?)&2tBNjG*=;)zv*bf2m=79`-tdXh2hb+(Z#O{R4zHD zhIn1`LQp<apAx@s^bZ~KV=M<j@T!DaSy5EhMNF{%RI6|Hzj?RNe|Oe|IMN^l<=Vf{ z(ULGXKxA+qVQV1}iqMDAcefz4-2ag>*6n*#y$w}x!j*F6n^`&}?sUNtd7b4$<Lgj1 z(_Oumhn~%;?msom?}z(OYj&@Vy)x*&LSXA&tlvNB=)BG&@!kr-Zrp<0Bxydr_KQc3 z?=hdmW3RlnN3H*+dTp;78a>=nvf2#KzG1}Q?img(-g0*wtp1{1b}@XN)7=6#eG{L< z@H*1e-5QrwxqV%c7#Q#A9k{!DefEup4hxyPlw`vjM_SvojOK0K605u&eeDU!w#~UD z2<*7~d3x?zb5W&z1>NrQEg`U_zUrV4%glF2;I8vtfV$$&^1|v6JG<76H~RU^AvXr( zMRu;Gf40=@;nQ0GE!#HKcGU;Pm(?c-_sbiIMBf2(KZhbehlkeH3NS^409xsuiu&#_ z$h{~<NKv&Il5vIJCXOwMSU|j83e+K^zmHm&{Cj4oF*T`1(jVyGi#GYU(EmU36aS}^ zYxRrhs(3n|c+$f1qus582=?xTOjcFGZAWmj?!R#t@?nzR&iNYc%uJd(qfhGL>02@z z>$GI;7fBpyXT5IF2B8o>Zi*mw^p4H9-#bfEEe&x;9HImT&OASwoiu|S9At0XCf6?J zM09sc7}c*b{_ISR{%3}RGnQu$6b|XlR~&=s;omVR7;V-M@>A)4S5;}#$u=J=L&vcb z<iF!~)yDJUM3EQQ!Y?QMPo7`B`Lv%dYbwr$;{a##<(H@o`EFECc=g@~3V*v)EWc@Z zfK6+0kdxP!Nbgg+v{iWYCFO9k>L5)Om#wBIv@KN`C+(^*=gYR^11p13R?YpBh#ouG z@^Isk_A;mjBsXt9=YcWc0$!o9GdclJ9=ZK`npRi9^3!kk^ZTBs%}+EprJJE0mqYxr z`I%>B6r09{bd~RyJ^Q8En)~;waMo0cumlC;->NokMs)0y-NJN2S2VPB7G_vBwR~LR zf%QSpvytL(JbA+H@I8U%4vE=fMDq;mMR_lWyPbU3*v02n*-l>3aYx(zIyrtiy0Lv< zVbXjWVFccBda!vv+F^K0q=${gY|{NYFw-sHtBX7}=N&6>^YoSId49gBWm|y{^Dpa* zyWB^-1rBy|cY{g8DCil1r#RUz2+h@fQ<5F)@%kSANQ5w9eb@NB+DDhNNq(aMiSlBG zejYP}MR8_EaMWNb?{*7s(XmI;RF&0i>IuHDefxem`;^jBfj#0hb*S+lxxbpo#OD7m zQ7G2iQe^0B2fPG=PBrbhLQ#rCJLYS$8CUj;N)9U;pBXoF8=jK914Cc$TKJykRlR_1 zdhqPNtY2@rUour+5bjPJ)L((8m9MkTbQ*S$?2i4c$MYmUFL(*e2`}+O$vV9Yug+zE z!q_;3&IwbSCz4Ne^`Qb_Lz#>d?L9$OC|O#14d8xtdXxyJ+mP>dn!?q7$x4V|^y$J< za);R<O*hkvf7Fp4s&c*!Ubv_aw>O5HM!2YC&dkH&F@q1;&*T<l!tSempw8YI;DG=d zYTdb_tXD1!IW94Swci;78R(_%uW)jHg1B@9@Oz%mAw!-Q&D!N}1X7<0a$DEB!zz4I zX-f~*97NS|l6VD4p~?L8lOy|i7zpAc2cO|pwJtPQ!_)BBwOtn5dmGy0PnNwlhPAF| zeT%I39?$Bm{yx1(M8K*=fr;$V<XRbn<Vk5K55!1UGCuOax*XgMNG71q|9Kp_qbr(k z8Eo^I$LCPgH{J+-gut@Vcq#45lDczBjPBb&`?H`6GFRZ1^zwq2m*MppKVJ19qd@Ck z$k5Ta&GsHqWWl=S9EmL@w^fS0R@6Xcbf=43eN3NT?>kOAJ3mMwL_cTFYylI>iM`j* zE}}IeJwX(azwYvO0LzsfSwomRz_Uud6Z?dCJTuh0*WNgq&Y>!fbqc<i^ZXUU0erJ} zi&E`)d%=HYp!F1uRs{iDb!<D-5uuWh%Z`~#G&0L7N1&e{9UDA6^HTtyFR`lu^Swt% zHVKTX79iT!`vTzfG+<Wv5^TR|;46JTkgMX}xvd9Z2}Yd&39HxpI%oi;9?Ls;s)z`K 
zPwn&SD<c{?$o<pS>`x%;XuI!ev%PJ9&Cc%y&EKjk@Tt-sUrwGGSt^5moDJ2hEMlS= zW{FHMQ!VoJZkEi@>rM-1SCb@w$MAS<MdD}+rRu2<z)WxBwdU0A&tyINsks}(#@ikl zV#>W}3-+&cDA)Aiz~q%??ZE_d+4mlA6$h;H%zgQtPsiykKA1Sg!Ncg|XJo)j<EeIc zhs-!1?!q&2#qF{^7m}#gj=}|62aiMKt54|JGN#s+D8YTQW2f|i`&%@x1s3A1fPuF~ z>+VKUs@@R%Kmq<c6VMfi`L*c7-^?2FZ<!|WlXR@A39Fo)9gmY&iUn;Snmy%>21peF z3RSDz>uZ<8d6V<?EsVyx;{BqflmM^Rif1C9mqXRYnu)RG4(90LWuG2&B-;eTnhPx5 z_Qdg1xq@Vl%tNBi1*xiz)gg3{gpcnTX}i`{ve#Awn6B|*E&2HC=_nlm*x=?cH2=$z zHF&8O|LU03>r~g&+K!^~3kHA1z@HFe)$3&9w;vrF2p?5Fyc?_1g{IA>{?Dw{<UMC` zQ%gkjn2vNO$CCjc^dEQ1Ww=KHTxh8KR+zk{#z_197}wkT=b!v3qv^7qPK1BCmhR0$ zg~z@{%s~Bdw(&#%KF|6;^`HNJ1_zqi=sy9wEJ{?+(D;>6<Qc;iOMDk-o|u%Rz-@@L zF@uxAhmmKaL0y0Vd5j}u&#E?1JEdTuv*2i>NwO;P*i78En+p8QNbe{`-PZInvb28Z z32X|lkK!W_`BHxruAV5{6j+$K_rlF!wDIlI{P2EzJmEJvW_}A5mObR@$$=V~l6?$a z=wa4ujd{Jn>V8Ssjn`x9WJ~n0%;XK%1^Muq=C8fkoBxfWYQ**I!lxVljs|$S^D>o2 zK4i!4#ot1l)5qw5m7E`2tx+RR7`nXYtmASVS8z1|E}1Jimx?r1b-@p&9Fys6;#alP zwcLW^(74-IR#wK-vX|Y~BWhIE<H6wtGZlXU2Kgltp2eroVG+_P2{Tn_cLpZcr?m1w zS*<2{QqLGTc<ywy%Mr}x$4w@nzoh|AjV*1LHDnmNjh1LnA>pPvGQ17V`ohLq7M8`` zaviD{g6}WVj<~Ev=C<ewRhs(&W8-anU-|zfJ<OiO)UypMq|#Wt5KU|}*K@`A!cp|K z`n(=RdGa@9c(Rrfji%ylin|Om?tSsdDN56yRCK7errW!@YXM}W-$?ql;%36wSyvW* z1K4ExB_%QnA*ltEA|GIhXXaiIs)knjUF)t;S>BnTcEug*=n+h#$xt$-q`%<9wQZjM zsE3g4fVjqSV+U8lVN)Mx)VPjmT@+*9T_?=kOKGUuFR9r`1FZpA$(H35UGy%^G~!0^ zUxb;mSM56~v!hDB-b`+4tf6B3DPosEuWnk?NtRZ8Ti>uOwTuNH+V+w@B@b94Cz`RO z+1tq~`d{sk92d44odMMD*n(b`2-l;Z@+i;zzqKCGqyosY7i!pNI`FBQKny2$<1|^V zE04KShywh9n3g#1JWB`d2H;Wy<)&jl`Ef0!Dj~0IH`}>Bd0ARebG^^medKd9T|78A zHrA!<7yMmb!c1DnFJ-)U!E>f3Xl(+3Hw7!rDsAAw;qMnb7sEjCz<g&3P5#>S?T!&n z(N~w2&$GsDdklLAg|5(OL<09kkTE^WLhrf$V|XIc9(&;HSTrr>sg;5HC1xHh__14| zilTO!4eeVDBRBQiJR2`?Oob1QBOxePGWFw|2roclp$xltGQd|gKbxKb&@F>W*9WSq zWL&p1vGg#=a6QOjpE;S$YHB()wV}q$PtD_ZJ)n>NVn=End=cF(s$U-YP==zeXZV&8 zQiA5@(A|0@Cs(A@&&ApKyKC8DqQ`h~*lZ3V3Uy)BfYQ2J!)54^imL-d*y{PqdsrYu z>GC{}61)c?HomrC&i88zg3loz$IK(tny-j{I8niI047l#9ky<1&#?t<;1Gz)`jb~a zAASG1zp#^Y{8B*opzA`KKL4ddf84aDVaY*nJ{?<eZ$nC0+-O;V+l~V<GEmz|vFq!u zh9o{!1<Npm_p%3$t0#TkaMtz)Uaq(Hz@)K42NG(V1&JI7$Fy5^vY8Yg$h&h@hR(AU zec%{_jek~rYs#UTgP~~S8<aa>MQTwM2?(gT6R!mK>&vm#{BYYg$~HDCAQB3J9CE-R zrFIoqP^zUK)lw8(Zykz2ey{{SO=O+l<PObht!ZY4Y-lhwttB?RDugMpC~mlwK!^KE z`7;?NY{^SW^}Z^GZhMtV7S#%jZ=9}(U1zW->PW4@gk;O(Q2k}o-kS+EPk#p!zky%k za(a|)ZX5kx0usdh9Eb6b^NmBG24bJ&u1f~)&%K|7v_)IW>gCGy0~+#kot`WR(-{~2 z$osk(uH(f@L*0ZJzZ?*>IZ;y<LGA)SOnc_NA~jTIO!U0%{o_H%%6+*PTetfvfEW03 z@WFeQ91U?Yodri6$q$FBx+ZynNIOxWIodf!HP#h)AIa>_S$ZI!-vctK1<l;rA5MN; z=#c-kQ}_Faew}G_c{SMD!TG_Fhl7rHGTX3NZN?IKZ>8xe3=xkRU|kTHx}%3dy*7<0 zFKVo#a`c5yM>bybyM_%Z!miieqAfqeud5wOY-mh&%Y5*OmS2#)5`YUKYCmVU2GTQM z+?~qNk6t(*Q9|`}ujE-Kv&K~Z>q;PKCf(miop@MVd?O`SD-#?OtKBl=Ad8(8YYz)H zY#`?37mJMpoI4rVuo*nnPQvj4rG+S*bwq-%_T)MpQDEy4y!}luT(<0b>>)+1GtpK# ztwAmL0#Ir01wF!+(`GefbReJzuQ)Bm24%|urF_GFQ$v^HA8tVv0p$I+7J$?j;<HXt zKwtkXG+`uFcaTmL`ibiowOH5|WGwMYTU#_b<YQl#J9eHME6A`K#w_BWzQo>72%#v; zLtKj)*E5|1L`QP*(*8|`NDb1OtZIb+U~Lx9d>hgL3G`09Yl3&`op$S~+%Y16y=;RC z&yq*Kt0n)0nwZw@vmYqwAiE#nH%BEufa-F~Kg&uz3NYwYGSG+B?Obu(iOeI&`50%W zuM_}PKCIfG;Y+jQ#&FgegYM-z!r`!0m8viN4KknuSxJEW7jjdoW%aq)1K2+2VnT^z z)gUz#SOZL24q9?##8`2P&a}^C%=)Q(%f$CK@ww8X)yB&q?OHtsvFuN0j95es<53uo z6k@h>l^R3bDa4;Lgr~l;Kc>_e?dg$8kh*Kg`3p~VTJRMTrh}kfLpGgW4e0&H-oaJ- zkyizNx$}P+{EILi4E|sf8|*CvZZnL_J=Yf&L(}Vu)9UEd`*}>9Hh;7C6=6^Y#ck6b zozBJK7>2`A<>Ahkh!CZ3!F5FZ%s|RRFZB--b!(4DsAJG)0lxJs^~uhriy87IV%mQQ zTR^$arv$n80CdkzFwQplVucQR`CVP|k<KJS+~FUq?i1UVzj*$ZI3ia`HPus(B*SKH zQ(!KHS~pJs=tXX*Jice@XGbvC+8=1I3Fl(>C-F}BRpjq|0ihy`kJwmUnxQgH#6xw^ zxX2wINcmY7GafF!xQkoiK(Etev^xapVfuNRU(L65tvAhkC>6wfK49p>AKQz6A(|}_ 
zFOkp#FrrLAR3liZG_0Wvzb4`nedR|}8ybSSLAs~*$|q!P4^^Yt8_XgC3s=Qes|Vj@ z@J4$T??kM!Qb~B}e%rz9UXdc@B_H<o^Y;;b_R?qG+smJLMfy1D?CW`_h0p33gpP2{ zV*H#9nE6}$O}jZudl#qXgg)YvR@P?@G6!-V704b7j`wo}R0O|!$`9Nh&>qJAx7Fhj zB}VJD?5<iKymv1yL<{(w?RtKFA=Y<dKbserh%t;cnWRs3E<fDbbBtO~)$Jk|j{e9s zw<MB?AQ%wmi4C^4GmngeK0cwWtqWC~G6G{7g{sOVS;|6+tL*IbY`f|omGeDivswV% zT=~`~2*Y@`yn89x>wU~sbEq_>3UA|as}&G78!(sFcB@EOKRVzXiVS+!aoW9AuYdVr zJ=}F_k`J35gfvw}@Ei3D7zvmX0tXa4-`OHhF8gjOs)1apS7R=se{26HRX+>Meot-I zIn<MbCzjp&X=~u@X@~26N4a#c<aL)KaoSHW+_Bli>cCXWj7~0#k*Ah%;K5Oc&Ik6} zp_kn-B(+^_)X_se!@-m`Z7+eNT|MOmW}!%r2<*p{{uO+CU4&%n*z>eQ0A!ZGyCDGW zfcOxGUKA*dLF`BMUpmOaRA5hz_T;&ivtMRbe0gSVDa;mHvv#f-;7t<0Osojo5%Yxs zTBlRJy%{ZaEs9>mY&Wynp>@Y;xM5;^Q~qs^qn@)}?~oS-D)q_9H2T$<b(DL2vP_NR z?#0K!oAAu(>h%6Wf=?~mXtJ)TF@}|>EJ2_xp8B+5;`}=<1Z*evK{t;fiSqmc7~thd zE68VV^x+#lHj1i(`ukjCO~_l&#TOP&)Y(O6I$v|XYrIyalGj2JZnd^Kz1+2^Td3!J zH&N7$e%xJHg6&ABt<-g5-o#W?YIL9ue#IQKuXCi^lpfF(3(=-i{l6*WA85TR^#Q+C zBXVD)+Q&fu^~24a>*wzLy507A#(tk6Tv4)mW<WT>DF$3t`yR5#np|g!P|2PLGuZ8D z#1FUMsuo+6{Bg(Pn8VZL0#mK+`!n^UAL*>w)V>|@U(bowOjiJmc?QmWQoFTrogt0} z6bK!lptg<gzBuKnV~MJ&RYtN@E-jh%YvuV43Ix{zIzp?E5!5(8ZQgQ&?2NxOSg~oJ zUbfwZ$peOWxW($Or|Q{)ZXbzOm~Mn7>r8OGr!ljLwb0+fwHoK&f&&&E-+;-42!;hc z$Qvd*byWVp`S|R2qrlk9P2d<XmNn@xcW2duTfhVl?WorxY0Tkg<{x6=w`gk&;cq(6 zq1;UUPpK{Jf1_^pe>tDS<46<=&jhitH=z5_iUQe~OOdtEB~$o{5M6JAT@^h!7`JkZ zc)}@b&Q?}Nka%|OUQO_frLyy<R%J&sU`Ww1FQ#Fiun!^sWT7~}lR{Ba;hsPwD0{^V zKm#iS-p$hk^jyd9(i|ig+v?VU%cqqoLhe`BShFdS#phYzI!7hfPjxmDguQ~!Pkc^) zZ8B<{T!LaK-sv@(L;&--C`}RKnA4f(VEP&6?vG(uQ~s1I8>^g+#rJV-kVZQm)GdaD zJ6qP3Z)C4bNj@)Oj`QiP*Kl)qb{nX$#Gaah%wLI--gUL_p)p1zD&mX)ak{!WJeoj0 zp3_dIT(8U8M^uye=?*ce_1Ng)n3r3V+fL~@v2UI*%eN}Dv%90<fZp$&KZUQ@)k!wX zPL#)C^azmi1xC&RWRkeP1f!vIK#~M?^bxDwTv<swy9kHq&u(CaDV{djA8-N}^=!0d zM#+$Btq~_L%aKl<eY~<Ju>}n*A*HhRiFZqKZq9vkMpi{EK{P_oe&8^%z$MuQdC{Fn z((<<csJO51mvH5>4OPR&T)Vid-c`{5J89VsM65Ab@jDQn;Rhw4?2s-#9_w#);3}zP zKLjQUtEFY}RTggu=VILefa6OC$ydiE(j<mwSeg}yfKeHzW0irooktzYurmxwE)m0I zZtIk&ZpoaZI{cCPNKm*^ACy#P-<eN^O&T58T};uaeV^#N99kzJEyfuq3jh;+C~mHa zMC-ttXAf>fO2{hfpad7@;R!E1zN2(!`Y*!QO>ysg0DVqgEw=4RDC0c;T(BXhUe!ix zchqIRXF$f`k_@qgl_}%taa)SLW7c+)sRcF8_h_=W?RCh-o(jqP->Jw!{-YuXJFZ6% z&A7#>wINRQaxUZ4Q59wVy!AV2geQ|a^Ih?4_*dO6f4B%tim2*?8aSN`zTsG(ED`0S ztOMw#SXt?$v<Nh-qS7`6k4ZZgzC+j864{D+_@}5ziWQ(3f_FX-0i_ddkf|loG|-uJ z)bBgZ`*P-yqP%?j9t0Sy(d;t#zMa)4Jv7RFu^Le$*3)%@K4Y<G(E)4diIqFrbi4>1 zjfXJQ0G6y)rP0~wKq-)+8H>z1RTpR2nGUvRHGmM9k)Zwf?&JN7q~tEu0{VgeatLJ_ zN5d3bZrIY1^&#Ki*5{|lAXTMd5TtXMb6tg-#13ob%PhEj$gR<k&P0>#73%+p%Xp&{ zdA7^s{b<-)r;mZcS06%u>h0Bb6CUg)N)A_HOBI0rT6c2g&&=<|0C`#VzAKaY=JX{0 z`aD$aWivfx@99R)1w^TBd7`%ziJMk`6IZ)nA6W~jzEFC|`S&uuDvkk}c$Im*Dl+Hx zxZl0zg|CA%RTDJA7b+^t`!7RCml~W~Pn^}LBmoT#BApbO>H<iyDsK&G#<B!Alwmf; z$Rp{$PdMNC{W~|wA6Jo4=Y3p<w@|3cvh$3|?W6R09SoaH+s%|N{}6X{>5syp>B}1{ zH-mUlgc&Yu*Q90frY%F|vcxKe`wN(bg~=2Cdui=|D0lsjCTDU37YOuT5)~w+q*5=P zPMKlRpnRF4rlu4mP3g!oxQA&Mmt{v26$+n;!{B1~^LDb-rPCFD)!x%zWhp;<z7tpz z{c+kWHkt{Z1mp;uqAnQGSP8}{HPkStW@kM92f)1%X~(DX{K|g>zV?1fiJg!&Da?k* z!#WZf_L-Ba9Fx-(^?#+Ns9Xd+AzD)qRb?a|raN(_;AqS5+H3-L@U<mwpiI_k!n|+n zvBCI0Ev<zawiSN$G`+LS8g+NE$;ov`KLI*;HBn)IadCO?;|exj8d{vehTP-8jBs+D zz{&wDUXnZrw@699p53mm2N~vy?$Pt9FRdUklD7w5vCXN}a^PXFud!yf{mn)`vv1q= zY&gV)IiW2nf677_A?PSP%0XAPNgQ15DcB6kE-o@R1Wfu=>Kx{7*IUrE+l2nt!ita= z>%UcK9oLp7^l<S|@fB=ertu8>4khtzq0P(?Cz@!H7a8DfN(vuBcoB&wSNcopKtZm3 z%t$<Av=KjAYarxY3D(pxyP*sOJ{CMagqpXrz7Bd{^YewaWW|6_tau6Bf^?)Wl?mu{ zF}v9xiPSY7ChNsp&+X^+rydRXkOUCW7t^7BqTlAHj-+oH){10;<fvO6FzH2c?$4QQ zPohx)tx97!Vz-Xz`%xm{nh`0tg3$LIOphFsl@@#(x6<M`oRlhG_~v7#S=7eQ99o}A zJPhm&9?R}ZFclB#;@sT18l2qByof`SJT=2oAbU^R0#`LvzkuMUuY_qeAEXf@{I?Dr 
zc1O=ym*P);4rd?c+!Du<dbck#?2Qg2O|;WW%muo>FS{c_c&>Mx!VHhPwI3dKA8BQi z<ZO;vc!b>c_5??eEURUFBFwe)8JVK~4hxyYfNP^GyIC)aDa$q!_eOb=A$pG^sq$|{ ze$332@62yum1dd$-2S`|_OZPwtJzsUMQwB{lm;gmvF1gLK1fEdQmxvNFPAY<URhcA zr0X4lr6wE!=ZV%yDlg}91UP58=HOHZW6xqo_zns?s<c~ixpr5c#KgT4fy@eJezV!L zJZ)}b%%PY{<<$x4F&~Jn^fUmDRo$))o#_nNoY)75dYmZED2PY_3?ZwXuhb_lr30yK zL+>QY4OQ`m=V8Kib%q3Zn-fK*@r7Rt{O4-a3W-fAe-ltE_LNHV)mP!6)VY~EmaDmT z`t(w&a|%vp&^mYXQ16QjuW>zq2<BeRsk>A(_EwtS+o{ItzlkQsEt`r0m&F8LFS6Q- zBfaq|s+#*^pL~e<cK}u*-}u7-Z2yAmAV=H1p&~qUF9+1DL5m~-BIvx@S8_1Lq$%Kn z_+?t(hcuGzxXBM|e0bwwTE7UrE;69;z|;`k_@tkfL}vKw<NINsj?IgyCID&OmMg+K zhs4}!Gsh?7K{0thKUn>hp9~f;|9hIrgnXf+$*+Gm`=Y1G2be_)&&Zl1Ajvj1`A00X z(dI%)v4tc~$nSHkXU}$`MXT<|%!1W09Nok&?uw=*@>@UX?q-|zbpXw9SZd|qlREr< z%*C$n)j8}y6rGO=K~%$@;X(!{fw~~(jTXQ?8U<x3GR}QFiVY0XvOK+&nTkk>Qr3ka z(9?l@6_i`V=nqOqFw<G{vVkpOF>)D)y08ZOZm(t0=QBIoC<cZJ`<<YYkr4Jck?w^` zWyGP#)2tqLd;$EX)wNbpgU_cma9CJ$kGtw5rsmb?v7)y-$mb~ENW;ft=<7y)z8QX; zFG3A0o(qpK4*>OW@pJcPzR?TmFNVh;Ji|n+*PTFb-%^y*AN(F3XbTn8{pbi&laP8B zyFW=HZ9l|USiQAUund|XH|cL3LZI}c>kC!%!`<?Hq#F#>7K=#n{xnw(_x{B?(&+1z z-fQCyc8E6{*8vn{y7O5dimdm8n=b@<7_TGN^c+&w_&-#S4T)dm`7-a8T$bh;z7sGV z+kD+!l~VS9&fr*(sOV7^y)NZf5x~lKHij+pn^hJzmSOK}I<)e2=!prD41q=@J36y9 zBA8~pwL&_(iqOx4on~c^Z;>fg#h*3})%DWq+Z`HTCZbCHI-$TCQQuQhAYEOZc`Ua` z-QI$kg);HdyH@2@;qU1Ht=#Zn?-OqwXFcGtomEqb6x!<#KFE+`)@^~y;mSPCE@+pI z^a^QnP4HI~c%K-8XQZF~#c#PfB-)vI^E>>xHq0qL0qInGq5t|aL!rP6J_bXA42n;v z^78m-@Dhj%TX?XoI7uXdk;`P;ya##xx9F$=f|ZbBsx3IF?#NshFUQV+;sDWjUCxex z<5&@^%ArIek5TA$4oqg0fbDrkN<Rw48`}qbX?d*VK{mtp_A{otwP1h@3Q*{A5e6<K zotoB?nt!rCr=It#DSf^hW=a*4jKGiYK0P~hZ7=Ap<3^+eoEm;g@xo|Ub9BGyhfI{i z-<nUMg_EMW0-HC9V(L!bG>vr!?wVUz-zF#Jot>|lN<Y}H;KS=sn|$*>NNr|f460^F z6w{WYAW+F~Rnh(tP}tQ<po-u~{Y9LhY#F_>C6fXmXE-+HvI?|}H+Iz{LPi`X9fnZH zgk%c0LYl$|_cfT0d&gincco6}UZ0+lDl+!ts%R%<v_x-9=$mw`xVrt5yGN4PQyo~B zHj`Z^?<F^Zj+gF9!B$^Zmrr)i-lFl0ZRGD-tPDc(`>^~YfavaKEy~!(D!(|%c-|v= z4YVs48&aU_2w&^GgqUlyfum{c*sZo3@W7LUx9@{Vu-Svey8-s>GsHKQc${2k<ut9$ zfa-dox#tI4e?0yV>YUW=O_67ZE=xo$F(=_WO0UmOb941mRS)JDMCTbR-=gUXAF?=+ z1k6oG<B_pXIzbgVzZIuUUsJe#CKv8$xc#6EH?P{X!(z5|;c^FC``Fz;B4p;s_meKd zEaT`|xV*<CRbqBbxwc(xKzo!jWY+%(?;Ft5vE5@S*usi=XFk~M?ggLw*aaW`S8>Yh z0GZr;6`d=!h^bO>LUo$OrreC<d>JLBp5uLS>ogo$2Jj26$?SK>cZguvoaLoChYyo_ zT9LM-GXk6ut2lKtvRu<CB(iQRxZU&GQ2<vamB+8J13#Tdd};pKYrv`o-0VLcCpl}) ztd{sbOZDcvw*FH@So^XeM^fc3@jz=yM7y=+k`r#*7<0e;)%^loVChBWuYY10{jGe8 z8*gkQnQMRFa|?p_llaHSg%rbW2h`Gb#t-m>WuDQhDr!A-i9BuOjc1;f&4tg>Wquo4 zyVFP0(f&MrM70Wan>UW*b><I8)$AP+L5)I}@3uWLHZ#qe4GSP2O0SQliE#<}>n_HP z<Qr2v==_r-$e{hFzKRs*|1ufyALP0J#W%~60htaw3@{4{3KCaaBPaY7KlqkqsTUNC zxHsPhg?eT;1x`)j&>@(rMfA0p>0;&2OTRAtt8;<!Ey;|#)ZgFe_qPeq+*+u(SeFig zvyt(l#IqWgDq|~Mb|U|0d)uYu@0PQ)<%ai%u3y{~q37DzThA22f;zC5Hz59V=fZQ` z&FjEwo0ym~!PBenu#>2s=%DbXUK$&F$^c1HFUR-cUjCpZC<IjSPYe!KFf<bebWrVK zfDa{tJ9&aq25<{tI~tHnf)odaA!6vh+~zV9?}?18(%4u+Odw1eN1V2A2gu)_bZH{_ z=vW%lv+QMf-z@HBkUX+J9_ydS*ERCP>(!aY`hakF%*qRy7-Irh09l55QIa`pKO~G^ zDyg7;s`D9BLWkszU{8%m9c4Dgq*mRIZ^nfqmkK)WP$unRY?hpF0@TA#5@dI0>^VzK zkBi#<QU!FQc+8;;d0IZYO-GI6OC8FYCOCVJfAQNG(Z7E{D-pQ))S!m%D!0)^;&M}g zJ#JXx0j|dHfxW}Z!BklXToS@8Q<GBH2D8jY9Z6wEYk}re`*nzM^s%_4?+BnDC1bMR zJA@e3uz&c*Z=v2p)H-g9vKm)3ooOcsb>M-n9`)H?_+%y9sJ<i6%rTm1H(_bM(wWGK z_baOO5A}6DTWQ39HqQ*+hO2R}6lnZqv7C5nJfBB3<UT5ORcW=$%K{gg8y_IzsICoy zjl@Rk9B6SXW=V@vq>A&XOFjf~eNckzf}f+;7BgoGE;Y^CS~NdU`-w2LsMVzoNheLu z&fuz$p4c~yK-jBqr<j;9%wYTRNv*Xs)<VkHgfXQj*7E41w>@h5?Wfb08XQzu?BmFk zOSW68x(}FE`9>`WnoHtS6@X1@-GN`3b$;1Xt;QdeQYEr)=*IaHMBo`X%4qa_zhGs* zX)Fe@a=pm_$jJ&9(I`jOF5Rs<jU?cjzq?-N+pdU=f^*U@92lJ>^tbPn&{_m-b-Bhw z3UWbw3|h{eQJ+(pCj2pAkEu7RR5kNLAJD$8o_9rU4K`Cj)5g9p&A-B-qNo5-u$DU~ 
zezs+yM4~9b>z%{c)Y>YnFUQ&JB!WVoufBafd?(eIz;!fGw~M_e3n(Ag>jky#OE8A4 z)~8N^z9n|-75O@FJE~j<j;YXT6Vq@djVjw%SU#uNoCc5L)AX1$Oix2(&84j*j5ZQ! z<Rrt+mkROL5q3%vV}o^I-L^kpHB<Y?A#kuP<|D|xCB56UNPyM~o?>43m(Ba9(z}@> zLQ^<c7^DKKQ8o@p6WR@W0$0VLZwV-c_%|Dn(krF4{1j-bJ->#`)+@1W-%INkC8c`= z88Z9LF9&s8ZzPYYbZf%brF^cq5A-96vsWiO+r>~wX$TYQG^$JB-$A9M(i|tScS50R z-C5f`G3oqVGmg^!{GnV+M&FoV)$nB+GVYRDt#VQ0aAEZ8BG_sUQDyAIG~La^oWhj? zayh-<kZAV`3X7*SsRz>^9n3Z@902*tW@*2zb1WA{pc$keQH&!ymu=#DDc-L^OZp&9 zArg`AMEek<u)Mi~WQoFhFub8{Zs4>&ys&LWE|a5QXTh=x=XTZw7Q`LjkEkx2jSIs5 zG{kqr-#YWOv{y~4<gjH8c89)dp+IlhIKF;fg=B-^^Y0JT=?v<#GI%9Qt|=dp>W1vl zpUuf)vvCj0-kHzT?kuwQ{oPncrkMKVM3WHXv8emZA#$Xu*l5fiTs+RoB8ipr$LRK) z&$$4H8Huf@13;?GNF&&2O?*xp36d>j*ts0MB1Zvi1BrdZ;O`6^J3RJn%Hm^E-{>#3 zq1H8E!{;GRF=88ff`7WPr~mRQ5(*gf$zL-HxrHBG+|BE*i^#Y(zLT!XU$g1Blb^x5 zm`Tx>_NtF<gMG#><0#GaAES44&XAHpwX9Fv^zf!n*S!tk@=t_92lxFk=aU_(p$dMa zbF0@aXE>_UDz+h+02fe~$fJEM;1*XQ4R>NXeI<{Wo)&o7BU-so=?w7i3Mi;y;Qs^i z739Dl-YeleW~9v28&~IWF0gPbIdca_z7s)PFLa{hYQ@V<ks|lsuA0dR(!tLlLv$kD zKB5n>k8)mO#%-}BWMSgL^9O*bJdC@=4|$gzuW@D%_<V~PUf1hArozFRkNzGeZF+pC zWc4`5;=0RtzoYoGI9hzK>wxUQIZH<RyqJ8(#$a)3R4M(~h&NHh0#J#PCg@R$dfVT` zdmXBQRyamX)*tjJ7qO^hO|l_YEbMx&@UY3kp{hd;WV#~V@{c2Tb*Z{I*S#1{b)-JK zGT5($9oo;0`1{^O*!6{@Q^|ULkG|6*VWrw#mrd&m8Bm-AX1(IrG^8&qsIdpV9k7>8 z=09~>Q*r-djgUd<w`yO#==ZDswAS3`nQi~wL*r!jPo^pV^S?YZ!N1zP4<`O4`3n74 zBRF?$L@3@hG%uUc`#?ha-pAn67qIeV4tWjNXpyex(oB}HPlibw@IG*2KH)o$w;+S* zpR;Vf7|sbCAf3UygDo6(ix1xkQzW8mZoDiNG+QH6(Z!f8W<K`I=?dpAzvBG*bH35L z0z3|eYsd~42p`K{n*oZf#(0kEw-zEF*O?dgZ#%6_<c2$KCPc0ZxHLYEb<04(yrexv z;SFUU=>31Ky=7D!PuD#P!9uVg!6kUG;4VV~f#B{0x8UwDxJz({;1Jw(aF+yk7~I`q zV7T*>=Xu``_pZDCcis8cJv~$1Rn@2V+2@>HFKCeqs!lO3s~M@C17ai857sNGf2O~S z@UUpgHN4=A{@ViPo{qjaB}yhgv%dc4n?aF%{K&0S1|hz_7$l3zmA=dN&$`@tPjpO^ zK2yV)0x>}&4^eum5ftZRB;q?{om4=}&Bb7npOb05V+~)Leaws?<`+ZaDC&N@mT#^u zKYVc7;zyn<L41rpW>{7DPcA^Z_%4=LqX`ay)|&eB$81k`{-n8Fp$lia@Z{wAK!}#$ zO^S6Tg680#(B{-k!L$K_-MR;R0jJA4d-pe~4_C7@w^_19h6H79^W6=@Ch`<-fVyLc zyu$W294h2#pDbzC0e(*7jyxxoMYpNgI^IA!Kw@})=*OYy_PsvS<H1adn?A(1yotHI z@2XF4o};1;T$X-8vP`+a@lq1ysDAr`67iQ)oQdtcL|)a;JO9kDuY*?)lD0Dh)$Ur9 zv&qmgJ+@HF`8lP>O)8!<gn)PDI;}BN25+?f2o2UvPt~Xs#MiaFiXVvgk>oO<+JKs; zIVqeurJOODF5BWVxM5Lt9~XLhVIs}K>AA9a#%IpAi(`$N{iaT^CuC&M<Vu;Hg}O4^ zZP_E0<qY?mO4A-)@N^^lQKkzPo5vfx%Hv3_c^`>)MC8KNpn6mB`IoUIM<Ec;pP0TS zmNlQlmRSY452wT-{!SSQZuV%ec3%NfgXtNtw;P?W@eTee1>aG0e(QU?OT)dj(gJ!h zv4Fr*Nbx}Lf%$~Ow=5JLXVs%pxWH(OjM$~v_7ugKe>%WfJO{WP!ji>b_V~%1Df#2u zB+}7cM&5=7-!LTLTRz><wU;t#<_-DXatgOv1Y#S`R}$q)uAYm(n*#M<wm~Z79jORG z<&CZ7QF^Wp>X)<o^#KQy)r5d|Utg;$KSm|Q&)>!`j{9uvIIy5fR43<sxng+!5K;5T zEY5Ro9~E~_lSCb1qF=gPnApnxrDZz0#90DeI=6VRH^;TL^faz#h>}6v3v~Rb!kt7h zyMdu6bwJW!AaA<A8h*_|%aLaXU$`vu;mU+<rYN#j&8Z8E4$Y>HE`JyiYW7sDO?K|I zf<?ELWHZNQe1JYa7&A?3ZO8u)b9y<B&g%_f&}~WY84+k7d@6Rk#R!iYCHCj7`q9(= z1@9GIS=Kc}EF1sgQpcs>a<vQE^Ry*~i-MSc_}^={y0#3*&FeuUxT+>pSRGK4tBcQU zVh0MMu<FYyN=*)^%<7O_p3iTQ)Y9LU1#)WL?d&)sN*x{QpD(`~`_T$j^XcvN)&<-f zUin&QOr*Et_8h6NNpC(8j1u_=g)_B@I<31eqb2Mb8ytAkCL4JD=f$=&X|8#R9o5p< zrQPrcS6eD75H|_+{kc@|c@CdtkKvDTxCb>fGs$RXG^O>lzj|Da59gm5f3g-lHS4#8 zQ%E3SH9&eoA^DCO@L)+&WE)MOx27#mfPy8$i+Gn1t8{NfUKm4E;0)o&5XwBa^<z(x zLK}>q*zJBFxx2y_&CN)B#pE)B^gXS4R>9^QwG#WJc`%8R5>Gp@s#jsOowysQsAEU_ z*rr}SxUBQvDRz?M)VsB*p!4Q~`@zhgN@-SjphbNMcrSNOt*dojIQ4v5vyR{_G+2n+ zj1V`6#pLW0K%xx8%KT>?c0PO*GVc1|b8^C)J6hjKYZJhf9Y?Ph(zdme(#F!4-;W&L zMVeD~P5Ry*O!-OnI#kAk#Rj(mTA+?i(kr%9HX{E=X=(7U-LA<$lbgtb3{6Yx_O!p} zSGl-b7(5+Bt~Bs#^!1s%T1g-YpHVaPD*CawG;EAiOu3#PB@ostTx*Z)a?_+e3yX|c zz~bE<W?g@v8ayhN@|4dZktzR%)5vrnlD6);*1z4dZsTq2?bI<~tL3b8CduBVsx_l= z@$dIf4-a@x-`O*4wfh$?AX?9Z4P#jcy>@KIb;VcfL2=iwnz^$Q$!w(~Q)*V&GSpN^ 
zQxsj8p3Q}%)`NxK@ciDZfw@BtY(}>x1N{xg4@92=c0%tsZHpr_HDJp|{@zf1hCCXL z^0X1ds^0MK-n@|~-IWt}tIrdu8KR?Rn+-cBZ^&WN#k8Q*aUR+w7oA`lR;<Zlge1{R z9#+rsb<X2!F>0NUQs(3>FX^JN#PmKq&iF+vcdd4Nstl~a8$pIBA9rRDdhV55uH!$@ zx4^h<vk$h0!-7I;PHn4w0|UP({{_WPM+J2MhK}N5WdB>(`u}}V{-1p6soejC0RG3% z{y+R{*9-gn4!_V4bfbx%pJa9GI>!UdGOt3|EhgS0M{5T$-4orCGn+!IKcS~9b-`z$ z@gF_f*I19eN5<g4;k2F+H~Yx&v$ch6d7j{WBCOb+Ja~7rn2@Iou1Z_&?=IM`orcDd zAfLCZjX>w#=C@={-8~s7Qjeqz%N`n6osTq5{GrhJ-E~nVCv=seoId++xbKyB8Hj1g z$oRW!ijp>Gj9$$;_=@M<U|x9iuYRBN8BdfE^@O&Lpb-I9=SFO=QAIkTAO5Sa*83YQ zvH9D6T@c&QfD@u#>OuF5FP516I?@gl=mt@aY}b2O92{$ToV1kcf?_CwAKcfzxUeCG z=B6F8jA}g``g&)<B}6B(a}LQi{;K$A2s<d30FKl=acf1|BO{jZfF#q8-b$!6+H&+7 z7aA42HNi-#>B_0sL!`B#;#(dpq4X)IJ!o8lPMY<g_}ykolRrZRcOvvjHdE2NO^)uC zU$C<m9E3pIs3F=+ns?WZ(pX30!$jx?i0Pw_?q;EJZ3N{;vuI`Rwx78w_Xw2S73FCD zHqn=IlTiD7r&q&1vNIiO(kGLk#Eva$0fw{^p4dWF{F5Hs`gjprj%PcE9~_JcQGDk~ zf($pGT|AIP%CAZIV-n8<hnj&OYUrEpR8E-tBcpxY%nf4YOt#nY#zMd0wOqpm)Ss)X zRIdgzFa)n5m-vn%NgWj2zGi8pDM+xc7*IRV(D_G~z+0>yQiF$%Z!0g~Rp0aFfJ9F> z)aF{|6p6cniEG+@0tK05E^KbTT(d0=jx~DCEGiTscV|_HKnE4Tf8MLGWk|7}zdmV* z*Vw*`B1GW<FPwfkzTL|YBs8s{Or4An=1<_qB~3lw5+v=ynciJuORY)GUEEWd{%`Am zm98*u2BJ2~l)Py2_W5gST1=>eV@vQkx!G&a3m!NnIK6f~0AYEMj-2M$z{I5(ONP5J zSN4<Ns(AA(Ix-qj-7Xu=h<oieQ`^l<+T#F6f`At_C+VKHS<lM&*Q*U4YOes1c9GX- zX6pZK)VZ~wp@KI72a7zGKu7t_EoTP<EhPxR6POZzQ|17z2$+iI$7}8OmH{f4)8WK> z8V|?}7d0fdN;3>G^^pn9DmA=J_;c@fs~zW64);O?lR4cCKhjdbhQ4Ld&O!76Gn2*1 zno9#;$&=5&05O61pK|PNl1-hI#@3`n6TvcYX@8t`C}&61G+xedm6_=208X;78k`x) ztsps#(`QC?zZoeL5Lj0J!VI4^lGZ~Nc{r)e0r?3LZj>b<?Fw$&T7OWb?kUxCL8`-x z^q@vS$<a1>dsygl(otQ6Bd!7NTb@)_+!r8|z3Rz7mi$ugAIGGGXuE9JwJkha&GPSz zS1PI;efs|@OgaBg-RA$Hd-4CbCHuekT#&P#uO^b0{xr@929J?)gc)9C-UCK2J6VjM z{<e27D?9quE&ss3FkPOG*;DQ+Im14MW6h;L$_-xa&P;6=8Ka)^MJ{bG<0=0CYNl8K zo3Z1%vfRw=EBSi$>mp~%8kLmQBC$9pFP;fGU^RH`MH54@IaLGGya{n@uja^(2di@G zgL%-&f12#b&VhCZ=J_LLQ0s=JPf$Zghjmf|1-~Q5463k`RT@mk@8BI@^l+o1%W>!E z`A4$cQFOJK@n^VdsCbQqv2*|uC`S4w5p-oFE@n<G27Z6-207qX#=g>oL>UZ3#VxRy zlUh`#&#IUy8w?pZUi!}2hQ@XJB<CU%SZP!iStziGSoc9KNEfjRrl##BRx9NV_TQ8I zZvUK)A|wN)fjk_UIo4P|s^B51q}6#NV~)bVp?F5(pH+(Tx%H43INbav?2qj)ljgML z&Po0U9y9alV(J(LGo|+UVh69a!sN*a)5&*|;;Q3wG9WFmjEK!2^&F%YHp-JYro^y- z&XU%fO*K;C7Jm~-xe#a@;R|Eq&a@^`-yIRuRjx!<0P*YSX|@=cdY4ak6hXcpBcb&a zM?dM^ZP)lbXWy{_zf+>X<)glCWmr*{nYw2lpi8?*tTNvD?Ii4I+-CtN@-f#ypH)Jh zQ2KL|_8a^$^D9A#M>(&UVql!#*{tapxE7!aE3RxUlrtI-M`@_SS0X`7*r0Mb+I@dT zK(x><@2KZo0~~amQxEW2LH|!q9+gc!aC-o9l*;e$ErUR|K<HAETu56yjUZ3bl;iYP z&krH8Eir953lGFe6bjnujwdgb%@#<s@C#&dGVCkhbX8@X^ebE%9s{T;L$h<Bw>)jA ztqiPppHuK8Viag=(5`QjwscjOe(%7bvl7~_YRFpFS5_4&>PLLVR!|U!0&)!tRtHT~ zs_cu7>_kO+PtzONLik>Fxem#T{W^Ep<Qndfr{)*|P$WVW_4PD37`vcTxzEy^0dm%n zcIo!nr43U2?BEO!nUo&EPYUREpKQf=_NC)Q*0|iieRu){QW#GKnqY;`sWJWQ6r}aQ zJP>lytu7<JceV@1>nyf?Sejl7@|DE+wlH46%cnT+?Xp{SmQum+($%)ruDz252{=t9 zjAwcQJ+&gJX`80#HPfyut6jkboBho2)1)cOMi51SuMl|{5?c_it<B<s(Ez%8AoQv! 
zcrm2n<{`6J#QW3v+3-$>VGGL+LWqD+_Zt0?nH38Z=P+bE$VU<R7O<muz?<ML3Gh5j z|92ifL~Tg8v)V!pN3@)5x%A0mP<ht^$=`EHeNhP{U^A>qXVokKFMp+;z&+<~v^ExT zdZUE4fxljzA_15{O2=|P|1_E!8OZ`;5=jhXO{aQlXzD%Q4o)H5o<TEN2+anp;Z$;J zo;;-;GuAhyd*sL&0g|g-k(_q8NysFV!a{uW<#eDwCO9mu_W$yT+AfdnNSdSvG&;Gr zv$NKopn!-MEx9!-^6ElPwgYdl%M=G=UeOQkWnI<wj^u_mkb(Sr+Knh1^ctG$njR&B zjLWR%HAb4f!jWX0S0|db^gHS#q?LUStr<mhykA&<n?pyaZc#e@@S|iYY3G7oo|Q8p zLR_;cHMYaL_?@)`os{^J-ZHCtE5T*uHml7RtL^=xnh&j55RlWoi#l>+ESeTHFVgH= z(?8=(rLWuksR^%kSNN+$eP}>o?8Sne^!Z{a$JVecPMa>@G8=7S!n)q*W?@Dt&*E^Z zK=_QUd8Kua0}!7Yr+nr_cOdUc+H*CIJUwv+oV{Z}0SF~R$iJ}G85@(tSJ^(08d-JA z?(|KeNsp_mB_cjEp06ED<p<*0Mh51JDOcCe*jNN4wZg!AqQ2Di$#8MBFTdR?=9$qX zBY}T2wh2bWz~ys|EHv)jpR&;*29@@Ig<_@-BVe>yrXr_z%aa}})K;At`Mk3s?BXhO zVc;owPtwPU9>wJ^G=6BM|1TQ9f+VM}k2U~a_MRLgLl*?=(&8;QPfk91hgjewTvyQl zr#}#*SMF=cVETImjtjcG15W5;^GgSAe@r;+Vh64<Z1JQ2jV#5nM)Y7j#47$ryGB1z z{=b3-2SZ$YA>KAVnparz&Kxj@;G>g<H}M3h@`~QlxFIks%-gJHa-MPJN7V%dJG}_) z*pwCqrDK|)^M3QGOBO(hq>M~leA{W#nBm{M`hUdx1UXKBL3rN|Vz=sXD`ebVawQs= zc~6k!uO{wTl7>-pd0o$B9H-UBll=c72qmmug7SD!s87Fk8Nqde#Tx3urlPODbG?L^ zhh9IuDO-_U9MGNfko=MNA7RLClC$`pXu-4LuBAsWp(tSe+dGKuPx^bTb=|s5xwf)W zVY5DbaLzE6>E=FsC2hXDr5hJ@Elvrvm**E&Mh!S^eEVM7(NWvC6M}6PYl+H~UdAvs zn*VbwdXG>uA7`J}5+`j~WKRB>Hsc6x?&6B_BVCnea+K{Jm=~&@=>d}j0};n!jm`JP zb69gEob3lo?fpBf1-(`;nIwZ57w(w$T~2-oF0;pst{X^sM)~k1dVrKKz@=E130xsw zJ5tc$o7B$w9|(Svh={9rApD;%K{Yhax5^Jw-q}m!up?PgGu9-XOM7y}^IEIPgzffS zT2D<LC-StQUPY<yyq5Ac^R3?pX*8*RCFo|%Lsb2q8$Mq8<A#ed+IgYlyNl*I@25|D z(>l}hJhQz2WgkaB!5rfD&X_dZ`K-rP6S`YTEDmFPF2phhpM3nWQgo1DoL-T*#5K!L zXD?LKY;+@?fv<eXv9z;>Jx_CVy8ZB;tL(`$Cb))Y)_KmMp-Y}!`kyBSMVcK*weAWG zkZ=N$5=FKOwi}*JL=4czFDC!tkE+uH(2m<Ee7i!;!M;!j8!8YzG#Ma2AAXkV%?A_? zZU@Do=QR1X<!S5N8SJfC^LV=pZxH}KY2Wkn*f5*fx{7e4stlziM=3=!P?^8sQLM2+ zSO%Bm4R{5K7FRmQMp2tixQBpoa_^#L6ypTb`t3;hPq??#u=~7$aQTwO*yhl~Bh`k- zPxwz&Y48fn2G=erh9o3W&J6=U7Jyt^K%7wi=2#4@KgPwOb$`S@6X&0iiLR)n4Bp_M zt5!QL%S(e-CB>ZJDv#yGkL3#1_@m%240ByZlG#0Ad0AcrSA7apKT9Jv<Hh<+X>g5& z1}|A39^ZKHo0!Yd08R-8zu1KuVCYczWTpV5XuTE@aWFh%v9TY#HzZUAuwonxa+C82 z<~A<x=#0IAd2*uFaRa?wO7vHfDGlw|5X$|*Eq_foF}~!}+#kJ0nB$3t7<LdZPUw64 zg-G7lMQlgIk_NKEJvD?~M>ajcxttV&wI*&Gjx<Pe%|SG*7Q%;&a7-M`5r6z$>|pi_ zfnQiy&`f7-RR8i(_*deteM9iT)38WpaR0Gx3t_$ue6R70L`o6U{EG=+X`IAs^4ubC zl>W9WtvQBx+O&%|ksWU(D$jURGrWjVNo{@}V$hocHOJ!0@PGJNS?yy>U9OgT&%+`5 za2>PsJ`5n^Q=l4O)SHZvfl!6|nKLpJ`su%gKlw5ubWA7t-680j%JZ~$#ugoWM8)tL zwHv~z)GF_jlf4myiSNaCuJ#38D>h%uUT!QU|CuZ5=3ByriLVS1klF^_ZSGQpNGSJ_ z!)hQ3BV8T}3uG0#Kl5#@!WlCr;K$JBj`w~{Ca(tG(aw})yeoqjlGAJVdv{J@UBLf( zx6CHhLa5G&V1>6x&qNlO9Tl3+zg}77J!q*;P-X0*ET90+J^0Ehlbr?2az-Y$B^zHC z6I@tI?ZI8dwj{uA@q^>0m<yx~*rx22iiH14w-Lgpi_&_zddgD9tzLa|M`C|kvG*Fg zZ-pn5!8jqD4X(qd{pmBg;F^u!x%-qap2Q+PJ#?AE{Gs_wxzU+}iOE<#n$3$h76o|H z$7N3*DcP@EeH^he%<Rh<zoBBgd$!!DvVr<`&WkOfc;y{261H_$18q*li_Vv4?0qO1 z3c_HtdDqSEL`~B66(j{*G(ghjZbO9t6!33@9*v$V6Fs<oCN2eAJ=}P~-wL-Ur9XOl z_u;x$-W?W8PTy2`U+OWMg1e~=!YSE}g>aW*#M#gdMigSIw_*!tNY6%Yp6TLU?5GUk z<kZ@qAQ*Tcjow@XH!56ZA|J3lPVZ8YW`DUZ=$*zhk<G#<gaSLTBLRZ;l6w~he9_C2 zRFV~|zm8Xs-Z9?rI0DML#dAI2xsA(|rB|&k_R)aDAtc5JtZuz5-A^x3*|~>mh7}=1 zl#X$>X9Cd+v*N$=zjLdtH8mMHe{5m?z>g@pmWZk<J8URz6*nldzEc<}cIO~Y)mtn} z$r2I??yTT`TsKYLIsKUj-{XlT1bMV13s%CaP6|B(78j1u{n8sY@6~54;Ya+|CW!;K z<Jt;^^}`&PUmpL3G4@wAya_-k-CYI2l;mmwx+`)ROBCKrnRJ0GnLvoyu^Kt7t@h;! 
zq!L!TB6<Olvx$+x&JWYo6Yxon_FRh_{-Jq(syE)s2p~KzSg{Q3P2$sco1WP&+V6iD z)7R>QuG8X^FUDSHjGgPj^TXg)vuj*k2YL=e^7ZnGA8G=2Nmaphg(c=aYrP%dT?@c` zp(L%`w+u6<JE9`=9h;GotnULy05;!clM$mQy2-TxiZEA@iulLN0c<BR^Z^55!5n?j zwdAZswVAigJZUrA;zm7ihl?#nM68`wG&}wtLf$|jNon<(<0RtuqO)_R*3UW!*gk`t zJ8R;YAU$cZx3VXmFD;6T23#QmU%mh<=`XUw(G~Wk#dg#$U+B5~RmU6q%U6_wFXqBc zUX1u0auRC{zmuEh_?_83Fl(O_P@3^!0n*01su>$aazc+1SUc68CDSb|E%Qoi{D0Ll z5(h{ac@72(SnYXMi8Lo9l|?Mhb0?Kc))5($4K^@@rT`Gx47uPi<Xw-WP3(8w<9o3# zx^u1;AK*n1`k4so8aPCshOBD`R}qs~>#R-LG3W)8<r1MS0bw11QlQ`N+9Z1T8V-Av zxkoJPgW$)_V)%3+3@$FNiS^RrUwy+J#s*OlsKJh8TIXk~ARuDYMZww{u{Gj3H}}R- z+}9-_J&schtcH!F$bR0%Z^QF+#5Z*kwOHWcrY-V*&*eM1d<>1^nNe?%a{AJ@9AxQH z;;*Fqag$9!ld3IAnI{l={lT|VO9hjCB>qEOMM47Dl5QnHQ})Ov)wlb?>E*`^=p7rc zD^u|6tOtqa_vqEE-9>WV7<1qjN7JpLL!iy6Nne2U%)%KSbs@MgF<R4>+@m!U^aiQf zN~2dz(dL_7e9bdkIM@g8J8se%Ij)Vio1G)F8S_54;U35g82*KG{YmF+VK_G$#z_D8 zn^J!DE=9FD3CfvGbN-neiDd6;p+-@Q+DySrm((w7re0IJNu&1Rp&=DtWPIZ4bQzJF zgXprV0gok1s#}TeCaj;`5;*}_qC}G7(hC1Jd+dG;Ui=l;T#1(Wb^sNM*+un*sh;nN zjkJDg530ej{1{7Mn!7$Rd-O+X0J_0(*1)$I!m*VvmT?Ifb{%r#UE++4n4ip15G0G@ z7l<}v)s$MNkg}F8R&{R(K6ojP4c|4~f7+6I?!c~GlDrTU{9a<`_cL~i_XyQl&g@)x zn8r2a$ZOz-<9ks=coFw28%*qOzbLLvqxYvV8H+W|c;<yzgEsnRQ^2s=cUp-J9)(B1 z)&6}b`yQ|gQz9P3M<ftxc#0kuL#PMsBtASsIL}J>rZP2v+2!!A&s!A+F<~ct?2{s= zUeFAVh5ar7@_n*Gaq~3FPEXel%A3e?X7?;+Vc&mlYV_ywpr3#c-mC*=M8y#7+u`S# z3b_!Ybtx!fTN;jt%iq6j98W`IkD>bCWm{Npj<Mm%eq*P{z-9HmK3cg{&QIuHeDI!l z+|VAg#~*8pY~13FZX~1{%k5&`eB`4Vdu#D6nJ<$;Y?Jcs0QlSJ2SG&zu{J;nXFOl^ z4moHo^r%AU*V?L#u!Bt{8?_-g{AU|{Ge=bF5j85>T(y%MhW~F&WN=DIKIhx5{GKGq z*`vZ4u!GZR%M<<Yq)~ehDk5l~czmz_x`?jbtk}E>vPmDVag_Oa1uP9iJEDv-x<0uK zFvcD=+0_5(?b^p*0v@`CkLQx&9ZDM#+ltv>p6l`yxfte4{b5U7cku*)o$t!Uncu`W zXT0^&JP+L<eQNC-7c*y|ftO^Bfu_YSBYNY<I+M)0&-gfJ!0hO|CZCfh$iSh&3N_}y zfv!He_5kriVDXCs!Rrv{*IeB5@1r?b*kB2){IIW(VC)o;?BnZ;b*rDdfsE2_)~0(T z59P<$e*#2AVh|~-d2`B9A9CeHkOXF@&%f)IB+PP0k+TSY-<maawmGZJU{&SGbwoD| z3U0t#o0Zl0c`yxUA66vhM;mp@5;?AP!IuZ;{q&qb-G%M?J4e*v$ct{me*LX?sRG1} z_9C01z=z(MS(2FPgXe)M{usabtqShfZPK$@Nz41uGhW5|rfji6gyPWm6#}8yb5qsd z+CmN~?(z6$`f}gfC$;R9M2Yta$|rxrGGN2So^%{bW@{zZUasD9JsowdTSojU(Fz1| z-4#lTS~VLsXTxKm)BGk}Kl{?Tw*!)}9U%<as=ddmWir%&kCw}YoH4F5qQFW(yW<qB zKIk>v-~bE)U+c*9Iv>WgSnd0M=M^Kw)@WMGXSv&|q4WN<9zglIci5}>=6P%TR*vfn zY$u?1E`RM2kVBeRdSjPOO|kw6c)!rqAwZE!X&P!_i5!zuV_jRe1EPle1pIJq2_qAZ zO)|b+I)uZR$c>Pp`H-~g>dLp)KX-eJl3!;0i+qkDulJXPucgN5Eg7&|#Qlj!8u>l; zG)a30X>ko>-qW!s5jqK17Q0P3SHrhv99Y5`pKS;^<mAiQ8`6xyTCNmz!VP_u8xvKP z>rdS^>_C$BnJj`@($D^!%u=A?DZsS115Ld{&=CJ;%f?IxfH@yVC&lp=zMTUx5snqe zjhl&Xm;UUsc&PPilIW?Fp{a5Cje&FU_=zDB>`s#6^(uVQ+)ha)b#U&IZgY^uK*|$Q zu1ZM$HrZ8##|X8-u$Nwr3*Hd1#ErjH0PmA`XP*-aZOW%T5#RH^2~u`s>Q$S0T!KZM zwR+bu!C-8+NQ6*$Qel#k)Vg<jMJWdm&}5&>^)8!t$~{SIos-+Ueqbr3=~r|KV?u{; z!*w*prECrno4t;Y&M%E`cqS+CUVk%}B<6D_4SU_|JvoVu9pr+7$?d=<YC)c;puHwT zMI|3yxt}={<bhD4RqwYMO=3?g+dbK^1LdXTgidbsH7#n)+jkW<xnV0cQ(iXUfmxG= zcwLn*LXns&9p290yjHV=J%}02Zj67fx#kz~k1Z5y@4%sjiJ+rxgg+w6odi<z3M2~} zy9JaBNquYV29fK#XV&IND0V7`F|dY0Hc#Y<D@o*Uo5eS`+F(zFw9K1iD0{A?tvS0} zr$ikWhpng=ST`XxRspZurcI3oSRf_%u<Q_rsYe>4L$%nFA8?{GvY)=*+d`RBTO?kz zk}2K867CGlkBV4*M}rx*5q4zSc*Y$sz@7oGfB2*V57p9&y86jJ!|*gOlDQ-K(@BVZ z&*M!W#p`jrjXQ2d{SBi*9SlO$3DvgPqKuEqi(gR~=3%npcI+h5R9|5@d*iNu258QO z=9}r>qSq|Gvfgz8a;+ri_M7y{V!oGQe+VueBf(kRhy^&D+v6}@!m6}IV1?-cTY}&C z9jilp&eWBzP{nD)r=Cf5(xW6^LlYu+neJ_spT3aGwhldeO<^<YElQ`clfRJkAESNV zjt<}ZuuPba)~gbHWCxNuC6lxS6Gk+tR{OyAcVRG`dHFM{iWJp%Cr1~bT=m2qf;~q~ zJ}8VSt&v!>QK*V@7CE>PX0Vr2_Q9(xWXQc_p(QI~D?I8qv3%y*>X&Vjf`oKYfteik z`PkhJeN1|O#;%T4);gEa1R(L|SJ0hdZD{{~8Pb<+&WQIX1a)cWTkfrMNwq#IA`d`k z<#$gXzu%d1w%8HA9sZ_L%(mVcv##6V=P}x-GR+<54{$IT__{e1zA`Hq^%$>KzM|UQ 
zlt5PO^2^AVBOU%_HmivDN~@(bVhCC)PTWPYPGzdEyHd?uR50kUJ+8*=Zi}F%LC5L& zXcFJs_4Ir(MwlXdmtMgPoqeSa+MzEx`*!_2)wy*0n&OE+r+wCAyW+QbZ_JxlO2g=| z-C&kHkfZx<I#Xr{skxU4>h=pgH#v)c;F}f{S<tecbuK`uh}gIz+i-qbo*3ajL2a5Z zKGyn*h{k?sJWw6PB9Gbn1;;IT$za<|YP+ghn=@o~Zv#eZN=SLR=J-A0875_TpNKS; z<y`4CBzv~39CqyedVWC78hq5bZ~M{v<@5WuaAN2T!xEXD`chCGIdPh3vz{MLD;6O~ zQqVKPR`Mz%$S=UAWzf*4MH6sV*u|<hKA<h?<n)vV3}M46%JI2Lw<8)qUy~q~zFd6# z<k##~n|8TnH+{aBG?vkcv)BFFmONF$+x*)F)#bS3-|2JzjZ#H})&+ns5b(hosEt@= zo5O8?B6svkuWDqk8>!#iN3Gy@PD%7ECKc6Xf30=}am#;Xr^zz@tNMR1!IfJAb9z%K zeT7D^2LB@B#Kk*a{jY2P-;?LK<UTO!>(Dp9vb$pE4jM8&eY1Kb0L<f27li%so<W1< zc8GU$)_%dUocLYq{!{pp=E-|1DuQBrv5t<)i5-SKuW!y!ahr12x7cx!Du^HTb=c$P zOR)bXGV}2KBmd3W`rjFQT*CiorjkW;!(!}VL-P`BgsRlu7$5%TD-T3tC;(83<+~&e zy;59QYx**t%>Po>;sRX%keUbUYhGudKlQ88ylW)`xY%){0e|<)!)G#Ek&&7$cmbkH zzP(F@z?`WEY1~!jZ!?<uKS}J`<BSJmomME;!FwK1hz>G>MtYQU$K}W$(!Xxgo9v(l zuZ8&@$`OlQ*R+AuCH7ExnG9^&DZV|iX+OVaF8Y(+fBju0EuA|{3vEvEy5sEXjA`HO zd4`?Y19COjNqtpqGxvHo>Av%4EnUs-Gi$Xcg^lrJiN~?XBT?gxy7INz$==G|L#13? zmGNh#;pF0C$Cp(0s1_iO!c&LO{OQJSJ{Rdz5Z0~7!j=3gKTu@hjveLY1~oJz_E%P? zXhpY#c|x4+?#I2S18a^=tfvg)mu?TizlI$hHtZ=#S3m{x@d}ACIEs{-mUZLP7YCfV zx#5}T20Y=;L?b6+W4iGzfAXRP|Ij-h`XeB3WarugU)|%P2&BWAxI+_V^r5T?tG5c` zsXC}plLbgDj`Ov;F0^q!5#0s-h^ZO%`0LJTDcXQ-iAF~V%)<%;hh-+?nZe8;GbASz z-TT$kRDj=J7iA0u<j!NB-}e|ydcXfRxlCXhBYN#VuO68sOW>Qsnu~x-YXDO;crz97 z6B(9?4-<!YDgAw#zNc^CCY<WEBD!Ql=X%P;Y198K{&I$M%6ckxYRoeSxJ6leNr?-_ z@@U?h93*nDh`m&kc>o_3b5sHFUQQkK?z=*V31dJn7&A6E9PZH95qp)z$GZnK8Gx-0 za&iRGpSCD6jke~0B**RLIPP9LORqu5BiUr#k9MpE{h*rMEe|J3W9Zxd08``FDEBjw zoO`GdFWQlJscAtQ(`l|6%tf5b8zmWeYAx2!R%GL!(3BXAQ=xSo3+U@UUcyXsr$ry( z5Xbw9o)b%{aoA8+%RMY8*L=U9S4Orz#$JH!Syk*HsVBh7J{2$`;GQ8?L4H7*MW@Jg zOM2AQFy!62637LK_1JNX6HbDpX-zO1V%PA>cO_Ux>r$E9|D(EfxKFsHAWo|-FYC5# z(F*P(wE(j(TY@jo4^5|iKG*UQQ%H8vL~A3>fb1iTwUuQt{LJ6LTVTkmT4y$*><ws5 zTGDXC_0|9A^U_(fXCtFCEHnV6P0rEN7|ehOxr^KylCVV&hAb;Inx4Ec8_xX}p7H2* zJp#!>JOm~~&}QaAAzGbGgCx$J-(AV-d|j|=Y<j~DSpJ#~+`t>7R>pLZK*&1e8xb$_ zx1P7I=lAu(=vr|SJ6iKawpI_VW+Xd&ce=fPg~Pe;D|KFpAzvF!y(Djtv7HAP+7`=e zWb!WD7d#Mt6Cin*YeFXl+@7Ji_UueO3lbKn&`*DvKbkp+xDsiE#TllQU@z*y`=`Vz zoo?GJ!sP9p+RaI>_NtQsv|=~t_^@o&48hTC>6m9n;nn>H&)od&j=7tbgGkfcz1h1u z`~Ek{%#TP)c4t2Yc9D<gSEQG2Y)8rfdmXP8wKj~`aa+o@>VmvlkIkNcAKsE}hT18+ zO>?QYSBDUg%jdbSzHyz&0%_lb&^~o%jI6BOl<V_jKp;$-%KQF-UzmL=(RFrl95r_1 z+s?_Osy@=*MNd%k(zPF#Ms7ms32IFyK?`e8GMN`k^TPx*ty+7w<aic7z^iJ9BcEx) zD+H&K*ekP-Xl}$rm$DyQn4GlYiM~8s79R7x_O1_HX&{fM7^QXV$1!_eHy#04SA1vD z$sR3iy>m|P{2_BX(o(=vVZK4}>td_ebM^Dw3scgd`gS~|$I<MjlFseI)l%oT+m8Nf zA04ehPtfpsVf)g^Op;aV?7Z`ZTf^Px#R^Y!%T>w`L5MnWlWn(0v#Eyx)(*<FyHP*! zT$Q_Di7@M>QTScY&509W8i*78u!}O31(c|UcbCzlX<|$+Wt#iZ=1eT5)}M$HWzf8E zLv_i0<G?~57`<Pn05Spbys`2jYsjflc#^tehu+F#B%RHb`ut_wH$uF{kXBVY&IZ@c z<|gPvR&fZYz-yIqt5qq+2N}}N-`E9LeF!<H$UO`C45$x@(dKWYx}s2x&CgLpz#VYM zO;`iAm`Qe>YtmQUuD`48Zz|B+`L8`n9zs;(t4fXkq&fK`t|o#``k`A~)^uIgdr<v~ zjqsd>a}b1XR)Zd<n7%TFD@Auhw{SH-XWt+8yqi2|J~p|~O4PEkTcD`p9Jxtchjq50 zh>qi<!ht#;@;T_RX_S&7x<gp|ao=a^Yoodfw30%9mRzKrF1%wtc#a#KI8ZTP?;11W z@Jt<14`s<$K<AlyT)a?ERN1S_Kd#clu~nd6!do)8*4EF19^^e4cXftd9$LiXjMHNY z-!@(+yRZ%*3MYI8b(rqh-Svc|6wTT9+LQS}=!0UGVdzPY?$=#2t1dXwd3VL@SIyJ= zXc|s~>UeJr{mMyW`l&dw8*Dahwnm?dNbFbRx+A^l6l4~g`3Q3Kff#<xM>dxFu+R_8 zWdN&+c#GhwUzM&^%o4jkQmpN57oSr2Rz^02h=jM(-KwH2>W^o<n>a!ySuZ@zG0V#r z`P1<kKA=S=gWR3o7=l%k@y^LAbirW2I%L<_Tz>4Je~;79R)u%WY>duAWCh*q=(`g? zEYa%x!Mi@$=-As*FuyyfRPII=`8DDnTL5?iZnuoNSkJN|<Bwz=YrTR@hH3Jg&}KCO z#fD~U!{VTtdXxz3xckM!SZZ4Xe@t0V&3f-1g2)GRg;<IoCN|TRt79j2G6|?#4?V&1 z_&vSy&z9^NuSu^qIt_eSy`vwz;iGnWZtC>Jn%BM+QXd%&xSRcO;PQJJ>)N79Pmif? 
zddyGg9=NlM*kF_~Z>WMHfe$M`>=-$Ve*32T6bnG~>{J9rKB3zSeciwCBj+in&`>!+ zh?%Q}-074E;fFM2P@It*G4K?{;oo@N&SKj1AiS`@9GE_O-)pC!h-G<4K8)qQQ6Dp- z=;$pz+`aQG5#Ar18h@4Je$luXdRI;1)Wjy6)6zpXwi<OW@_ibF^?3gH{)v$@`L{;E zAGcpu6xUPU&2pMzTcu!lFumR@Gp1xGQj{F7sVJf&wRPV%948~3#y?BR1ef1A$8C`N zxFAo2<7To1<!8amLko*vyXK1&?Qq9)4l9!|kJByk0$rK`woY0v`I!37susTKu7@_U z+e_K2@BXf?hv6tV^vMNd!#^22_i)~`#~&7f*)IN~PBMxHbdTC4Wf$EpV(tc_ek^sx zoK_q7#CBeOC{tAh))&3ZK7tV_zG>v1CR?0mM?K{lIv3>UK_lp%_8krbx}+2-8|80Y zemqkoEBT%aJgyiZdpCob^V)x7g?xy&YgE>%4_wOcQsULYmo@($j&gNpTNv9EwJFS! zbsX5n;ENJ(+o*433@Kezk-r(tUGMgF!bv<cx$92Ge91Oi6IuzSxMnx($i-5Ep&v-# zMC%%Fq(@z8gHvMAmY@sXnPaF#%)OOEvZ~YgxKv-dk=Wn`px-?p*QZr~Rc_T~`1(Le z<{=x-Cq570rK87&_wd$SN+!#D=h$~J78Lv3bHg3<+ofxsbjf_#>D9ss=v+Nf$%hN? zJn_Ui8={mf)2t^B%^#Dxe)dn<lrzosx;EfHa+{kf*1x0$a;n;S(KPWH{oTbwDbnC% zDzY6`o5wZY@DcsaE(F{;<H&l>^+uYgw%;|D)*QpT!OWRlS>2o|Y`c=f?oK=^S&jhI zUh^6g_Y9Y<y;_q3tQ>Gu%jn*U#_bFmxxY^rVZ84%)wKC>pBX5GqL1G2wqEEB4tf`c z)WVwP(6ug>UJf?IXIh6Y%eugO`pmwN|7%385ruFhdkfS3Yyk_GG3DWI4#!EQ1=|<S zn-q#VqZ?<s=a-Y!PZkO0q=OeZ9S4w(c&+fy;!>CcqW$p<B|pgP#jYo)3Ibbd7?iPi z-mqA>hNXD!Bc2$R_Ll622u68aOn5H1rlOyGG#!gnKIxV~R9X&Somr1x;3Z!|ZJ_J^ zST6;6CKX$kf_fy}`2an#_i{4aDO}>3Z|&HN#~eS{h$9fR(YF<*I)>&)0HhB;o2vq( z<4>j=HQQW0X*{SbA=Y1qE1z2rZ-+W_wKf`_J=wAow&zb-^N9sB6eX8U?&6{=B7g$e zjpY!bN?~x;e2tg}jTCoTuO(oG>ct`_{?F$n+)P83NMsWJ;kANsvmAlg0<K3l?Q9mM z{-##imdFl0@Bs3I@1wd+=qbakT!ONl2(C)@NrfdSNzu(&)V4|?!<C!S`Q*>k;I3!K zV#x}4B?Q4im@i{y(nokCU9ZAQAa6mkFdxHwCC;?1$XT}L<RDUud6{-_Smls|1E0h( z(|h<MS4=|6sDbf&xUQ$#4>^CFO#QV|qzmz0BSwHjlPy$IPR{1m*8Dn77g>N8=8Usm zy5Rut2ybj9V~d?Wj_B*K5ah~3N4w4yFWiXWRZ~K=AJ^OFWL6Z4_qr+R!iflIqjdzV zH}eU5#2!xy-O1I-CsV&>N9&CqlxKabgYrPdvUHp$HQn8xDlm|fhIdUexRDSr2Zxyw zylCqN3OCx^)b#aV^o(_q)v&qQUYo<I{%}~b(&+Q>Pa`QIE|p<-Sz9bh$YQx9p>Bmj z<U9`3RXxEzD*mx;*J`U(hmf79m~7y$V<V|u$A>`56TVDEv59X*HPvrT?Y=1vma_Xe zQdMTHl`GFzS}j0wwXI4q@UvffA_^N^H%N{hBpXd}tP|6OiG-dq#t+o|(QHx-hd8X( zgb3jL9s<%xQZs%U(Q)!li5&`N%_J><AO0cag5BU!FI)eOrD>5A?W!x)_UXsWT_*+u zi%Jo1!BDp=cNIGWhF>icmy<DFY70Ev3k4@H<5s(9vp>I5j41;g9JBb@8(uTC4P-cb zuyQzh?@yh-woQkFE3rL>T@L5>tEaO7&aVmE7Isb*J91nU9xgTHuOGrbKY#sw`md`7 xYVlOdAy|?Oum1j`#1Hsx1DO#5hxGSh;=He}VOOBVNatUNQeWi7D?b|q{9o)GNcjK& diff --git a/runbot/documentation/images/trigger.png b/runbot/documentation/images/trigger.png deleted file mode 100644 index 6987a070aaca684e7b53db4fa6921f79c53a46fa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16628 zcmc(Hby$>L*Dn&%p@K-G(kaqI3P?9dhk|s&&@BR@bm!0lQqm0~-O>%x3_YYn%$xxp zectc;p6@;1xvq1r!$0@zJN91p+Iz3{Tfen#LX;JyaIr|SkdTmYWuzrkk&sZGk&y0~ zVcbQWVW@N_MI2C_#AMVl5Px16Z$BW~B*2$iKs7s4pz9k)6C^WRI~x;LCu2tw6I&;9 zJK#P_i!c%rHIj_P3w5{Doq6Ba>T79dS2Yx7sAi<k8A(^^)?Bi<my^f3#<&Z3hZjUO z)pP@}fY34<d+u{{AhsaemSx$ts$DBnJyuDMK1N{%kuRY>rVVCQNYxvw?-QL>Ik`)c zDBYnPYR=`xAQ3TCl{?&#q0NTACWFo;PR(ZbVFxSD-b44ohN_RLWuj?hqH{v+XNX%w z1t~YWIpK$6{v|K3&(g@CFHIZTWaZ_#dJmf2t$lcNFO4@wgUjv{0fAX%shhhPM~{z> zA8kqBG)$s*cC42nwVB~J9jMZQ!C;5nzZ#!Ix>iodqHeDAay4Jc-ae0FVrEuhHOVGU zaVw1c+x(|DzhYnP6#Vt8703DJvFPXBU*EKS=voN^+&nCk)csqGfc6!KAY3xj-w%iU zhexDo?*Dn@b6#H8@26sD?qil$RN(&ZUPeg?|FzOzl7vR5r6~wf^8S`t;&&G$?V!KL zC+Yv)Q@~p@vlwZbKcj}&wcLu>EBfc9U;o<<rquq@XaLD?brN{~{`Ks)dT@-CC~z%p zY-~{U-`(gWAo;hp@0R8LrRuw!zh$QWdvGbA9cF9YfpN{TAMp#@TGSG5zZgCaCMk}` z8fE!Mcqib~o{Z>d)OdpqOCJ(I{hX5bL9MvkPt@mJ@Z;zyCl0wVoQqSs7RQJj;l)G} zB0F;vRn|CZ3fe3{Y0H>vQ7r7dg`9<b@<-`w5@ifoY3}=}e+ig42ar;x4wW2;IL)Fx zJ>TU)pLn?vAeSGelm!&YRD`*Y_X`P0FE|93ad|(f64vjVd8c~&t|M>8OEU*2+1M5< zdf(vm*ZSbvuvg!CN_~QLBGULNj!ehGms5H#5mQ}XUj|}tUhj={#Ivd@oc-a{*HCV| zkU*&F-E;-L8o`|pqnAI7EJ4;;tawYC3?G|yWX5B&TuZ-5+nGY+Ccj!Ti?V9!{g4>H 
zs)=f_zV3v>!c;h{Z@atL!w=g)tNpS`W2+KweGl75eQ4rsqRNJRHdpienJ_fHs-1^) zZz<dww*$vnnti#$beAKEvR9WazYO)!$J*wuAMzLB#j3q3Pg;v;UG=ohep0j83{@(% zVXpH~B<t94Re{r}d->kqon)tZ)fBol>Y-$z?}!OMK7GZ~(D>oAfZ12C?zbNW$jo;? zj7DLo={P@#>uAVs0j8du87R$ny0|pm%iLVn?ysj#Cm`^(<^L2vJ_<r!z>5(ni7xz1 zI1Kqg{?O(_%$ysp=4IJOPc~-Cd^_>$WqHgPB~8=a(*y1T0qmTIhap&{z{c;)y1Lp8 zB_VvznBvhzw?C$6AH)nu47EfFH1noi78fSL1GpQJQN=~;!%oQB<m(|k>po8-es$_R z6r92my%1kqTQ^Yvrt9>~rI>QK%SQ(h@g6BfRJJq(iBcL~DAEQ#eyz@5(PF4z0USD+ z6r6w2bKJ)O|1QkT1kkA1Oy4%oj+Ch=OnPMVcHoE+<GqKj43)WHb9^B0r1Ih``4d*x zXRa(<3%u#c@q`34kC3WM%}(T?@*wROZ!}uGx6^Gi2{L(8EZzWISsDTlQBX}<k*w!y zlVaE#0(BqGwTbxPP6%psF?-WrX8ND953YkQ5**+s9hK~c?a9U82CG<Vo1*5DT<ziB zXHIaN7);@&$c`aklrO|tJ(VEq^%lk5n@yk7_64r7)u`2%Drb4-g%=o-=0sG{5`_-d zAoZEXTYAn!_b`fz$!KJ1oJ+78)EVw*bR5KzOGHE%hSk~*ArzjF9X%ST%nKMBuZkko z>0RyXW2C@Zbsaw5(XYI;7TSG~7gMwGS;DxkNysZmGK6UL9vVynWAig&p5@uM_2Y&c zHNpBR$>wq|7TPW3*?NLGvo66&bQ(6n*?6Xxbsjp=1Px!`mA?~w#2xiLo#pk_kZwM} z<V+_EC`_cOsUqCC*z9l**^!arqIGuXl!%Qb+s!qf$4H%WNg`Duc>$WvoO>C%u&BA3 z^ISGuUtG?LctvDXdi&r$6SnIjtxoT>p<aJhSh2l^kBG?4wwJk>gbO)KhHZQ)=#Vha z+iTWIR;5=HHb!=TOx^TJKkXaYNk81wcpnNT%uxdZMRBMj#eq|ML|o!d80h}ID>S1N zJF-aHDvlQqr`|h#xa&X2*Dw<4`YbFFMa5C78SZ=T>g*DQoHd*}zp)<8Nj~LczXF+G zv0*M*`i|}yk*oUc6mQv^to?i4s?QW&BjBs(`-4_GfT(%5$_2U1E|h~=wszl+k$~=g zPzi56Sl0CgUvO#@Y%ojvaWp&q8Hn}T+aQdMZ>FN6upyu@mVsc~I&*D-&N6{p`8-Ya z$!cT_8fstd&V&WwUKN=6IXRCV(n!PRhde(5pbLTTgYK~Vc&pJdxHQYaoQ&5DKk7cY z@kXXxl)!&1wBXwskxi#zwXf)l5{^*p%(x${44dOUo_XX9VeJ>9_bk<w{4nD16>Tef z#`D3FurI#8-RIM@Lf@LV#68D*Ryi?1+Q;|t?mtFOYnX~#PvfoY5KwzxQI3B;a^qc2 z5@7oU6$$qYi&x)0YUdqOz7WGgMvDa{VW+W<iHu4<8J3)nSm%T<0C-xCn2u=_@F%`j z7ld$&=qbpN9yV*q8&Ic1R*c^-m>aEmTUl6I7@c?*n#%+-kZV>?P81417JKnp7aMn| z>adp#y@c0MmILjvxuIrTZztWB=9qHLm3XLc+57L$tZnq<<fPt(D->$|QaUQ|Zr)z; z3OoOb%r8+94QwReznYsCZTr01I<&YTeP!qAYgSNwH`yReMT_9Es+QQB#;E%E`Ie!y zlQALu4zpC8V^hh3oEEE_iCJ=%7NbB$>~mp|d3``VE%S70&^r=dj*CV?0HuC+>h?p` zNsf0TA%@xJ=8LI<vO`#EzKh6~KGQD^WN0fsAD=S!H2i9q<Eu*ic_f$Uy5gnLOqgrE zj?g~b+aQjTA;VfP^dN<`sNRV`tdvrsDZ94-q%CRiC2)p{WtqN5b&zedVc|t6e!A#M z)hGiVSJ~SIa$DFN>`XPf=k7a8UU3c75twS7F&;oXO+!ziqj+n&{bN08DmOYXAt7PR zMIThp8%nEp*d!_GRrv8R>KNwJq9)cgVpY33B3HXwFNNW=0$>`#xTAt4hqdW;K+0BU z!wM0<e1AK^Cu95w2$pA-@yVju^Db}Er)S`5&qSYPB%-LTCm@`AVL^VHTF|tXTo)@6 zCo3~GQosj?lfvq19;31;(S*l0D$F?CDdllk#(WCi+1Wu@{HqMl4BI*Fp-Va9AI~ad zd$(j#FSkgyW~jZ+6ZrUn1S>Z~%LL09Dy6+3+QLaJdj7=}9x@iOm7nH{Tc8}P)x%X% z-&k}u{|cQn;Rb~O`tNp}GFuqh-l86G)9HebvpFfP+)Y>D$mf3mNr=Qd1W5c1XZ{84 zA#j+?zreJ=ksbMW5WrzV&sTWEkT2rA1lDp6%h}p+T7qtHZVYSU*@`tLapO-^@Q+`V zLH9^feOrmf8iAXZu{VNk;Y;BG?_4ngWzi~1n4g*f2VbPYdxxyFFJFHsY}SzU!8jZ$ zG~ENgI1CqP54^>s-Vz8Pb51+vzGn6*5uWW)P1n{wSnzWd?sMigiCw7e1-B%vX@ATt z+0!wuo}QqDv1zH@qc1o4qmovUiI8@G45DtppSH{p=@S@aBA2f}Ddu@UtL#1#TX9wv zwb*36`{h6yE1_aB&2a^3vQkoCIr~JJ{R@<`B=~om4f^~#FTBD<Hscw7nXPo|%q9W( zrT3PmhN}JPdk8?L)ODwJJB7I1;S?pc#^T=5<jefpT}mb;)OsE53Ls-4+3cp<gRE>U ze$QSLbr3glb+PFn5qD*}6Hx+wfiYBR_5(i2%jueR)vLIBq8|s8Sd1XX3YEL^Vtf(r z%0_FqqE#hP&sTJBXEoiF%J<?`7wO*l24Ds`P`<(CAyJ#I7JS)Y1yaS*SQm6&V)VtX zE}+gh6VePFtIVOEm*2&=w2)3vy;5?upIr>2pQ#V(C`^{QlS0*)6c|xSK(0HWFsZn* zI_P29>`Hu+Mu3H!WA5%K)LN>p>aGYA4n)Je>lZFbTHnceHldp@$oR6)7+Y&5ad|bQ zA`$q6fZ_FsLe0VMsQ5*HM1@PMtM=s)pF8p3^5<=~Y`!1gx6pXiMw8#o=h2gC4nHe; z_vH&D)T1J-KDUai@s722&-dJucBL<?o%@TObJ$K6aIleOI)A_C$jw?Ap_fIh_=qOe zJS#PnV#aPVO_>f7cYBZb14CS8TWj+xL*t{-@23KZVu@B;^Fs(>Z|`jTE{KgYzEHKu zs7PD9n$#%AZs}BD{(QDh3)a}$o|OsgOBBowUCvZ~_FQth+Sbv(S)EqzdDyhtM<nU- z7A5~oF*-$7T6W~R4dRMJsD};+PLUby<=ECV*~7~4S^^bt3aYUYavq{ziUq41>d6u^ z$k+ThP&z4sL-+F=-CVzLItng=RC_V7rmHf;8$fn>LU=i@BJjTAO51_V&AtjP9*#qY z7Taq(5KWUo<*^04b*A(FFR2q!8NLdPh3r$>={n#N3r~5n#35I^65z*jHhJ8K<XUM} 
z0Pk4&DOdVI5)7rrNe~_gsob%9IxSs7l2`@camMYkD8kF@3P-~WxlcKBVa&iBj6tw3 zOZlr6u+lzC&>Mb#-kX6-FjyLC8PYCrhW}FP$80OT7)|5F&sou;cUvU7b!S~CV+-0+ ztOVg<d5((Q4<spmo!(lX{k%nwEs5{<#6$%b&?Fwux0a-Nc4iNgi&b)LE$b$FDxQM; zm2-Gm7(s&y9iuWGvldp`Lx;JA2%)*n(hX!4AO4tbEg^hHSa0wpula;kGL6GO=Ay(Y z1NW_6=X4xuIdxS<03X5NOkULC>^>(uvn&@9MF<~cf&OWg6S2MphA&$J0s4$2QReGq zy}i`LEGsKA)}i9hrrH$T_=HZUsj8>*JG3`?J$~c00|G>;eF_Q8@YcsIOt;GNd5P;h z>)Ew{{KSwY^BImV+2qJbREF^=(?i&(mu33?I~%J+-V-r|t;cPdO?vWZfEIxI=swLO zri|l0_XEiX%n9S8bCp3IHSDh;A0_Wo(cP1&IoOQm=Heb#J5qkYm_?XQiSjdF8DnJE zy2#GxXmG4yBTO&_Vu;gbr&;l9m@YYGqTKD}(;nqeh#@22Pn(7<;%CMqIO{@VVp2@f zgJmk;?_j+AehX=7z)!bpK!8w|yz`NPe!~grfp-Ihm-`m6=`*ssM+FN+&)Vpfy=_uE zM0DDs-JPG&B)lsJ%To#<i>)N_7;|!q7z_?LMdu*Dam4O)8t?B}MUhYI+Zi*8x6^9J zReHgtA-VSIRqV?%>lQ$UJ%<UrA}|?Eq4GhleeWyvhrxaXfg7<=H3i{8RIKj~gi_B+ zLR9GbVQlkftR^P`M|DdIAHvy8N9LG5r(^7REjBbgb)4Y(Vo?8ShJM{?pqwz%o~?|N zxz5&dIQS~)c0v|bK7bH^{%p&Ue+7A8oCJAU3qMayelUyuH_kCwGx;x%_4V%%`f5_m z>?_tP=hXszqVNyrTdIQsx#W*ao1UIV@Cg6l(#}1${!k;-zpr0M@HfjQ2j>)GXwhuL zt!bztuIo{1ES|j@#-C!5prHyld}?r(1l4o1LdkBvVxQx8XzyYwYx1F0Si)2%loFY6 z=IYKdT#teu;V9Vcc^`1%^n@Q08w!UV4Sl%3BQQsi2N|XV?IMjlJM8oASzRtVYZ){X zZZoUMZtqO}#&0Y#R3=4Hf`03qzugZ(<>%+WBG@|aVKp7m#14sk0_(avVXHN8x!+3R zsu@l73u98i<i1lW^V8>mjg|2@OCtnVpwsz~WXtOb8sqsonmpU)MXIT$6({`k2d5zS zrC>AV_SMR^57@OxiE=1T6ZFl@&@-yz>?)jC=|WBst8vkJq;Ua8?*lpZ{rW37&1-ws zw{-*8<;D&N$hQOm3>#`w7pC4K52tbFyO)IovBs@d53JbjS!dc?(q^WQ|M2S7qin2C z)FidiDmM~h)m=fX%PA6~{>IAGcNhn=ulg4~a|Z|M;um$r=Rdnduih6h15~iF8cQHa zw}`cs(~gx`WeQ;qYhe;4XsB>Bg7_0yK6B#=@&q68DlUgN<oQ-c%8)z}RcysfJ**Qs zSU$C#qS!j%dB)XF@blypwbYmA(<VBM0`-^m0wk4JsNZ+-2pe=c08vjP5f)s#Sw=vl z;MzRJ$b%mQky!$`nbe{m<LM_KT*}hMZLG$4WCoGh>MO|Q$M%&pglH454nvMQyw1?A zLRvOFc8g#zFAX88)t#1x0A1LkShNzm5llSg&eY}40T!>Ez@Jl=w)HNg^sW<XM@<d+ z__3<a?u;tb9uu$Tcs8t7NjAN{=T4t|_k~;*zA<taU*IPdgz-tjz%OCDQt-j6A>5_L z9(t!8e>Uf_GA{_W`1I+;E|6*@`DO#1ADr9e4(jRy5u|{w=wYsub_FAvb|7Ima&;gQ zMuNQYY3viE<UKt(DS#~Wn29d+Zfca=g_FXVZBgE1VgJLX(xxi^lNP0WfT~m(TBt-~ z`!D*f`~thzNA0%%<P}j=4v(_lBMG0mV;R}DoUejEyRSt+Z43pWi*`<{98dWwmVF%t z;Zxo$&2e2+=M=Ltki{jVk~%j+arc--*zp;;d#9oO$S{@H$>SyJ#?)nnsnPTsHZAAQ z7$P_|C^8wOt@0EOHjXt!0JF@<8E*!J<2qB)-<e5zFUmG-d`>43VjAej*Kn%;Wi*K< zDQN67?d3qAoJT~i|2GZ52=qEc<zGEEmAdhL^@||+0G-IJ9`7c5)yvb&Jon4p;n>TU zZmt-kJ?xa{xB{@OP%FjV)u;>KF7}xSW3dShkd2Jv#MG7H)yd?-`yo7&ZkW<d{0#oO z-J)zZ=2nudBTs{r`M7SCc=Z8A{Sg1Y_36h*PC*<nAB5r?gC8Hn4GUStG3#4()4EA8 zCOS26iwiciM!N&HbtL4^#M7zp8^m+T^~?NmqAGM!Y{#(`ib;4fi(+|z?a*5r<9-T) z=j^peK7(LCcH4e611NvQ$^Gdy->1@qmha2Kz8$_1TdQH;TUK+Zo9--KxEnJoz~kuP zzEs;xH|A<2V;G@H$3y6dc5ZBZtg6?{1uEczfn3CYPBis|*)kulz=?~`W~xB;ekf5C z+}w~O8Lvu;9pzExf1Uu4wzkSSJx$U6)xIx{hQ)B@NZB~t-fMEg#asLavWu<Nm=z^~ zfe}QF1ev%?sxt%LszIN~AQbX<Y9U{R=D}h_oKd7q9pq^Su?NzCDB=-t<c>Cj^&~jX zK_G`Vm3<UfDSS}`vigzEAQaSI_p)U4hG)hVzN%5(5XTBDSfw&+i$<TUFki65*ibzf zFU(KX?(`H`T;+hyXw&Q$o9t$hX(=Q&s?k5&eW&_Fv~{emUy;=m8uW7RfKbBNMBU)k zOKn1o1QnlN4@=*MpqFw1UT3FZ!YLN$1Dm%p_c~gjXc6#3w-Uh%r_glZ^9Vtep-T@8 zN)j56?~ewlnqtg+k5PvxuiQTyIFSEPI6}c6#`$LUC_3(?&k|gGd}_$4$d~=v9Ot~q zDa{+aCxrMz(eJ;I5#mg@+q;r=^8Qj;Rdw%E$K86I8|QI1&dj$e#5jmbK~Zrq(<2w! 
z6*Ds-ufJldD*Cot1x5ds3BH}Q^Wl;W%YtbFdM_h1A~Ptd<E{Jg#+}*KJO?^m#NAlk zeeTT~!<#EK7Vtkjc6Y|!Uo3pi)!#1hzYXFL9*>;=N1!y-I92s!2xtfuy)x+O!XD)T z>CG5aEb*>$q6zlhkG2?$DTa@DrlTIC=(JgtHgW&tzHzL=`nTTbuRv0!_ixr6p_uSL zT=%(~kn>1NLER;(M$6@$6?(-L&@6pS%~0x^ra{}6LC9QXQS$P)t}mjuK29wcq17&I zXXLtA#V<57XpQrO=S0qxXxI$t>au%ge&&yd&yt)@bDesWHdJ3{+~ysQqrCcS3|}%l z(tL48wBCOJe`1)2D?QpyU6~MRq<BovL%0xSO+C)q6l(koTEbMnmjW+|1MG_Ew!fnq zuiz#z9pfKm2xHH5F}|b8+7O7T?LfL`C^|5svD~Wnk)2ZMAc}M=d$e|AuxB;O=xFql z_N6k4jX(otIMe6lkA;bXbjY(BPA5d%z|i8ij$YhV0z>QYIU!t-Ui)%aviWS%dwWVW z<2;9P+T74*Z*Ng^z}%ayf(QSR9i0%)0~VPMi%bqnK8`Wf<m8y?PTo)>z-TqgDD>g@ zL}`_|{mbzS+?EuLy(p6d4^<4+zhvVf=l&i5U^H5{c1l+8fk(}=PoE2j{21d-&&I@$ zSlt(Ev8s+WC2yBTJeKKSIj)@98Se$wRl<QBRYO3EnNqPEaSEs?K$1ORFQ$@`tmQ`4 z!{BDmd@HpFHjLdDTU>Mrglaq{q$?4-6arZ0^HcKC47qAdjLdAg%OhAu{>{B=l@q)K z^a&O8@oEIjMu)tzn65l3lVdOKa!_#Cv$uM&di$_G<hU-Lt!e1gxZHytvcwnbbncXQ zzkxCx_e?hCd?evCeRlM@YdU_xsS&)F)Sp-v*~yDu!gIaKRoCTc?jw>mzH_^p=!~gy zd!EVL$6H%FPtYW?`J2#@>sE2lt5JYug%xCo-+PpFq_p&7yaF91gEWzEPfm!-P0z{* ze?T0`u{?*c)_g#7&5UJOU(Br67N{oGJk2PoMEQ6?z5L_ykH#a0ij<o6L<D<KTFCo~ z%BE5DKVI(!@Po~R_kfFgm}e!CAzclA3^@W*RTXwz5;QnDEA57}-V6(^C1W5+s`(@2 zd%dMiA=r{=i*<V{+E0HD#Gf@hMSHN}xY16iuRCk*Zp+Qxx0Uankyx=2d*36SWF+_@ zdC#VC_3-6pgoUs%-AnNW&))D(6Bh3r{{$Bp$&+kGru5eS7C@B!IW6U{i+zatYjVyZ z#e=czfgOzS9P9J7*c~swj))e~Cw$4}y_Tk_@wmVWldI>j6Nq62?SjHV?UTWa3HfGN z;K3VT>uZgUWV$9Iw!hYDtPz&jkLHq~xG;sW()>(Y&qebOF-pud5(qyLueLb$t@mOA zk%Qtiu-D)i2BIr$Cr>4B{_LRNUtZTu`08_Uu~=H@bBB{|;PrwyD=_EXDmfV~xt}tP zbw;)i5Xa;}-=pdZQ1GO+`@Vgb+MFjGU;wBO-P$&Du-)TH)?izQh#<Z9@Ko6~&uGs6 z@aZ#Q%zRa#Gxw4EYh4abSbeN<z0Xj6-1by-(5hX0S;l7&MK=5Lev81TqjJNaO9BZa z$LDW+-rl^?8}6T(c>k3VoDzEIx5=NNay9-abFtKDI4iC)9RT{?G?LIw@@fufK{8ye zv##%n^2sOZ36Jgb72|#1hHmZA{NzZft_MeT>swZ}usu34GY!Z2u`F>|{y7jc-SIIo z5#>O99@Xpl@I6<2JgUB;z)xCSXDNhf$`aZTb7}giYMcEd#kZF)O$gzoM+`k^3+Gg^ z2I`W-Q%;N&XggD#c=gb$ik5Zm@XSgta{6$}w#k-r+Hjwyr1SXIH(ok7Z?rUm@Z!k1 z{RStf6^qU+>EJWr*5fNm>?q!nkGis7LL|k%F&ACp5n!B^Xkdh9?C37$9_XrM-OI3I zon%J~3Ifm95<P}rPS3f(F2=YTQ)ekz7ZLs{*1!=vH^J*tD*lu`rTo*7@%#Y((bUrc zy~?v6=`{zVzTSTIi#97&HRtbAe>UXd_pEO|7NwTnWT55?-5EkAS2)OTj!~sCfO8(0 zfs#dHoRNT|xI1oZ+}~KWu1(3Y4EIkV=glHBST45tBOO_j6B<PyXIy6+0^uvURPuLY zb`KqT!klxk@;E#_?iHMKSAG^JOf~qhUpSJ%d#=|EUGr#$e?r%)(HzQSwTeGH=tMRC z?3+H@MJ2u*;`@3+d{+27uGm|A0aD~Qo%;NroTT>Zq@Fp-iwOE%Z&i!0BwRwy-Avh* zr!*hiTC!ahxclo$L+pFF)(!L}2{F<%dYl*8p1RI5tzTV{KZKR1eE+oJI1hb#x84y< zjY`4^*(Ri@3SiZRT@DVbyV}+4t2?64&M$4b|E>i{PG4Tya3RP^k+_hf5gub63P`n= zyw^4&V7~C9zk1T;;3A#PWNE(7e9ryn^tDEE`neC7o&C%qKwBmd>`wMQEV{w<!I0o8 zngaOS1jxXlbK}8}hvz&xX?#nij2ET8xl3h%EWf-MiC-Bgp0=<o7u0*Yd-Yp~!w3@a z(|H1>N0$uKJrV=qQwq;EP(SvuzunmshdjF(%0K=lTe$RBR4qB9sjEv?HiZssSlEs5 zy?ojhgexFV)YLL*;A!*CD)L%m-e1kR06%Dt3;zYCP*3^X#^*T!JZ>I;rMAK1{{vX@ z50wDY2&(QWdOZO+dus_}S>F!N99IhG-EWvdO0kKBN1=&887E$*KRJ6^l>@G&0*`ip z0KHy_8$}j>pHk~Fx!bFiEk!BtgZ2i30H<j0V>&uI>isT1_`8<igV66vfy6&Z1VLIO z+z8q4O#PF8i1QIu3An0c`-#1AeTa3|kH=FoFWtDve63p=+`EVry>LM_Z@J}(Bwz$Z z|HF--$^gd<>_}l5o&}CLq9yDPHXhLm#;|bOjppvS$!ZSgj;dHxq#5_kebZr=VWtwH ziZChzZ{{A%jc5;;$mZ1aizh%PHFyg%iRaE9cJspbk12L$ZB)xZ+@6g_<`R>wf;Vp2 zVf6ck-;Go5H*Kte2FLoEJEk@z_nMa$-5}%CH3svo9g}dB4GLaS!=M|HBERN^0~t`o ztgj+m6q7H+<JE~M4~_{Oc$x`Jw@!GSwzFoFNNwJBsCicF*)0Bg(goYyWq~f?Lt${A zPhET3FRcJCt#*rx+^Ry!_6)v9U+gW<#OP|5A8^dH(dxNRYh|S~ugnPG0&hl(L6lZ( zNwp%ODluj6h`(n<%QacOi}XE|lqlEAtP8ZA1_&=-5flB;SkcD^8`sb=T##MyZD+jf z<vSEZdTg}P`Fsn`Q|90a7mm2@&*A`49Rd6xQU3is-uVu7Yo=#f(@;ydIamFL&JxDG zXqv^)82KPfvt91}3jQYJo(R3_>&F<|5Cl%J7RKhDl}_s3=faDh8+0-Hp5B{TpPnt* z$J+DBlyg>@?VRWX@6nx4!}4#86<6Z4niPOMYg%G6<7#aoqJVf?Wb9$VucK}Axhvbl z(;%~H59>RDU7ZY2)!6v-;@PX9LP8rQyVu(g+Ef5J2GYDfjrT1o_s)WzGJ}<Snbt~M 
zVAwwPeQ}kc{j?juM)`%Ps`J#)Q<G)IcjP#9_RsN{4Z>q+I?*&0o;*Zqc4t*X%xzeL z$X!wU)g(Rl0kqXS?XnD*i@n7O-obYLb!i30c;E*&`3}4ai6<`<74f7Hl|08dK#q~- znVFdn9Yz;I^l2`PH&Dd_j_m(UG~q`n=xUrUoWzF<!595O=rn)VnEWYlp^1`?OiQ!* zuhlsJw1dFg1x~A?#814{b*pWtHt%06UM4_~Wxv*+Z7GstOr`Uj2Htj)&jAPgLzzru zGYIf!<iGPq|8VDjQ&4j~UUUx5pW>l^syzB9!T){1%^#8esa)uPDBQ@T+D5lOo5Xqk z%#0!BKdI)UWDpJz^Tw1=xFF)P)Kv1{VVud|wg}PBpQ0y}KlMeCh>3S`ss3%s=ni)^ zd$oqmff;f)HXQu5;@?&j{oi=Ozsb;ltNp*N{drrw$na6pPrnKK_Stz9gIl{E&~X?} zws{|qM4ZB;=&8p7oxt<AqE>FZRW)}T?_N*uNZzM^u-%B2XZ(AO`V0FgrP&nLoqr^q z8EFtf7keKOrlS5FBtxu3Epk}4|D$CDw?CaLJ!pj)nt(<W%BOxdT^DrC8txuX3m)K` zyvG~&zFE2>dDnR)m{qKo9%Bsbi=Q;p;QC@w9we&4+@mCn%&suY{41J3u=oVHztM%Q zA&Z`D^TzSy_)Pj{4srx~*%<Mw_%X(q*I$sX3pE8Lr^h;N#0p9`iCy9E8?6Ot%q-$W z3z#Cm($rKHBu}<Y|K5mO7dym|b+tG-a?T{7RQ&pMr-x8tYJaZ8`RG@QfG$HS|EH-= z(uqcbVB=3o!e$TG$_NPZz%VoxgIQU@M7f?0+MI<~Zisv!yD3B*w{mxfj_*t2N9Gcv z!$b^3(O4?8rMbuk6C0Ik*I|0~J*wv)%k2ho&zYb45$<kVYBgF470)(c;zg(ca+AL} zxW5PRmLE?n!~r_s%97^3XOBD9=^vK;Dx;fmp?jPv)jDON=Pbkc6#43u>}KK0q$Ay| z+*I*#HF_}!)MOPcW0#k;-G73Jx8f&#N(jjJsjHVA0II9W2}iebL&FI21Cnj?1|<Pk zZ2GSxDIsB1EN)+k@SgD!qDl`R#&84E)n1;oBTvnU4&YBdK(F5-uJ~E@uwJpzPrHJd zADZi-b1Lgx57O;#Y}nrXps*1?YnSt6Mt+yob@&ISx14o!SfTkNjTf)R-6Wf)CFe(U zNEiIZ>?{*q&q#Tw)D%x}EAk(WznGst%7+d#7-yp_nDxm%WGklkQKjYef%B{zB+6YV z5vyIuPi8_-ZYMX?b(54<%E?1>m&SEC&Ng=`VV8jIvyss~sBv!+7f-^N-Q?)hohGnM z;>bs=kCJODf)0X#k2l?#lTHJYv4=VH)Sz)jvU%#t<UwP7x?pMX?M~BZ34YH*psW(} z=gV25_66;$=&Y({f4;g@p|aF>TK8gFRAg%_Lh3)4xRwlbL%cj?89NM<B%z4}G_w{F z;(bGtqfvuvI|ZYNsN3XloY!HrDlV41Vn4yb?_y_9YV5?3xmdU1gO~fmJ`Gl5m+3r* zMV)mk1J#aD63#^T(G_dCef|aDnY?Mgp7crzie}1<%RD;qRZ8=A-g$!ej3pSOVQL(+ z5kZF8@tlU|sYNOd58vVN6U7^T(;yz3$^Sm!TZ%(8_-bhP0R^b(A`c@9LlkS%NYjiL z)iDHbTYy7$P(Z;}b!ST+Sw_Rq_py}=mE0W)_O8sYXGO|DRDSPCynWEQ3vt78&w`cN ziM~Mka`fdwy6fJR3HeW9!-5QtPK~`Q2cat02^H-3#w!J>Y6I_8(BW}b_F!0BjNn0E zSwCrt_s^JH-a8UcC4cl~+XLsyJVf~515mlY`u0xibG8O&(m>B*{Es-n17v>3Y=C7( zYSc7}_C-|3?Dr`A7in@Uuq}*xnCOow=wUN^;t&K6dm6eS(MIq=*=JWl?-|pt!T#)W z$!{sF7fmjT8ntILX$|<h)}}Tq8sWX{xjR$dgY%2te(yq@>YR)|mK57m3f3ein>ddN zD)&!2$=Xz=hP-=9;@!`aOm)REVAh{w-tDP0JzN`_8^LujGAJVcmh0sjXChRS{=Q;M zJBlw{2!s`CygrQy@~BLrXpM?hXi1Gu!~gmVLM)9crKX`h5IS1!Z(Cht*x4csv#M@y zz@*e{cVA5InQS5O*;lyH21zO0a|_4?i3!%IIBZ>GsfUq1gb6A#Xbko{Nr`6ojc__; zRs^1#%}CSC<X;SXtuN|!t-Ds|(O7tV4XRR5D1t6o>WoC5br>DuS?-}@s<@r^ybr1H zS#u;!Ce>;J4uB?0A7uS3GRgwuB~NgdaSykCQd;O3s9}3^bsA86%AYHNxVyuV)MvDP z%ahn~(wo@=cGL{#Xo6n=$Jt+Z=8*Jz<_CtWN2VGhrh(w&dtBi23659=7UQWw4@!fI zwWc--C5Qd1Z8z}d^Dm|89d&iH<~ntWU2r6VCy$U^xRe)F+Fz=D?Ae?7UY`sV{92qy z;;Y}wq2FRQ-|K8GBYw!+r=_em6h2H0Ia^HYaGz#BI7pMT@VI&=QhvSE!C$vp;s6v1 zMLH9x{sc=)*$kJRujEsn`(I`@wV`p^8o8Vq#f#KO^5csEfgEd#=cF&iGfAi;rNG*6 zw)S;Jw+kaRReOpUq3Ed>3-Rsy4;dRmw}>0G8f}TNi8~mQR2609a8KqL>-}$d1x6O1 zqImbo<Gu8E=<?on2FA*yEmo9}8>2vRaovNFNS*#{Mkh=rq)R`O0Ba$mz^Ay?fI_ zy$MKMrPRNXY{DqEq-S(XI^e@Se`MSfUK3+uy5yEi_wjQ!HiMKXXti}cw|aw#(?chS zD)}&C9Ri>EUTrgf0Bw%BB_8mh>6eg2GWW(6Uu6#!L?sD*d2R_PxW>j9eVzt%#a-%v zvk@P$(mrgztl82YgH4uiC!>04qPcx-14rl;;3s1w+O2638sxtkl_^aU7R9F5sJvv4 z$H3}Jsj$rTpx*VvmGUM$RIL1ZIOdlH{%7w-`s}iS`*j<yo?WEj-#Z*iAF$bIQF_tV zSGPNS=t{wqc4Q1nJQ|$ufm?FdcvqcMukURL`~SsAG!7*`*WxiDPVcHup;|AKO0F~C z&lIs+%7dRHIk(JqTG=`83qSHYJAOP(E`*AKib;Wn8XR?{?U#*7Iqs_Zv3=Y3F05be z$gCXC&$iy5qO|euZAN#5emv7E_VON(>K7qnMW}FsolhNTAHIBNVs~85qAxjDu#uxR z5xlr@3h9b;Tc%1p`B5EVA3!kO79J#;=%6{cJQwA;cQ3SYgWLMr2j(dGAxin4g1!F> zS+L*PdQBGmj|l?7Z>KM=5s#`V8DXa4a9p3;@~~&VphE_86rTUi^JZ<~bN!S;#ngkz z*f6~EH3TW(_Lj_;iQQ~2pIUcoZw)d)wRgBQYWmF61GBAKu>ZU7rSO?Zg3<9*DW?mt zML6(!<y%B<B0#%@l9JMpDDl(X!s6mEEtcv<C)lqpk>}P^U)$7&FFbdvCV!(Z^iPu6 zDEeY89;ccqe3181Ko=Lh@VLLK=mC8HE}!skGw6SV#y^>dSNKu{`yNiBR(wVTV>e$o 
zoWCC46xFoftW}jFDcs;hNteM3x+MzI*-pn))&-0RE}?%;$+B@OcajPnPWiLFxS5%@ z5=7#m59j)|Wul)){zl9Hn{xYq2FIifJ-A+Wy;Xws-Wk|8IVzL;e~SFSBKQm>@PoB5 z9#Eq%W`vFGvCp7TT0w&Mr?PCZ$i3HX(Yny3(wO%ZPK<mTZuMu$r0^50J%eWd;(1Yk zy!Bj7t)uGWH-5!rwRSyBJTAwB-IPbTY!|af9|G}@j&8)itM?>|nx4KZ4xkE`8P@S+ zW;S4532I3Ay4u`Od%CI-&*By8Lfs4#j<;s|xsQm_=S6D$9ZGGos@TV;wY%{5%xP*4 zUE=2&-qY3?$RB{IzOE+7WTXY}X^Cz3EH0H?#D85~NMV#K`Z?iiMa%7{#C$z0sa6m` z)mO$Y7b3Du=wm|e{@9P8%)yxC#5ZNCmSTpd>OK=7=DYpAR%>k;In`!b^5u}3#g!iB zisF3JF`bUsH1V5J-^%Z&VO&=hge!FI7<W(|NnBm-K-R;o74X{wZo=5R(L&BSKE(7% zI1dJ~cKMH|$)C{3(5cbH%)f7o5;E3)^P~Ze=Hl$~f=`y~s~KN1(UT<k=&jHgnqcV( zP}vW&Y}6Qsdc0kfTRm}zQxq&pU<Sv~2%OCjo}9DQV?4OB{_r!Bx@XmZLtgO3<?aI2 zQ|)_hLW*9calDX0!lfc>F>RIaJ6VNJLt2L?eDg~To-ZYCR*+K2??7wJ4w*w1QGmK; zw!JBhl4I`afubA1zBIO^{EROB?a^JGXe1e(37+N(Z>CFPGe^tDDCd;uCUh6u*YijA z=LuLpr$!lJml_krHru<IF_w8g-ppM-V4#uz8S~9h#}@W~ZOX@gZ^{wyiz#K(^cq7n zDNqlB=TYc!5dD%?-{AADg}V#WgrZMo<6(^6LVJNoZ5(#^?8dI<dw%eoT;0w#T9aLi zhdavIe8uE!X|vlsO&?D|cX9Mh{%e51D2|GpUIZtD{aszkH8%V+w(NF?SSUkjf&qV6 zf`9oE<<Hij5fDg`c=G0V1orQFw*MoR08x*FVDRSt;u~ZC?imqCQv6}){{GYmk#s~< z@%$F-Up_1PAI9nbHH?M9PxnMRx6hkVemxizh*)UdyCB+Ycjxl!eL+&WH^MZZqcdbm zU1nWS8(Q^I!lJXNI1+8PNJK+Q=c0}0b9sk$9yfM!EBS)}uEIegJ*PHNZc402*ry)y zb?hRxkpj*2A_Q{IueQ@b>E~{jXIBt+UxUTfi%HJ?vm<f;&9OVzAE-Uc#6uiJ|8F+Q z_s94xY7uq&R+D>sOO%LV{9J2gidTX?-`9jH2Y+Ycp1?oDK#Y1&Jz9W*I4Zvuqd*+| zkogx1p5BT@{h!2AYRUR4n>%73fw&LJu0b|8+Of_$??&`{n27J~kdUw>ZkzspHFH!8 zataD$@^1!^_-laS|2W?NgC;%h!1E@sj;q<;+>fNR`6jTOxNM%%fE1f{Y#?#vXIqTD zAMml6qDBX2z}ac;aarkl{TA<0AQg`(ENrnai!=D=T|%WQ0jp+MI#}><WIO69XMI7e z%;;b@bVEV<sK@Sg*g~Gu<gRW-1w0n0cM{BBbG$CeKL=ITu;u5TG~lLP3z(n2louaN z6J8%+z5WQXG!vl;JvRK8`A!UJH7r93lV>@$m!bX5wNG;`m8!4YoF;rc-Ca)SW69Rj z0Gkav3ymCU^C(G>o>J3cceT+GR|Snr9~+%6d>FCv+o~jJ9@WshYMO%epL^+ROSrn_ zTNmh#P@+-JCFgf({_~#Y-mC2_+B}?j<2VBn!LEDVoq2Shz0VWJ@CKq0;v6sq;5(nr zB?#~y)38!#LyE0N7JiLfBsi~Emli81ErQ5<3oPXAsuwDlYKr&!OBQl6fgYwl)++IO zJe;|fnzk3mhegFn=_XVnvD~ggvHDV^BQC?-bv_QBr{2B}CHkW=G}A6!#D~yyUk+x= z=9D=4oiVWQR<qZUfa~W9PwHIGbyL^{sQZhji^;?ZJe6nz?_5(AgPUX?z~KaId@R*+ zW<MbU;E)@%kiP3NS_ea9NPC*!zEw~pJ!?pE>gkt<rnA;TVNLuVtI#=TC&NMbYEl1= zFJODm`^l_lp^T-fWlFM0vzt+})facKla^KgLo}#$SJ|#0N`y^sFM8Ko$OzcoJzyxD zRoi)y-ElQg*0#PX8ckDv=>21?PY4}RlYCc96OXX#16ht7F4eW*0Hbrtgg(L6gNka4 zgu-N87LRhh*1`vIE*G+jel0s|KPzTW**lkoxeV;lR23A<l%M$*O;04F$(0W7sCkS! 
z?|&)UmDSlO?luHz%8o>8YtAQ<2@jtZimSVtp44cVjDk8-J)s|2z?(w>Wc5CEZ2N5V zyc<_$VZB4HI|HI$pwsxgX5LCpu$^RgZJ+r-XpF<Ww)g%hzgy?D_PNvme}Rdt@JW)k z1^Y>^(|o+(=y@i~MWMTP8BkJI$+o96{j>$*neO27urfl?dlubs*U0~R^V+4g1_QjW zz)Gr2+?VI<uUt)6I`zR+;~=zjP>MEHVjNV{Fqy>p3VeQsUU9L;t~fAPxM4Pd&ffby zTt>#D+;b>%EG#Xx$9Ow>EO+U+T3)yqqB*(0CvWB&&3NRr)_7WJSWAN)k&vDL&PVf$ zUA{v2@rHC&bXPr|*&)B0(@c+yY62z|0&WfM^qjVpHhy`$;bUnKZ{XHjW~_!gkTe&4 zOv!iH%Z`kM^zCbbI+KN^CFv!ku#+RKKyKthkG6-o-YGoowaiG2cN~qetOfr<@|?0_ z0Z)(r!cJlHv4J<^T0(hdXOMBLf-iUJzOvzY^jMr6J5>N<1S>7r-AUx*cY#{C^m7mO zt<m(SQU=|p7ZYY{K`ZGRoQTvS9(<p<^o!}@DZL~7>HPGpVa!Za&(elrcRr@w{?7GY zlEe`nOX7JNvjrGE=|%xJR<iDPQT=jyt=0^w+c%v|`c!T-y&9Vmt?n^8nqC<u>-gHx z13kL3GDmRgDN`7(h4hG125*y$EO@AS!vnpJA<)@P36Nf7dMK5mmeM@dlE<^Zofv~Q zYH%e*=63p?_`-E{uWIPCdn+1rNBczPyaIaa4Ba<$P0t=D_r~r-M_BPm$3``?gO@#d z0|RlyB8fw#(}{8E+dVt@HMy}M;8oYUwPy(rCtFhTMDhd>DBtVt3N$@g6p0Kv>YMh4 z&reSnCXf&hDq5Z2<!$?wlr()Au;bxn1AC({C#*V|G@f#Ce~8i}0bzYAhSrxZYTkNU z$j0{<DOqlDHWdrkz~4%pg7|3%s(M;WinUuRV)>oub71X#5jd*{*e4{sds7YQu7XL$ zLOtJp8S4&8@`m@edZRn><%WJaHQ6>E+AxQun`K1Ehf`8^5C1|(%=^Iv$vdg!$=uzK zWQQjc4MzbdDdphB>RfhQ#-<vfBX+~YzzW?Mn&R|ds<8TcrW%`H4PH<wnYJmJB=5Cy zmLyk8)p`EfdzLef;418ji_cZ=o2Pa;Rx>Y?F3?%QIxXzZrM}#OJdwq4pi(uRjO1sZ z{zQ3@gG+oW@8bb>oBkyG*5K!6SJ}|66j1*#4Lx^iw9~rcz$Og}J=^$m4&tU30eHQo zKEO9mc#Sj9bWkeAX|i)bd;Ylq_Fz&nV<A=FK%q;{+*D)*UFguJ?8y{I+k;R6E3ahV zas&~10cFve0Wn5jJK!MYTveGmy}=K}hF*}4B}!9i^$Vx`8WCsX2<J=Sd_7;^zQVfC z?gUE`&s-ZMWv0qXlTIO-Yt9()>^Fhlr|^@}P_he9Y~NbuRKbzr!Kb1o7`x-_sVX3z zt#3{Z)ICFWMq?~6BJ5CWcI}oQ0mLT@j>7FN*L~NMj~B`T_;clgWTTPsoPhGTkgUoG z(!FWKZAd{2c0&+mpF%z#G(@Y^Wf@W!)h2{<eW?Xe)rdHV`XfNcy{+prgptaKRfv=! z$`mJ?oSgg~aDDP6%`L1TNCPxxyQlIaPBrrUCy4R?shM%_sU%IA+KtT0z}r_y{Oc9| zW0QM3`XNGa+w1=eAc}aka$mU5t<X_!=R}ArqIbZzubGeN6XOOIeaum3%2h4+t08@h vhNv&j_LU0M@81p<^^b#HtFL#>5r)5h@}Xz?4UQV3e<YcgiV`JauiyPI4t*E& diff --git a/runbot/example_scripts/nginx.conf b/runbot/example_scripts/nginx.conf deleted file mode 100644 index 6c3f3da3..00000000 --- a/runbot/example_scripts/nginx.conf +++ /dev/null @@ -1,75 +0,0 @@ -# only needed if not defined yet -map $http_upgrade $connection_upgrade { - default upgrade; - '' close; -} - -proxy_read_timeout 600; -proxy_connect_timeout 600; -proxy_set_header X-Forwarded-Host $remote_addr; -proxy_set_header X-Forwarded-For $remote_addr; -proxy_set_header X-Real-IP $remote_addr; -proxy_set_header Host $host; - -server { - # runbot frontend - listen 80; - listen [::]:80; - server_name runbot.domain.com; - - location / { - proxy_pass http://127.0.0.1:8069; - } - - # runbot frontend notifications: optionnal - location /longpolling { - proxy_pass http://127.0.0.1:8070; - } - # not tested yet, replacement of longpolling to websocket for odoo 16.0 - # location /websocket { - # proxy_set_header X-Forwarded-Host $remote_addr; - # proxy_set_header X-Forwarded-For $remote_addr; - # proxy_set_header X-Real-IP $remote_addr; - # proxy_set_header Host $host; - # proxy_set_header Upgrade $http_upgrade; - # proxy_set_header Connection $connection_upgrade; - # proxy_pass http://127.0.0.1:8080; - # } - - # serve text log, zip, other docker outputs ... 
-    # server_name should be the same as the local builder (forced-host-name)
-    location /runbot/static/ {
-        alias /home/runbot_user/odoo/runbot/runbot/static/;
-        autoindex off;
-        location ~ /runbot/static/build/[^/]+/(logs|tests)/ {
-            autoindex on;
-            add_header 'Access-Control-Allow-Origin' 'http://runbot.domain.com';
-        }
-    }
-}
-
-server {
-    # config for running builds
-    # subdomain redirect to the local runbot nginx with dynamic config
-    # another nginx layer will listen to the 8080 port and redirect to the correct instance
-    server_name *.runbot.domain.com;
-    location / {
-        proxy_set_header Host $host:$proxy_port;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-Host $host;
-        proxy_pass http://127.0.0.1:8080;
-    }
-    # needed for v16.0 websockets
-    location /websocket {
-        proxy_set_header Host $host:$proxy_port;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-Host $host;
-        proxy_set_header Upgrade $http_upgrade;
-        proxy_set_header Connection $connection_upgrade;
-        proxy_pass http://127.0.0.1:8080;
-    }
-}
diff --git a/runbot/example_scripts/runbot/builder.sh b/runbot/example_scripts/runbot/builder.sh
deleted file mode 100755
index e9bb93dc..00000000
--- a/runbot/example_scripts/runbot/builder.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-workdir=/home/$USER/odoo
-exec python3 $workdir/runbot/runbot_builder/builder.py --odoo-path $workdir/odoo -d runbot --logfile $workdir/logs/runbot_builder.txt --forced-host-name runbot.domain.com
diff --git a/runbot/example_scripts/runbot/leader.sh b/runbot/example_scripts/runbot/leader.sh
deleted file mode 100755
index e1a2075a..00000000
--- a/runbot/example_scripts/runbot/leader.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-workdir=/home/$USER/odoo/
-exec python3 $workdir/runbot/runbot_builder/leader.py --odoo-path $workdir/odoo -d runbot --logfile $workdir/logs/runbot_leader.txt --forced-host-name=leader
diff --git a/runbot/example_scripts/runbot/runbot.sh b/runbot/example_scripts/runbot/runbot.sh
deleted file mode 100755
index 10933c91..00000000
--- a/runbot/example_scripts/runbot/runbot.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-workdir=/home/$USER/odoo
-exec python3 $workdir/odoo/odoo-bin --workers=2 --without-demo=1 --max-cron-thread=1 --addons-path $workdir/odoo/addons,$workdir/runbot -d runbot --logfile $workdir/logs/runbot.txt
diff --git a/runbot/example_scripts/services/builder.service b/runbot/example_scripts/services/builder.service
deleted file mode 100644
index 79b85a09..00000000
--- a/runbot/example_scripts/services/builder.service
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=runbot
-
-[Service]
-PassEnvironment=LANG
-Type=simple
-User=runbot_user
-WorkingDirectory=/home/runbot_user/odoo
-ExecStart=/home/runbot_user/bin/runbot/builder.sh
-Restart=on-failure
-KillMode=process
-
-[Install]
-WantedBy=multi-user.target
-
diff --git a/runbot/example_scripts/services/leader.service b/runbot/example_scripts/services/leader.service
deleted file mode 100644
index b0007da5..00000000
--- a/runbot/example_scripts/services/leader.service
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=runbot
-
-[Service]
-PassEnvironment=LANG
-Type=simple
-User=runbot_user
-WorkingDirectory=/home/runbot_user/odoo
-ExecStart=/home/runbot_user/bin/runbot/leader.sh
-Restart=on-failure
-KillMode=process
-
-[Install]
-WantedBy=multi-user.target
-
diff --git a/runbot/example_scripts/services/runbot.service b/runbot/example_scripts/services/runbot.service
deleted file mode 100644
index 97a1c74b..00000000
--- a/runbot/example_scripts/services/runbot.service
+++ /dev/null
@@ -1,15 +0,0 @@
-[Unit]
-Description=runbot
-
-[Service]
-PassEnvironment=LANG
-Type=simple
-User=runbot_user
-WorkingDirectory=/home/runbot_user/odoo
-ExecStart=/home/runbot_user/bin/runbot/runbot.sh
-Restart=on-failure
-KillMode=process
-
-[Install]
-WantedBy=multi-user.target
-
diff --git a/runbot/fields.py b/runbot/fields.py
deleted file mode 100644
index 1da6e59a..00000000
--- a/runbot/fields.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from odoo.fields import Field
-from collections.abc import MutableMapping
-from psycopg2.extras import Json
-
-
-class JsonDictField(Field):
-    type = 'jsonb'
-    column_type = ('jsonb', 'jsonb')
-    column_cast_from = ('varchar',)
-
-    def convert_to_write(self, value, record):
-        return value
-
-    def convert_to_column(self, value, record, values=None, validate=True):
-        val = self.convert_to_cache(value, record, validate=validate)
-        return Json(val) if val else None
-
-    def convert_to_cache(self, value, record, validate=True):
-        return value.dict if isinstance(value, FieldDict) else value if isinstance(value, dict) else None
-
-    def convert_to_record(self, value, record):
-        return FieldDict(value or {}, self, record)
-
-    def convert_to_read(self, value, record, use_name_get=True):
-        return self.convert_to_cache(value, record)
-
-
-class FieldDict(MutableMapping):
-
-    def __init__(self, init_dict, field, record):
-        self.field = field
-        self.record = record
-        self.dict = init_dict
-
-    def __setitem__(self, key, value):
-        new = self.dict.copy()
-        new[key] = value
-        self.record[self.field.name] = new
-
-    def __getitem__(self, key):
-        return self.dict[key]
-
-    def __delitem__(self, key):
-        new = self.dict.copy()
-        del new[key]
-        self.record[self.field.name] = new
-
-    def __iter__(self):
-        return iter(self.dict)
-
-    def __len__(self):
-        return len(self.dict)
diff --git a/runbot/models/__init__.py b/runbot/models/__init__.py
deleted file mode 100644
index 46108428..00000000
--- a/runbot/models/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from . import batch
-from . import branch
-from . import build
-from . import build_config
-from . import build_error
-from . import bundle
-from . import codeowner
-from . import commit
-from . import custom_trigger
-from . import database
-from . import dockerfile
-from . import event
-from . import host
-from . import ir_cron
-from . import ir_ui_view
-from . import project
-from . import repo
-from . import res_config_settings
-from . import res_users
-from . import runbot
-from . import upgrade
-from . import user
-from . import version
-
-# those imports have to be at the end otherwise the sql view cannot be initialised
-from . import build_stat
-from . 
import build_stat_regex diff --git a/runbot/models/batch.py b/runbot/models/batch.py deleted file mode 100644 index 2a5f95b5..00000000 --- a/runbot/models/batch.py +++ /dev/null @@ -1,462 +0,0 @@ -import time -import logging -import datetime -import subprocess - -from odoo import models, fields, api -from ..common import dt2time, s2human_long, pseudo_markdown - -_logger = logging.getLogger(__name__) - - -class Batch(models.Model): - _name = 'runbot.batch' - _description = "Bundle batch" - - last_update = fields.Datetime('Last ref update') - bundle_id = fields.Many2one('runbot.bundle', required=True, index=True, ondelete='cascade') - commit_link_ids = fields.Many2many('runbot.commit.link') - commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids') - slot_ids = fields.One2many('runbot.batch.slot', 'batch_id') - all_build_ids = fields.Many2many('runbot.build', compute='_compute_all_build_ids', help="Recursive builds") - state = fields.Selection([('preparing', 'Preparing'), ('ready', 'Ready'), ('done', 'Done'), ('skipped', 'Skipped')]) - hidden = fields.Boolean('Hidden', default=False) - age = fields.Integer(compute='_compute_age', string='Build age') - category_id = fields.Many2one('runbot.category', default=lambda self: self.env.ref('runbot.default_category', raise_if_not_found=False)) - log_ids = fields.One2many('runbot.batch.log', 'batch_id') - has_warning = fields.Boolean("Has warning") - base_reference_batch_id = fields.Many2one('runbot.batch') - - @api.depends('slot_ids.build_id') - def _compute_all_build_ids(self): - all_builds = self.env['runbot.build'].search([('id', 'child_of', self.slot_ids.build_id.ids)]) - for batch in self: - batch.all_build_ids = all_builds.filtered_domain([('id', 'child_of', batch.slot_ids.build_id.ids)]) - - @api.depends('commit_link_ids') - def _compute_commit_ids(self): - for batch in self: - batch.commit_ids = batch.commit_link_ids.commit_id - - @api.depends('create_date') - def _compute_age(self): - """Return the time between job start and now""" - for batch in self: - if batch.create_date: - batch.age = int(time.time() - dt2time(batch.create_date)) - else: - batch.buildage_age = 0 - - def get_formated_age(self): - return s2human_long(self.age) - - def _url(self): - self.ensure_one() - return "/runbot/batch/%s" % self.id - - def _new_commit(self, branch, match_type='new'): - # if not the same hash for repo: - commit = branch.head - self.last_update = fields.Datetime.now() - for commit_link in self.commit_link_ids: - # case 1: a commit already exists for the repo (pr+branch, or fast push) - if commit_link.commit_id.repo_id == commit.repo_id: - if commit_link.commit_id.id != commit.id: - self._log('New head on branch %s during throttle phase: Replacing commit %s with %s', branch.name, commit_link.commit_id.name, commit.name) - commit_link.write({'commit_id': commit.id, 'branch_id': branch.id}) - elif not commit_link.branch_id.is_pr and branch.is_pr: - commit_link.branch_id = branch # Try to have a pr instead of branch on commit if possible ? - break - else: - self.write({'commit_link_ids': [(0, 0, { - 'commit_id': commit.id, - 'match_type': match_type, - 'branch_id': branch.id - })]}) - - def _skip(self): - for batch in self: - if batch.bundle_id.is_base or batch.state == 'done': - continue - batch.state = 'skipped' # done? 
- batch._log('Skipping batch') - for slot in batch.slot_ids: - slot.skipped = True - build = slot.build_id - if build.global_state in ('running', 'done'): - continue - testing_slots = build.slot_ids.filtered(lambda s: not s.skipped) - if not testing_slots: - if build.global_state == 'pending': - build._skip('Newer build found') - elif build.global_state in ('waiting', 'testing'): - if not build.killable: - build.killable = True - elif slot.link_type == 'created': - batches = testing_slots.mapped('batch_id') - _logger.info('Cannot skip build %s build is still in use in batches %s', build.id, batches.ids) - bundles = batches.mapped('bundle_id') - batch.bundle_id - if bundles: - batch._log('Cannot kill or skip build %s, build is used in another bundle: %s', build.id, bundles.mapped('name')) - - def _process(self): - processed = self.browse() - for batch in self: - if batch.state == 'preparing' and batch.last_update < fields.Datetime.now() - datetime.timedelta(seconds=60): - batch._prepare() - processed |= batch - elif batch.state == 'ready' and all(slot.build_id.global_state in (False, 'running', 'done') for slot in batch.slot_ids): - _logger.info('Batch %s is done', self.id) - batch._log('Batch done') - batch.state = 'done' - processed |= batch - return processed - - def _create_build(self, params): - """ - Create a build with given params_id if it does not already exists. - In the case that a very same build already exists that build is returned - """ - domain = [('params_id', '=', params.id), ('parent_id', '=', False)] - if self.bundle_id.host_id: - domain += [('host', '=', self.bundle_id.host_id.name), ('keep_host', '=', True)] - build = self.env['runbot.build'].search(domain, limit=1, order='id desc') - link_type = 'matched' - if build: - if build.killable: - build.killable = False - else: - description = params.trigger_id.description if params.trigger_id.description else False - link_type = 'created' - build = self.env['runbot.build'].create({ - 'params_id': params.id, - 'description': description, - 'build_type': 'normal' if self.category_id == self.env.ref('runbot.default_category') else 'scheduled', - 'no_auto_run': self.bundle_id.no_auto_run, - }) - if self.bundle_id.host_id: - build.host = self.bundle_id.host_id.name - build.keep_host = True - - build._github_status() - return link_type, build - - def _prepare(self, auto_rebase=False): - _logger.info('Preparing batch %s', self.id) - if not self.bundle_id.base_id: - # in some case the base can be detected lately. If a bundle has no base, recompute the base before preparing - self.bundle_id._compute_base_id() - for level, message in self.bundle_id.consistency_warning(): - if level == "warning": - self.warning("Bundle warning: %s" % message) - - self.state = 'ready' - - bundle = self.bundle_id - project = bundle.project_id - if not bundle.version_id: - _logger.error('No version found on bundle %s in project %s', bundle.name, project.name) - - dockerfile_id = bundle.dockerfile_id or bundle.base_id.dockerfile_id or bundle.version_id.dockerfile_id or bundle.project_id.dockerfile_id - if not dockerfile_id: - _logger.error('No dockerfile found !') - - triggers = self.env['runbot.trigger'].search([ # could be optimised for multiple batches. Ormcached method? 
- ('project_id', '=', project.id), - ('category_id', '=', self.category_id.id) - ]).filtered( - lambda t: not t.version_domain or \ - self.bundle_id.version_id.filtered_domain(t.get_version_domain()) - ) - - pushed_repo = self.commit_link_ids.mapped('commit_id.repo_id') - dependency_repos = triggers.mapped('dependency_ids') - all_repos = triggers.mapped('repo_ids') | dependency_repos - missing_repos = all_repos - pushed_repo - - ###################################### - # Find missing commits - ###################################### - def fill_missing(branch_commits, match_type): - if branch_commits: - for branch, commit in branch_commits.items(): # branch first in case pr is closed. - nonlocal missing_repos - if commit.repo_id in missing_repos: - if not branch.alive: - self._log("Skipping dead branch %s" % branch.name) - continue - values = { - 'commit_id': commit.id, - 'match_type': match_type, - 'branch_id': branch.id, - } - if match_type.startswith('base'): - values['base_commit_id'] = commit.id - values['merge_base_commit_id'] = commit.id - self.write({'commit_link_ids': [(0, 0, values)]}) - missing_repos -= commit.repo_id - - # CHECK branch heads consistency - branch_per_repo = {} - for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True): - if branch.alive: - commit = branch.head - repo = commit.repo_id - if repo not in branch_per_repo: - branch_per_repo[repo] = branch - elif branch_per_repo[repo].head != branch.head and branch.alive: - obranch = branch_per_repo[repo] - self._log("Branch %s and branch %s in repo %s don't have the same head: %s ≠ %s", branch.dname, obranch.dname, repo.name, branch.head.name, obranch.head.name) - - # 1.1 FIND missing commit in bundle heads - if missing_repos: - fill_missing({branch: branch.head for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True)}, 'head') - - # 1.2 FIND merge_base info for those commits - # use last not preparing batch to define previous repos_heads instead of branches heads: - # Will allow to have a diff info on base bundle, compare with previous bundle - last_base_batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.base_id.id), ('state', '!=', 'preparing'), ('category_id', '=', self.category_id.id), ('id', '!=', self.id)], order='id desc', limit=1) - base_head_per_repo = {commit.repo_id.id: commit for commit in last_base_batch.commit_ids} - self._update_commits_infos(base_head_per_repo) # set base_commit, diff infos, ... - - # 2. 
FIND missing commit in a compatible base bundle - if not bundle.is_base: - merge_base_commits = self.commit_link_ids.mapped('merge_base_commit_id') - if auto_rebase: - self.base_reference_batch_id = last_base_batch - else: - self.base_reference_batch_id = False - link_commit = self.env['runbot.commit.link'].search([ - ('commit_id', 'in', merge_base_commits.ids), - ('match_type', 'in', ('new', 'head')) - ]) - batches = self.env['runbot.batch'].search([ - ('bundle_id', '=', bundle.base_id.id), - ('commit_link_ids', 'in', link_commit.ids), - ('state', '!=', 'preparing'), - ('category_id', '=', self.category_id.id) - ]).sorted(lambda b: (len(b.commit_ids & merge_base_commits), b.id), reverse=True) - if batches: - self.base_reference_batch_id = batches[0] - - batch = self.base_reference_batch_id - if batch: - if missing_repos: - self._log('Using batch [%s](%s) to define missing commits', batch.id, batch._url()) - fill_missing({link.branch_id: link.commit_id for link in batch.commit_link_ids}, 'base_match') - # check if all mergebase match reference batch - batch_exiting_commit = batch.commit_ids.filtered(lambda c: c.repo_id in merge_base_commits.repo_id) - not_matching = (batch_exiting_commit - merge_base_commits) - if not_matching and not auto_rebase: - message = 'Only %s out of %s merge base matched. You may want to rebase your branches to ensure compatibility' % (len(merge_base_commits)-len(not_matching), len(merge_base_commits)) - suggestions = [('Tip: rebase %s to %s' % (commit.repo_id.name, commit.name)) for commit in not_matching] - self.warning('%s\n%s' % (message, '\n'.join(suggestions))) - else: - self._log('No reference batch found to fill missing commits') - - # 3.1 FIND missing commit in base heads - if missing_repos: - if not bundle.is_base: - self._log('Not all commit found in bundle branches and base batch. Fallback on base branches heads.') - fill_missing({branch: branch.head for branch in self.bundle_id.base_id.branch_ids}, 'base_head') - - # 3.2 FIND missing commit in master base heads - if missing_repos: # this is to get an upgrade branch. - if not bundle.is_base: - self._log('Not all commit found in current version. Fallback on master branches heads.') - master_bundle = self.env['runbot.version']._get('master').with_context(project_id=self.bundle_id.project_id.id).base_bundle_id - fill_missing({branch: branch.head for branch in master_bundle.branch_ids}, 'base_head') - - # 4. FIND missing commit in foreign project - if missing_repos: - foreign_projects = dependency_repos.mapped('project_id') - project - if foreign_projects: - self._log('Not all commit found. 
Fallback on foreign base branches heads.') - foreign_bundles = bundle.search([('name', '=', bundle.name), ('project_id', 'in', foreign_projects.ids)]) - fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids').sorted('is_pr', reverse=True)}, 'head') - if missing_repos: - foreign_bundles = bundle.search([('name', '=', bundle.base_id.name), ('project_id', 'in', foreign_projects.ids)]) - fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids')}, 'base_head') - - # CHECK missing commit - if missing_repos: - _logger.warning('Missing repo %s for batch %s', missing_repos.mapped('name'), self.id) - - ###################################### - # Generate build params - ###################################### - if auto_rebase: - for commit_link in self.commit_link_ids: - commit_link.commit_id = commit_link.commit_id._rebase_on(commit_link.base_commit_id) - commit_link_by_repos = {commit_link.commit_id.repo_id.id: commit_link for commit_link in self.commit_link_ids} - bundle_repos = bundle.branch_ids.mapped('remote_id.repo_id') - version_id = self.bundle_id.version_id.id - project_id = self.bundle_id.project_id.id - trigger_customs = {} - for trigger_custom in self.bundle_id.trigger_custom_ids: - trigger_customs[trigger_custom.trigger_id] = trigger_custom - for trigger in triggers: - trigger_custom = trigger_customs.get(trigger) - trigger_repos = trigger.repo_ids | trigger.dependency_ids - if trigger_repos & missing_repos: - self.warning('Missing commit for repo %s for trigger %s', (trigger_repos & missing_repos).mapped('name'), trigger.name) - continue - # in any case, search for an existing build - - config = trigger_custom.config_id if trigger_custom else trigger.config_id - extra_params = trigger_custom.extra_params if trigger_custom else '' - config_data = trigger_custom.config_data if trigger_custom else {} - params_value = { - 'version_id': version_id, - 'extra_params': extra_params, - 'config_id': config.id, - 'project_id': project_id, - 'trigger_id': trigger.id, # for future reference and access rights - 'config_data': config_data, - 'commit_link_ids': [(6, 0, [commit_link_by_repos[repo.id].id for repo in trigger_repos])], - 'modules': bundle.modules, - 'dockerfile_id': dockerfile_id, - 'create_batch_id': self.id, - 'used_custom_trigger': bool(trigger_custom), - } - params_value['builds_reference_ids'] = trigger._reference_builds(bundle) - - params = self.env['runbot.build.params'].create(params_value) - - build = self.env['runbot.build'] - link_type = 'created' - force_trigger = trigger_custom and trigger_custom.start_mode == 'force' - skip_trigger = (trigger_custom and trigger_custom.start_mode == 'disable') or trigger.manual - should_start = ((trigger.repo_ids & bundle_repos) or bundle.build_all or bundle.sticky) - if force_trigger or (should_start and not skip_trigger): # only auto link build if bundle has a branch for this trigger - link_type, build = self._create_build(params) - self.env['runbot.batch.slot'].create({ - 'batch_id': self.id, - 'trigger_id': trigger.id, - 'build_id': build.id, - 'params_id': params.id, - 'link_type': link_type, - }) - - ###################################### - # SKIP older batches - ###################################### - default_category = self.env.ref('runbot.default_category') - if not bundle.sticky and self.category_id == default_category: - skippable = self.env['runbot.batch'].search([ - ('bundle_id', '=', bundle.id), - ('state', 'not in', ('done', 'skipped')), - ('id', '<', self.id), - 
('category_id', '=', default_category.id) - ]) - skippable._skip() - - def _update_commits_infos(self, base_head_per_repo): - for link_commit in self.commit_link_ids: - commit = link_commit.commit_id - base_head = base_head_per_repo.get(commit.repo_id.id) - if not base_head: - self.warning('No base head found for repo %s', commit.repo_id.name) - continue - link_commit.base_commit_id = base_head - merge_base_sha = False - try: - link_commit.base_ahead = link_commit.base_behind = 0 - link_commit.file_changed = link_commit.diff_add = link_commit.diff_remove = 0 - link_commit.merge_base_commit_id = commit.id - if commit.name == base_head.name: - continue - merge_base_sha = commit.repo_id._git(['merge-base', commit.name, base_head.name]).strip() - merge_base_commit = self.env['runbot.commit']._get(merge_base_sha, commit.repo_id.id) - link_commit.merge_base_commit_id = merge_base_commit.id - - ahead, behind = commit.repo_id._git(['rev-list', '--left-right', '--count', '%s...%s' % (commit.name, base_head.name)]).strip().split('\t') - - link_commit.base_ahead = int(ahead) - link_commit.base_behind = int(behind) - - if merge_base_sha == commit.name: - continue - - # diff. Iter on --numstat, easier to parse than --shortstat summary - diff = commit.repo_id._git(['diff', '--numstat', merge_base_sha, commit.name]).strip() - if diff: - for line in diff.split('\n'): - link_commit.file_changed += 1 - add, remove, _ = line.split(None, 2) - try: - link_commit.diff_add += int(add) - link_commit.diff_remove += int(remove) - except ValueError: # binary files - pass - except subprocess.CalledProcessError: - self.warning('Commit info failed between %s and %s', commit.name, base_head.name) - - def warning(self, message, *args): - self.has_warning = True - _logger.warning('batch %s: ' + message, self.id, *args) - self._log(message, *args, level='WARNING') - - def _log(self, message, *args, level='INFO'): - message = message % args if args else message - self.env['runbot.batch.log'].create({ - 'batch_id': self.id, - 'message': message, - 'level': level, - }) - - -class BatchLog(models.Model): - _name = 'runbot.batch.log' - _description = 'Batch log' - - batch_id = fields.Many2one('runbot.batch', index=True) - message = fields.Text('Message') - level = fields.Char() - - - def _markdown(self): - """ Apply pseudo markdown parser for message. - """ - self.ensure_one() - return pseudo_markdown(self.message) - - - -class BatchSlot(models.Model): - _name = 'runbot.batch.slot' - _description = 'Link between a bundle batch and a build' - _order = 'trigger_id,id' - - _fa_link_type = {'created': 'hashtag', 'matched': 'link', 'rebuild': 'refresh'} - - batch_id = fields.Many2one('runbot.batch', index=True) - trigger_id = fields.Many2one('runbot.trigger', index=True) - build_id = fields.Many2one('runbot.build', index=True) - all_build_ids = fields.Many2many('runbot.build', compute='_compute_all_build_ids') - params_id = fields.Many2one('runbot.build.params', index=True, required=True) - link_type = fields.Selection([('created', 'Build created'), ('matched', 'Existing build matched'), ('rebuild', 'Rebuild')], required=True) # rebuild type? - active = fields.Boolean('Attached', default=True) - skipped = fields.Boolean('Skipped', default=False) - # rebuild, what to do: since build can be in multiple batch: - # - replace for all batch? - # - only available on batch and replace for batch only? - # - create a new bundle batch will new linked build? 
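The `_update_commits_infos` method above relies on three git plumbing calls: `merge-base` to find the common ancestor, `rev-list --left-right --count` for the ahead/behind counts, and `diff --numstat` for per-file additions and removals. The following is a minimal standalone sketch of that flow, assuming a local repository path and two commit names passed as plain strings; it is illustrative only and not code from the removed module.

import subprocess

def _git(repo_path, args):
    # sketch only: run a git command in repo_path and return its stripped stdout
    return subprocess.check_output(['git', '-C', repo_path] + args, text=True).strip()

def commit_infos(repo_path, commit, base_head):
    merge_base = _git(repo_path, ['merge-base', commit, base_head])
    # commits only reachable from `commit` (ahead) vs. only from `base_head` (behind)
    ahead, behind = _git(
        repo_path, ['rev-list', '--left-right', '--count', f'{commit}...{base_head}']
    ).split('\t')
    files_changed = added = removed = 0
    if merge_base != commit:
        # --numstat is easier to parse than the human readable --shortstat summary
        for line in _git(repo_path, ['diff', '--numstat', merge_base, commit]).splitlines():
            add, rem, _path = line.split(None, 2)
            files_changed += 1
            if add != '-':  # binary files are reported as "-\t-\t<path>"
                added += int(add)
                removed += int(rem)
    return {'merge_base': merge_base, 'ahead': int(ahead), 'behind': int(behind),
            'files_changed': files_changed, 'added': added, 'removed': removed}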
- - @api.depends('build_id') - def _compute_all_build_ids(self): - all_builds = self.env['runbot.build'].search([('id', 'child_of', self.build_id.ids)]) - for slot in self: - slot.all_build_ids = all_builds.filtered_domain([('id', 'child_of', slot.build_id.ids)]) - - def fa_link_type(self): - return self._fa_link_type.get(self.link_type, 'exclamation-triangle') - - def _create_missing_build(self): - """Create a build when the slot does not have one""" - self.ensure_one() - if self.build_id: - return self.build_id - self.batch_id._log(f'Trigger {self.trigger_id.name} was started by {self.env.user.name}') - self.link_type, self.build_id = self.batch_id._create_build(self.params_id) - return self.build_id diff --git a/runbot/models/branch.py b/runbot/models/branch.py deleted file mode 100644 index bf3b81d3..00000000 --- a/runbot/models/branch.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import re - -from collections import defaultdict -from odoo import models, fields, api - -_logger = logging.getLogger(__name__) - - -class Branch(models.Model): - _name = 'runbot.branch' - _description = "Branch" - _order = 'name' - _sql_constraints = [('branch_repo_uniq', 'unique (name,remote_id)', 'The branch must be unique per repository !')] - - name = fields.Char('Name', required=True) - remote_id = fields.Many2one('runbot.remote', 'Remote', required=True, ondelete='cascade') - - head = fields.Many2one('runbot.commit', 'Head Commit', index=True) - head_name = fields.Char('Head name', related='head.name', store=True) - - reference_name = fields.Char(compute='_compute_reference_name', string='Bundle name', store=True) - bundle_id = fields.Many2one('runbot.bundle', 'Bundle', compute='_compute_bundle_id', store=True, ondelete='cascade', index=True) - - is_pr = fields.Boolean('IS a pr', required=True) - pull_head_name = fields.Char(compute='_compute_branch_infos', string='PR HEAD name', readonly=1, store=True) - pull_head_remote_id = fields.Many2one('runbot.remote', 'Pull head repository', compute='_compute_branch_infos', store=True, index=True) - target_branch_name = fields.Char(compute='_compute_branch_infos', string='PR target branch', store=True) - reviewers = fields.Char('Reviewers') - - reflog_ids = fields.One2many('runbot.ref.log', 'branch_id') - - branch_url = fields.Char(compute='_compute_branch_url', string='Branch url', readonly=1) - dname = fields.Char('Display name', compute='_compute_dname', search='_search_dname') - - alive = fields.Boolean('Alive', default=True) - draft = fields.Boolean('Draft', compute='_compute_branch_infos', store=True) - - @api.depends('name', 'remote_id.short_name') - def _compute_dname(self): - for branch in self: - branch.dname = '%s:%s' % (branch.remote_id.short_name, branch.name) - - def _search_dname(self, operator, value): - if ':' not in value: - return [('name', operator, 'value')] - repo_short_name, branch_name = value.split(':') - owner, repo_name = repo_short_name.split('/') - return ['&', ('remote_id', '=', self.env['runbot.remote'].search([('owner', '=', owner), ('repo_name', '=', repo_name)]).id), ('name', operator, branch_name)] - - @api.depends('name', 'is_pr', 'target_branch_name', 'pull_head_name', 'pull_head_remote_id') - def _compute_reference_name(self): - """ - Unique reference for a branch inside a bundle. 
- - branch_name for branches - - branch name part of pull_head_name for pr if remote is known - - pull_head_name (organisation:branch_name) for external pr - """ - for branch in self: - if branch.is_pr: - _, name = branch.pull_head_name.split(':') - if branch.pull_head_remote_id: - reference_name = name - else: - reference_name = branch.pull_head_name # repo is not known, not in repo list must be an external pr, so use complete label - #if ':patch-' in branch.pull_head_name: - # branch.reference_name = '%s~%s' % (branch.pull_head_name, branch.name) - else: - reference_name = branch.name - forced_version = branch.remote_id.repo_id.single_version # we don't add a depend on repo.single_version to avoid mass recompute of existing branches - if forced_version and not reference_name.startswith(f'{forced_version.name}-'): - reference_name = f'{forced_version.name}---{reference_name}' - branch.reference_name = reference_name - - @api.depends('name') - def _compute_branch_infos(self, pull_info=None): - """compute branch_url, pull_head_name and target_branch_name based on name""" - name_to_remote = {} - prs = self.filtered(lambda branch: branch.is_pr) - pull_info_dict = {} - if not pull_info and len(prs) > 30: # this is arbitrary, we should store # page on remote - pr_per_remote = defaultdict(list) - for pr in prs: - pr_per_remote[pr.remote_id].append(pr) - for remote, prs in pr_per_remote.items(): - _logger.info('Getting info in %s for %s pr using page scan', remote.name, len(prs)) - pr_names = set([pr.name for pr in prs]) - count = 0 - for result in remote._github('/repos/:owner/:repo/pulls?state=all&sort=updated&direction=desc', ignore_errors=True, recursive=True): - for info in result: - number = str(info.get('number')) - pr_names.discard(number) - pull_info_dict[(remote, number)] = info - count += 1 - if not pr_names: - break - if count > 100: - _logger.info('Not all pr found after 100 pages: remaining: %s', pr_names) - break - - for branch in self: - branch.target_branch_name = False - branch.pull_head_name = False - branch.pull_head_remote_id = False - if branch.name: - pi = branch.is_pr and (pull_info or pull_info_dict.get((branch.remote_id, branch.name)) or branch._get_pull_info()) - if pi: - try: - branch.draft = pi.get('draft', False) - branch.alive = pi.get('state', False) != 'closed' - branch.target_branch_name = pi['base']['ref'] - branch.pull_head_name = pi['head']['label'] - pull_head_repo_name = False - if pi['head'].get('repo'): - pull_head_repo_name = pi['head']['repo'].get('full_name') - if pull_head_repo_name not in name_to_remote: - owner, repo_name = pull_head_repo_name.split('/') - name_to_remote[pull_head_repo_name] = self.env['runbot.remote'].search([('owner', '=', owner), ('repo_name', '=', repo_name)], limit=1) - branch.pull_head_remote_id = name_to_remote[pull_head_repo_name] - except (TypeError, AttributeError): - _logger.exception('Error for pr %s using pull_info %s', branch.name, pi) - raise - - @api.depends('name', 'remote_id.base_url', 'is_pr') - def _compute_branch_url(self): - """compute the branch url based on name""" - for branch in self: - if branch.name: - if branch.is_pr: - branch.branch_url = "https://%s/pull/%s" % (branch.remote_id.base_url, branch.name) - else: - branch.branch_url = "https://%s/tree/%s" % (branch.remote_id.base_url, branch.name) - else: - branch.branch_url = '' - - @api.depends('reference_name', 'remote_id.repo_id.project_id') - def _compute_bundle_id(self): - dummy = self.env.ref('runbot.bundle_dummy') - for branch in self: - if 
branch.bundle_id == dummy: - continue - name = branch.reference_name - project = branch.remote_id.repo_id.project_id or self.env.ref('runbot.main_project') - project.ensure_one() - bundle = self.env['runbot.bundle'].search([('name', '=', name), ('project_id', '=', project.id)]) - need_new_base = not bundle and branch.match_is_base(name) - if (bundle.is_base or need_new_base) and branch.remote_id != branch.remote_id.repo_id.main_remote_id: - _logger.warning('Trying to add a dev branch to base bundle, falling back on dummy bundle') - bundle = dummy - elif name and branch.remote_id and branch.remote_id.repo_id._is_branch_forbidden(name): - _logger.warning('Trying to add a forbidden branch, falling back on dummy bundle') - bundle = dummy - elif bundle.is_base and branch.is_pr: - _logger.warning('Trying to add pr to base bundle, falling back on dummy bundle') - bundle = dummy - elif not bundle: - values = { - 'name': name, - 'project_id': project.id, - } - if need_new_base: - values['is_base'] = True - - if branch.is_pr and branch.target_branch_name: # most likely external_pr, use target as version - base = self.env['runbot.bundle'].search([ - ('name', '=', branch.target_branch_name), - ('is_base', '=', True), - ('project_id', '=', project.id) - ]) - if base: - values['defined_base_id'] = base.id - if name: - bundle = self.env['runbot.bundle'].create(values) # this prevent creating a branch in UI - branch.bundle_id = bundle - - @api.model_create_multi - def create(self, value_list): - branches = super().create(value_list) - for branch in branches: - if branch.head: - self.env['runbot.ref.log'].create({'commit_id': branch.head.id, 'branch_id': branch.id}) - return branches - - def write(self, values): - if 'head' in values: - head = self.head - super().write(values) - if 'head' in values and head != self.head: - self.env['runbot.ref.log'].create({'commit_id': self.head.id, 'branch_id': self.id}) - - def _get_pull_info(self): - self.ensure_one() - remote = self.remote_id - if self.is_pr: - _logger.info('Getting info for %s', self.name) - return remote._github('/repos/:owner/:repo/pulls/%s' % self.name, ignore_errors=False) or {} # TODO catch and send a managable exception - return {} - - def ref(self): - return 'refs/%s/%s/%s' % ( - self.remote_id.remote_name, - 'pull' if self.is_pr else 'heads', - self.name - ) - - def recompute_infos(self, payload=None): - """ public method to recompute infos on demand """ - was_draft = self.draft - was_alive = self.alive - init_target_branch_name = self.target_branch_name - self._compute_branch_infos(payload) - if self.target_branch_name != init_target_branch_name: - _logger.info('retargeting %s to %s', self.name, self.target_branch_name) - base = self.env['runbot.bundle'].search([ - ('name', '=', self.target_branch_name), - ('is_base', '=', True), - ('project_id', '=', self.remote_id.repo_id.project_id.id) - ]) - if base and self.bundle_id.defined_base_id != base: - _logger.info('Changing base of bundle %s to %s(%s)', self.bundle_id, base.name, base.id) - self.bundle_id.defined_base_id = base.id - self.bundle_id._force() - - if self.draft: - self.reviewers = '' # reset reviewers on draft - - if (not self.draft and was_draft) or (self.alive and not was_alive) or (self.target_branch_name != init_target_branch_name and self.alive): - self.bundle_id._force() - - @api.model - def match_is_base(self, name): - """match against is_base_regex ir.config_parameter""" - if not name: - return False - icp = self.env['ir.config_parameter'].sudo() - regex = 
icp.get_param('runbot.runbot_is_base_regex', False) - if regex: - return re.match(regex, name) - - -class RefLog(models.Model): - _name = 'runbot.ref.log' - _description = 'Ref log' - _log_access = False - - commit_id = fields.Many2one('runbot.commit', index=True) - branch_id = fields.Many2one('runbot.branch', index=True) - date = fields.Datetime(default=fields.Datetime.now) diff --git a/runbot/models/build.py b/runbot/models/build.py deleted file mode 100644 index 32f6d7ce..00000000 --- a/runbot/models/build.py +++ /dev/null @@ -1,1192 +0,0 @@ -# -*- coding: utf-8 -*- -import fnmatch -import logging -import pwd -import re -import shutil -import subprocess -import time -import datetime -import hashlib -from ..common import dt2time, fqdn, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall -from ..container import docker_stop, docker_state, Command, docker_run -from ..fields import JsonDictField -from odoo import models, fields, api -from odoo.exceptions import UserError, ValidationError -from odoo.http import request -from odoo.tools import appdirs -from odoo.tools.safe_eval import safe_eval -from collections import defaultdict -from pathlib import Path -from psycopg2 import sql -import getpass - -_logger = logging.getLogger(__name__) - -result_order = ['ok', 'warn', 'ko', 'skipped', 'killed', 'manually_killed'] -state_order = ['pending', 'testing', 'waiting', 'running', 'done'] - -COPY_WHITELIST = [ - "params_id", - "description", - "build_type", - "parent_id", - "orphan_result", -] - - -def make_selection(array): - return [(elem, elem.replace('_', ' ').capitalize()) if isinstance(elem, str) else elem for elem in array] - - -class BuildParameters(models.Model): - _name = 'runbot.build.params' - _description = "All information used by a build to run, should be unique and set on create only" - - # on param or on build? - # execution parametter - commit_link_ids = fields.Many2many('runbot.commit.link', copy=True) - commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids') - version_id = fields.Many2one('runbot.version', required=True, index=True) - project_id = fields.Many2one('runbot.project', required=True, index=True) # for access rights - trigger_id = fields.Many2one('runbot.trigger', index=True) # for access rights - create_batch_id = fields.Many2one('runbot.batch') - category = fields.Char('Category', index=True) # normal vs nightly vs weekly, ... 
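Build parameters are meant to be immutable and shared: as `_compute_fingerprint` and `create` below show, the significant values are flattened into a dict, hashed, and an existing record with the same fingerprint is reused instead of creating a duplicate. A minimal sketch of that deduplication scheme, using plain dicts in place of ORM records (the names and helpers here are illustrative, not from the module):

import hashlib

_params_by_fingerprint = {}  # stands in for the unique_fingerprint SQL constraint

def fingerprint(values):
    # canonicalise key order so equal contents always hash to the same digest
    canonical = {key: values[key] for key in sorted(values)}
    return hashlib.sha256(str(canonical).encode('utf8')).hexdigest()

def get_or_create_params(values):
    fp = fingerprint(values)
    if fp in _params_by_fingerprint:
        return _params_by_fingerprint[fp]  # reuse the matching record
    _params_by_fingerprint[fp] = dict(values, fingerprint=fp)
    return _params_by_fingerprint[fp]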
- dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, default=lambda self: self.env.ref('runbot.docker_default', raise_if_not_found=False)) - skip_requirements = fields.Boolean('Skip requirements.txt auto install') - # other informations - extra_params = fields.Char('Extra cmd args') - config_id = fields.Many2one('runbot.build.config', 'Run Config', required=True, - default=lambda self: self.env.ref('runbot.runbot_build_config_default', raise_if_not_found=False), index=True) - config_data = JsonDictField('Config Data') - used_custom_trigger = fields.Boolean('Custom trigger was used to generate this build') - - build_ids = fields.One2many('runbot.build', 'params_id') - builds_reference_ids = fields.Many2many('runbot.build', relation='runbot_build_params_references', copy=True) - modules = fields.Char('Modules') - - upgrade_to_build_id = fields.Many2one('runbot.build', index=True) # use to define sources to use with upgrade script - upgrade_from_build_id = fields.Many2one('runbot.build', index=True) # use to download db - dump_db = fields.Many2one('runbot.database', index=True) # use to define db to download - - fingerprint = fields.Char('Fingerprint', compute='_compute_fingerprint', store=True, index=True) - - _sql_constraints = [ - ('unique_fingerprint', 'unique (fingerprint)', 'avoid duplicate params'), - ] - - # @api.depends('version_id', 'project_id', 'extra_params', 'config_id', 'config_data', 'modules', 'commit_link_ids', 'builds_reference_ids') - def _compute_fingerprint(self): - for param in self: - cleaned_vals = { - 'version_id': param.version_id.id, - 'project_id': param.project_id.id, - 'trigger_id': param.trigger_id.id, - 'extra_params': param.extra_params or '', - 'config_id': param.config_id.id, - 'config_data': param.config_data.dict, - 'modules': param.modules or '', - 'commit_link_ids': sorted(param.commit_link_ids.commit_id.ids), - 'builds_reference_ids': sorted(param.builds_reference_ids.ids), - 'upgrade_from_build_id': param.upgrade_from_build_id.id, - 'upgrade_to_build_id': param.upgrade_to_build_id.id, - 'dump_db': param.dump_db.id, - 'dockerfile_id': param.dockerfile_id.id, - 'skip_requirements': param.skip_requirements, - } - if param.trigger_id.batch_dependent: - cleaned_vals['create_batch_id'] = param.create_batch_id.id - if param.used_custom_trigger: - cleaned_vals['used_custom_trigger'] = True - - param.fingerprint = hashlib.sha256(str(cleaned_vals).encode('utf8')).hexdigest() - - @api.depends('commit_link_ids') - def _compute_commit_ids(self): - for params in self: - params.commit_ids = params.commit_link_ids.commit_id - - def create(self, values): - params = self.new(values) - match = self._find_existing(params.fingerprint) - if match: - return match - values = self._convert_to_write(params._cache) - return super().create(values) - - def _find_existing(self, fingerprint): - return self.env['runbot.build.params'].search([('fingerprint', '=', fingerprint)], limit=1) - - def write(self, vals): - raise UserError('Params cannot be modified') - - -class BuildResult(models.Model): - # remove duplicate management - # instead, link between bundle_batch and build - # kill -> only available from bundle. - # kill -> actually detach the build from the bundle - # rebuild: detach and create a new link (a little like exact rebuild), - # if a build is detached from all bundle, kill it - # nigktly? - - _name = 'runbot.build' - _description = "Build" - - _parent_store = True - _order = 'id desc' - _rec_name = 'id' - - # all displayed info removed. 
How to replace that? - # -> commit corresponding to repo of trigger_id5 - # -> display all? - - params_id = fields.Many2one('runbot.build.params', required=True, index=True, auto_join=True) - no_auto_run = fields.Boolean('No run') - # could be a default value, but possible to change it to allow duplicate accros branches - - description = fields.Char('Description', help='Informative description') - md_description = fields.Char(compute='_compute_md_description', string='MD Parsed Description', help='Informative description markdown parsed') - display_name = fields.Char(compute='_compute_display_name') - - # Related fields for convenience - version_id = fields.Many2one('runbot.version', related='params_id.version_id', store=True, index=True) - config_id = fields.Many2one('runbot.build.config', related='params_id.config_id', store=True, index=True) - trigger_id = fields.Many2one('runbot.trigger', related='params_id.trigger_id', store=True, index=True) - - # state machine - global_state = fields.Selection(make_selection(state_order), string='Status', compute='_compute_global_state', store=True, recursive=True) - local_state = fields.Selection(make_selection(state_order), string='Build Status', default='pending', required=True, index=True) - global_result = fields.Selection(make_selection(result_order), string='Result', compute='_compute_global_result', store=True, recursive=True) - local_result = fields.Selection(make_selection(result_order), string='Build Result') - triggered_result = fields.Selection(make_selection(result_order), string='Triggered Result') # triggered by db only - - requested_action = fields.Selection([('wake_up', 'To wake up'), ('deathrow', 'To kill')], string='Action requested', index=True) - # web infos - host = fields.Char('Host') - keep_host = fields.Boolean('Keep host on rebuild and for children') - - port = fields.Integer('Port') - dest = fields.Char(compute='_compute_dest', type='char', string='Dest', readonly=1, store=True) - domain = fields.Char(compute='_compute_domain', type='char', string='URL') - # logs and stats - log_ids = fields.One2many('ir.logging', 'build_id', string='Logs') - error_log_ids = fields.One2many('ir.logging', 'build_id', domain=[('level', 'in', ['WARNING', 'ERROR', 'CRITICAL'])], string='Error Logs') - stat_ids = fields.One2many('runbot.build.stat', 'build_id', string='Statistics values') - log_list = fields.Char('Comma separted list of step_ids names with logs', compute="_compute_log_list", store=True) - - active_step = fields.Many2one('runbot.build.config.step', 'Active step') - job = fields.Char('Active step display name', compute='_compute_job') - job_start = fields.Datetime('Job start') - job_end = fields.Datetime('Job end') - build_start = fields.Datetime('Build start') - build_end = fields.Datetime('Build end') - docker_start = fields.Datetime('Docker start') - job_time = fields.Integer(compute='_compute_job_time', string='Job time') - build_time = fields.Integer(compute='_compute_build_time', string='Build time') - - gc_date = fields.Datetime('Local cleanup date', compute='_compute_gc_date') - gc_delay = fields.Integer('Cleanup Delay', help='Used to compute gc_date') - - build_age = fields.Integer(compute='_compute_build_age', string='Build age') - - coverage = fields.Boolean('Code coverage was computed for this build') - coverage_result = fields.Float('Coverage result', digits=(5, 2)) - build_type = fields.Selection([('scheduled', 'This build was automatically scheduled'), - ('rebuild', 'This build is a rebuild'), - ('normal', 'normal 
build'), - ('indirect', 'Automatic rebuild'), # TODO cleanup remove - ], - default='normal', - string='Build type') - - # what about parent_id and duplmicates? - # -> always create build, no duplicate? (make sence since duplicate should be the parent and params should be inherited) - # -> build_link ? - - parent_id = fields.Many2one('runbot.build', 'Parent Build', index=True) - parent_path = fields.Char('Parent path', index=True) - top_parent = fields.Many2one('runbot.build', compute='_compute_top_parent') - ancestors = fields.Many2many('runbot.build', compute='_compute_ancestors') - # should we add a has children stored boolean? - children_ids = fields.One2many('runbot.build', 'parent_id') - - # config of top_build is inherithed from params, but subbuild will have different configs - - orphan_result = fields.Boolean('No effect on the parent result', default=False) - - build_url = fields.Char('Build url', compute='_compute_build_url', store=False) - build_error_ids = fields.Many2many('runbot.build.error', 'runbot_build_error_ids_runbot_build_rel', string='Errors') - keep_running = fields.Boolean('Keep running', help='Keep running', index=True) - log_counter = fields.Integer('Log Lines counter', default=100) - - slot_ids = fields.One2many('runbot.batch.slot', 'build_id') - killable = fields.Boolean('Killable') - - database_ids = fields.One2many('runbot.database', 'build_id') - - static_run = fields.Char('Static run URL') - - @api.depends('description', 'params_id.config_id') - def _compute_display_name(self): - for build in self: - build.display_name = build.description or build.config_id.name - - @api.depends('params_id.config_id') - def _compute_log_list(self): # storing this field because it will be access trhoug repo viewn and keep track of the list at create - for build in self: - build.log_list = ','.join({step.name for step in build.params_id.config_id.step_ids() if step._has_log()}) - # TODO replace logic, add log file to list when executed (avoid 404, link log on docker start, avoid fake is_docker_step) - - @api.depends('children_ids.global_state', 'local_state') - def _compute_global_state(self): - for record in self: - waiting_score = record._get_state_score('waiting') - children_ids = [child for child in record.children_ids if not child.orphan_result] - if record._get_state_score(record.local_state) > waiting_score and children_ids: # if finish, check children - children_state = record._get_youngest_state([child.global_state for child in children_ids]) - if record._get_state_score(children_state) > waiting_score: - record.global_state = record.local_state - else: - record.global_state = 'waiting' - else: - record.global_state = record.local_state - - @api.depends('gc_delay', 'job_end') - def _compute_gc_date(self): - icp = self.env['ir.config_parameter'].sudo() - max_days_main = int(icp.get_param('runbot.db_gc_days', default=30)) - max_days_child = int(icp.get_param('runbot.db_gc_days_child', default=15)) - for build in self: - ref_date = fields.Datetime.from_string(build.job_end or build.create_date or fields.Datetime.now()) - max_days = max_days_main if not build.parent_id else max_days_child - max_days += int(build.gc_delay if build.gc_delay else 0) - build.gc_date = ref_date + datetime.timedelta(days=(max_days)) - - @api.depends('description') - def _compute_md_description(self): - for build in self: - build.md_description = pseudo_markdown(build.description) - - def _compute_top_parent(self): - for build in self: - build.top_parent = 
self.browse(int(build.parent_path.split('/')[0])) - - def _compute_ancestors(self): - for build in self: - build.ancestors = self.browse([int(b) for b in build.parent_path.split('/') if b]) - - def _get_youngest_state(self, states): - index = min([self._get_state_score(state) for state in states]) - return state_order[index] - - def _get_state_score(self, result): - return state_order.index(result) - - @api.depends('children_ids.global_result', 'local_result', 'children_ids.orphan_result') - def _compute_global_result(self): - for record in self: - if record.local_result and record._get_result_score(record.local_result) >= record._get_result_score('ko'): - record.global_result = record.local_result - else: - children_ids = [child for child in record.children_ids if not child.orphan_result] - if children_ids: - children_result = record._get_worst_result([child.global_result for child in children_ids], max_res='ko') - if record.local_result: - record.global_result = record._get_worst_result([record.local_result, children_result]) - else: - record.global_result = children_result - else: - record.global_result = record.local_result - - def _get_worst_result(self, results, max_res=False): - results = [result for result in results if result] # filter Falsy values - index = max([self._get_result_score(result) for result in results]) if results else 0 - if max_res: - return result_order[min([index, self._get_result_score(max_res)])] - return result_order[index] - - def _get_result_score(self, result): - return result_order.index(result) - - @api.depends('active_step') - def _compute_job(self): - for build in self: - build.job = build.active_step.name - - def copy_data(self, default=None): - values = super().copy_data(default)[0] or {} - default = dict(default or []) - values = {key: value for key, value in values.items() if (key in COPY_WHITELIST or key in default)} - values.update({ - 'host': 'PAUSED', # hack to keep the build in pending waiting for a manual update. 
Todo: add a paused flag instead - 'local_state': 'pending', - }) - return [values] - - def write(self, values): - # some validation to ensure db consistency - if 'local_state' in values: - build_by_old_values = defaultdict(lambda: self.env['runbot.build']) - for record in self: - build_by_old_values[record.local_state] += record - if values['local_state'] == 'done': - self.env['runbot.commit.export'].search([('build_id', 'in', self.ids)]).unlink() - local_result = values.get('local_result') - for build in self: - if local_result and local_result != self._get_worst_result([build.local_result, local_result]): # dont write ok on a warn/error build - if len(self) == 1: - values.pop('local_result') - else: - raise ValidationError('Local result cannot be set to a less critical level') - res = super(BuildResult, self).write(values) - if 'log_counter' in values: # not 100% usefull but more correct ( see test_ir_logging) - self.flush() - return res - - def _add_child(self, param_values, orphan=False, description=False, additionnal_commit_links=False): - if additionnal_commit_links: - commit_link_ids = self.params_id.commit_link_ids - commit_link_ids |= additionnal_commit_links - param_values['commit_link_ids'] = commit_link_ids - - return self.create({ - 'params_id': self.params_id.copy(param_values).id, - 'parent_id': self.id, - 'build_type': self.build_type, - 'description': description, - 'orphan_result': orphan, - 'keep_host': self.keep_host, - 'host': self.host if self.keep_host else False, - }) - - def result_multi(self): - if all(build.global_result == 'ok' or not build.global_result for build in self): - return 'ok' - if any(build.global_result in ('skipped', 'killed', 'manually_killed') for build in self): - return 'killed' - if any(build.global_result == 'ko' for build in self): - return 'ko' - if any(build.global_result == 'warning' for build in self): - return 'warning' - return 'ko' # ? 
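The global state and result computations above reduce children results to a single value by ranking each result by its position in `result_order` (declared at the top of build.py) and taking the maximum index, optionally capped at a given severity. A minimal sketch of that aggregation, outside the ORM:

result_order = ['ok', 'warn', 'ko', 'skipped', 'killed', 'manually_killed']

def worst_result(results, max_res=None):
    # a result's severity is its index in result_order; the worst is the highest index
    scores = [result_order.index(result) for result in results if result]  # ignore unset results
    index = max(scores) if scores else 0
    if max_res:
        index = min(index, result_order.index(max_res))  # cap, e.g. children never worse than 'ko'
    return result_order[index]

# worst_result(['ok', 'warn', False]) == 'warn'
# worst_result(['ok', 'killed'], max_res='ko') == 'ko'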
- - def update_build_end(self): - for build in self: - build.build_end = now() - if build.parent_id and build.parent_id.local_state in ('running', 'done'): - build.parent_id.update_build_end() - - @api.depends('params_id.version_id.name') - def _compute_dest(self): - for build in self: - if build.id: - nickname = build.params_id.version_id.name - nickname = re.sub(r'"|\'|~|\:', '', nickname) - nickname = re.sub(r'_|/|\.', '-', nickname) - build.dest = ("%05d-%s" % (build.id or 0, nickname[:32])).lower() - - @api.depends('port', 'dest', 'host') - def _compute_domain(self): - for build in self: - build.domain = "%s.%s" % (build.dest, build.host) - - @api.depends_context('batch') - def _compute_build_url(self): - batch = self.env.context.get('batch') - for build in self: - if batch: - build.build_url = "/runbot/batch/%s/build/%s" % (batch.id, build.id) - else: - build.build_url = "/runbot/build/%s" % build.id - - @api.depends('job_start', 'job_end') - def _compute_job_time(self): - """Return the time taken by the tests""" - for build in self: - if build.job_end and build.job_start: - build.job_time = int(dt2time(build.job_end) - dt2time(build.job_start)) - elif build.job_start: - build.job_time = int(time.time() - dt2time(build.job_start)) - else: - build.job_time = 0 - - @api.depends('build_start', 'build_end', 'global_state') - def _compute_build_time(self): - for build in self: - if build.build_end and build.global_state != 'waiting': - build.build_time = int(dt2time(build.build_end) - dt2time(build.build_start)) - elif build.build_start: - build.build_time = int(time.time() - dt2time(build.build_start)) - else: - build.build_time = 0 - - @api.depends('job_start') - def _compute_build_age(self): - """Return the time between job start and now""" - for build in self: - if build.job_start: - build.build_age = int(time.time() - dt2time(build.build_start)) - else: - build.build_age = 0 - - def _rebuild(self, message=None): - """Force a rebuild and return a recordset of builds""" - self.ensure_one() - # TODO don't rebuild if there is a more recent build for this params? 
- values = { - 'params_id': self.params_id.id, - 'build_type': 'rebuild', - } - if self.keep_host: - values['host'] = self.host - values['keep_host'] = True - if self.parent_id: - values.update({ - 'parent_id': self.parent_id.id, - 'description': self.description, - }) - self.orphan_result = True - - new_build = self.create(values) - if self.parent_id: - new_build._github_status() - user = request.env.user if request else self.env.user - new_build._log('rebuild', 'Rebuild initiated by %s%s' % (user.name, (' :%s' % message) if message else '')) - - if self.local_state != 'done': - self._ask_kill('Killed by rebuild requested by %s (%s) (new build:%s)' % (user.name, user.id, new_build.id)) - - if not self.parent_id: - slots = self.env['runbot.batch.slot'].search([('build_id', '=', self.id)]) - for slot in slots: - slot.copy({ - 'build_id': new_build.id, - 'link_type': 'rebuild', - }) - slot.active = False - return new_build - - def _skip(self, reason=None): - """Mark builds ids as skipped""" - if reason: - self._logger('skip %s', reason) - self.write({'local_state': 'done', 'local_result': 'skipped'}) - - def _build_from_dest(self, dest): - if dest_reg.match(dest): - return self.browse(int(dest.split('-')[0])) - return self.browse() - - def _filter_to_clean(self, dest_list, label): - dest_by_builds_ids = defaultdict(list) - ignored = set() - icp = self.env['ir.config_parameter'] - hide_in_logs = icp.get_param('runbot.runbot_db_template', default='template0') - full_gc_days = int(icp.get_param('runbot.full_gc_days', default=365)) - - for dest in dest_list: - build = self._build_from_dest(dest) - if build: - dest_by_builds_ids[build.id].append(dest) - elif dest != hide_in_logs: - ignored.add(dest) - if ignored: - _logger.info('%s (%s) not deleted because not dest format', label, list(ignored)) - builds = self.browse(dest_by_builds_ids) - existing = builds.exists() - remaining = (builds - existing) - if remaining: - dest_list = [dest for sublist in [dest_by_builds_ids[rem_id] for rem_id in remaining.ids] for dest in sublist] - _logger.info('(%s) (%s) not deleted because no corresponding build found', label, " ".join(dest_list)) - for build in existing: - if build.gc_date < fields.datetime.now(): - if build.local_state == 'done': - full = build.gc_date + datetime.timedelta(days=(full_gc_days)) < fields.datetime.now() - for db in dest_by_builds_ids[build.id]: - yield (db, full) - elif build.local_state != 'running': - _logger.warning('db (%s) not deleted because state is not done', " ".join(dest_by_builds_ids[build.id])) - - def _local_cleanup(self, force=False, full=False): - """ - Remove datadir and drop databases of build older than db_gc_days or db_gc_days_child. - If force is set to True, does the same cleaning based on recordset without checking build age. 
- """ - _logger.info('Local cleaning') - _filter = self._filter_to_clean - additionnal_conditions = [] - - if force is True: - def filter_ids(dest_list, label): - for dest in dest_list: - build = self._build_from_dest(dest) - if build and build in self: - yield (dest, full) - elif not build: - _logger.info('%s (%s) skipped because not dest format', label, dest) - _filter = filter_ids - for _id in self.exists().ids: - additionnal_conditions.append("datname like '%s-%%'" % _id) - - existing_db = list_local_dbs(additionnal_conditions=additionnal_conditions) - - for db, _ in _filter(dest_list=existing_db, label='db'): - self._logger('Removing database') - self._local_pg_dropdb(db) - - builds_dir = Path(self.env['runbot.runbot']._root()) / 'build' - - if force is True: - dests = [(build.dest, full) for build in self] - else: - dests = _filter(dest_list=builds_dir.iterdir(), label='workspace') - - for dest, full in dests: - build_dir = Path(builds_dir) / dest - if full: - _logger.info('Removing build dir "%s"', dest) - shutil.rmtree(build_dir, ignore_errors=True) - continue - gcstamp = build_dir / '.gcstamp' - if gcstamp.exists(): - continue - for bdir_file in build_dir.iterdir(): - if bdir_file.is_dir() and bdir_file.name not in ('logs', 'tests'): - shutil.rmtree(bdir_file) - elif bdir_file.name == 'logs': - for log_file_path in (bdir_file / 'logs').iterdir(): - if log_file_path.is_dir(): - shutil.rmtree(log_file_path) - elif log_file_path.name in ('run.txt', 'wake_up.txt') or not log_file_path.name.endswith('.txt'): - log_file_path.unlink() - gcstamp.write_text(f'gc date: {datetime.datetime.now()}') - - def _find_port(self): - # currently used port - host_name = self.env['runbot.host']._get_current_name() - ids = self.search([('local_state', 'not in', ['pending', 'done']), ('host', '=', host_name)]) - ports = set(i['port'] for i in ids.read(['port'])) - - # starting port - icp = self.env['ir.config_parameter'] - port = int(icp.get_param('runbot.runbot_starting_port', default=2000)) - - # find next free port - while port in ports: - port += 3 - return port - - def _logger(self, *l): - l = list(l) - for build in self: - l[0] = "%s %s" % (build.dest, l[0]) - _logger.info(*l) - - def _get_docker_name(self): - self.ensure_one() - return '%s_%s' % (self.dest, self.active_step.name) - - def _init_pendings(self, host): - for build in self: - if build.local_state != 'pending': - raise UserError("Build %s is not pending" % build.id) - if build.host != host.name: - raise UserError("Build %s does not have correct host" % build.id) - # allocate port and schedule first job - values = { - 'port': self._find_port(), - 'job_start': now(), - 'build_start': now(), - 'job_end': False, - } - values.update(build._next_job_values()) - build.write(values) - if not build.active_step: - build._log('_schedule', 'No job in config, doing nothing') - build.local_result = 'warn' - continue - try: - build._log('_schedule', 'Init build environment with config %s ' % build.params_id.config_id.name) - os.makedirs(build._path('logs'), exist_ok=True) - except Exception: - _logger.exception('Failed initiating build %s', build.dest) - build._log('_schedule', 'Failed initiating build') - build._kill(result='ko') - continue - build._run_job() - - def _process_requested_actions(self): - for build in self: - if build.requested_action == 'deathrow': - result = None - if build.local_state != 'running' and build.global_result not in ('warn', 'ko'): - result = 'manually_killed' - build._kill(result=result) - continue - - if 
build.requested_action == 'wake_up': - if docker_state(build._get_docker_name(), build._path()) == 'RUNNING': - build.write({'requested_action': False, 'local_state': 'running'}) - build._log('wake_up', 'Waking up failed, **docker is already running**', log_type='markdown', level='SEPARATOR') - elif not os.path.exists(build._path()): - build.write({'requested_action': False, 'local_state': 'done'}) - build._log('wake_up', 'Impossible to wake-up, **build dir does not exists anymore**', log_type='markdown', level='SEPARATOR') - else: - try: - log_path = build._path('logs', 'wake_up.txt') - - port = self._find_port() - build.write({ - 'job_start': now(), - 'job_end': False, - 'active_step': False, - 'requested_action': False, - 'local_state': 'running', - 'port': port, - }) - build._log('wake_up', '**Waking up build**', log_type='markdown', level='SEPARATOR') - step_ids = build.params_id.config_id.step_ids() - if step_ids and step_ids[-1]._step_state() == 'running': - run_step = step_ids[-1] - else: - run_step = self.env.ref('runbot.runbot_build_config_step_run') - run_step._run_step(build, log_path, force=True) - # reload_nginx will be triggered by _run_run_odoo - except Exception: - _logger.exception('Failed to wake up build %s', build.dest) - build._log('_schedule', 'Failed waking up build', level='ERROR') - build.write({'requested_action': False, 'local_state': 'done'}) - continue - - def _schedule(self): - """schedule the build""" - icp = self.env['ir.config_parameter'].sudo() - for build in self: - if build.local_state not in ['testing', 'running']: - raise UserError("Build %s is not testing/running: %s" % (build.id, build.local_state)) - if build.local_state == 'testing': - # failfast in case of docker error (triggered in database) - if build.triggered_result and not build.active_step.ignore_triggered_result: - worst_result = self._get_worst_result([build.triggered_result, build.local_result]) - if worst_result != build.local_result: - build.local_result = build.triggered_result - build._github_status() # failfast - # check if current job is finished - _docker_state = docker_state(build._get_docker_name(), build._path()) - if _docker_state == 'RUNNING': - timeout = min(build.active_step.cpu_limit, int(icp.get_param('runbot.runbot_timeout', default=10000))) - if build.local_state != 'running' and build.job_time > timeout: - build._log('_schedule', '%s time exceeded (%ss)' % (build.active_step.name if build.active_step else "?", build.job_time)) - build._kill(result='killed') - continue - elif _docker_state in ('UNKNOWN', 'GHOST') and (build.local_state == 'running' or build.active_step._is_docker_step()): # todo replace with docker_start - docker_time = time.time() - dt2time(build.docker_start or build.job_start) - if docker_time < 5: - continue - elif docker_time < 60: - _logger.info('container "%s" seems too take a while to start :%s' % (build.job_time, build._get_docker_name())) - continue - else: - build._log('_schedule', 'Docker with state %s not started after 60 seconds, skipping' % _docker_state, level='ERROR') - # No job running, make result and select nex job - build_values = { - 'job_end': now(), - 'docker_start': False, - } - # make result of previous job - try: - results = build.active_step._make_results(build) - except Exception as e: - if isinstance(e, RunbotException): - message = e.args[0][:300000] - else: - message = 'An error occured while computing results of %s:\n %s' % (build.job, str(e).replace('\\n', '\n').replace("\\'", "'")[:10000]) - _logger.exception(message) 
- build._log('_make_results', message, level='ERROR') - results = {'local_result': 'ko'} - - build_values.update(results) - - # compute statistics before starting next job - build.active_step._make_stats(build) - - build.active_step.log_end(build) - - build_values.update(build._next_job_values()) # find next active_step or set to done - - ending_build = build.local_state not in ('done', 'running') and build_values.get('local_state') in ('done', 'running') - if ending_build: - build.update_build_end() - - build.write(build_values) - if ending_build: - if not build.local_result: # Set 'ok' result if no result set (no tests job on build) - build.local_result = 'ok' - build._logger("No result set, setting ok by default") - build._github_status() - build._run_job() - - - def _run_job(self): - # run job - for build in self: - if build.local_state != 'done': - build._logger('running %s', build.active_step.name) - os.makedirs(build._path('logs'), exist_ok=True) - os.makedirs(build._path('datadir'), exist_ok=True) - try: - build.active_step._run(build) # run should be on build? - except Exception as e: - if isinstance(e, RunbotException): - message = e.args[0] - else: - message = '%s failed running step %s:\n %s' % (build.dest, build.job, str(e).replace('\\n', '\n').replace("\\'", "'")) - _logger.exception(message) - build._log("run", message, level='ERROR') - build._kill(result='ko') - - def _docker_run(self, cmd=None, ro_volumes=None, **kwargs): - self.ensure_one() - _ro_volumes = ro_volumes or {} - ro_volumes = {} - for dest, source in _ro_volumes.items(): - ro_volumes[f'/data/build/{dest}'] = source - if 'image_tag' not in kwargs: - kwargs.update({'image_tag': self.params_id.dockerfile_id.image_tag}) - if kwargs['image_tag'] != 'odoo:DockerDefault': - self._log('Preparing', 'Using Dockerfile Tag %s' % kwargs['image_tag']) - containers_memory_limit = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_containers_memory', 0) - if containers_memory_limit and 'memory' not in kwargs: - kwargs['memory'] = int(float(containers_memory_limit) * 1024 ** 3) - self.docker_start = now() - if self.job_start: - start_step_time = int(dt2time(self.docker_start) - dt2time(self.job_start)) - if start_step_time > 60: - _logger.info('Step took %s seconds before starting docker', start_step_time) - - starting_config = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_default_odoorc') - if isinstance(cmd, Command): - rc_content = cmd.get_config(starting_config=starting_config) - else: - rc_content = starting_config - self.write_file('.odoorc', rc_content) - user = getpass.getuser() - ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc') - kwargs.pop('build_dir', False) # todo check python steps - docker_run(cmd=cmd, build_dir=self._path(), ro_volumes=ro_volumes, **kwargs) - - def _path(self, *l, **kw): - """Return the repo build path""" - self.ensure_one() - build = self - root = self.env['runbot.runbot']._root() - return os.path.join(root, 'build', build.dest, *l) - - def http_log_url(self): - return 'http://%s/runbot/static/build/%s/logs/' % (self.host, self.dest) - - def _server(self, *path): - """Return the absolute path to the direcory containing the server file, adding optional *path""" - self.ensure_one() - commit = self._get_server_commit() - if os.path.exists(commit._source_path('odoo')): - return commit._source_path('odoo', *path) - return commit._source_path('openerp', *path) - - def _docker_source_folder(self, commit): - return commit.repo_id.name - - def _checkout(self): - 
self.ensure_one() # will raise exception if hash not found, we don't want to fail for all build. - # checkout branch - start = time.time() - exports = {} - for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: - build_export_path = self._docker_source_folder(commit) - if build_export_path in exports: - self._log('_checkout', 'Multiple repo have same export path in build, some source may be missing for %s' % build_export_path, level='ERROR') - self._kill(result='ko') - exports[build_export_path] = commit.export(self) - - checkout_time = time.time() - start - if checkout_time > 60: - self._log('checkout', 'Checkout took %s seconds' % int(checkout_time)) - - return exports - - def _get_available_modules(self): - all_modules = dict() - available_modules = defaultdict(list) - # repo_modules = [] - for commit in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: - for (addons_path, module, manifest_file_name) in commit._get_available_modules(): - if module in all_modules: - self._log( - 'Building environment', - '%s is a duplicated modules (found in "%s", already defined in %s)' % ( - module, - commit._source_path(addons_path, module, manifest_file_name), - all_modules[module]._source_path(addons_path, module, manifest_file_name)), - level='WARNING' - ) - else: - available_modules[commit.repo_id].append(module) - all_modules[module] = commit - # return repo_modules, available_modules - return available_modules - - def _get_modules_to_test(self, modules_patterns=''): - self.ensure_one() - - def filter_patterns(patterns, default, all): - default = set(default) - patterns_list = (patterns or '').split(',') - patterns_list = [p.strip() for p in patterns_list] - for pat in patterns_list: - if pat.startswith('-'): - pat = pat.strip('- ') - default -= {mod for mod in default if fnmatch.fnmatch(mod, pat)} - elif pat: - default |= {mod for mod in all if fnmatch.fnmatch(mod, pat)} - return default - - available_modules = [] - modules_to_install = set() - for repo, module_list in self._get_available_modules().items(): - available_modules += module_list - modules_to_install |= filter_patterns(repo.modules, module_list, module_list) - - modules_to_install = filter_patterns(self.params_id.modules, modules_to_install, available_modules) - modules_to_install = filter_patterns(modules_patterns, modules_to_install, available_modules) - - return sorted(modules_to_install) - - def _local_pg_dropdb(self, dbname): - with local_pgadmin_cursor() as local_cr: - pid_col = 'pid' if local_cr.connection.server_version >= 90200 else 'procpid' - query = 'SELECT pg_terminate_backend({}) FROM pg_stat_activity WHERE datname=%s'.format(pid_col) - local_cr.execute(query, [dbname]) - local_cr.execute('DROP DATABASE IF EXISTS "%s"' % dbname) - # cleanup filestore - datadir = appdirs.user_data_dir() - paths = [os.path.join(datadir, pn, 'filestore', dbname) for pn in 'OpenERP Odoo'.split()] - cmd = ['rm', '-rf'] + paths - _logger.info(' '.join(cmd)) - subprocess.call(cmd) - - def _local_pg_createdb(self, dbname): - icp = self.env['ir.config_parameter'] - db_template = icp.get_param('runbot.runbot_db_template', default='template0') - self._local_pg_dropdb(dbname) - _logger.info("createdb %s", dbname) - with local_pgadmin_cursor() as local_cr: - local_cr.execute(sql.SQL("""CREATE DATABASE {} TEMPLATE %s LC_COLLATE 'C' ENCODING 'unicode'""").format(sql.Identifier(dbname)), (db_template,)) - self.env['runbot.database'].create({'name': dbname, 'build_id': self.id}) - - def _log(self, 
func, message, level='INFO', log_type='runbot', path='runbot'): - - if len(message) > 300000: - message = message[:300000] + '[Truncate, message too long]' - - self.ensure_one() - _logger.info("Build %s %s %s", self.id, func, message) - self.env['ir.logging'].create({ - 'build_id': self.id, - 'level': level, - 'type': log_type, - 'name': 'odoo.runbot', - 'message': message, - 'path': path, - 'func': func, - 'line': '0', - }) - - def _kill(self, result=None): - host_name = self.env['runbot.host']._get_current_name() - for build in self: - if build.host != host_name: - continue - build._log('kill', 'Kill build %s' % build.dest) - docker_stop(build._get_docker_name(), build._path()) - v = {'local_state': 'done', 'requested_action': False, 'active_step': False, 'job_end': now()} - if not build.build_end: - v['build_end'] = now() - if result: - v['local_result'] = result - build.write(v) - self.env.cr.commit() - build._github_status() - self.invalidate_cache() - - def _ask_kill(self, lock=True, message=None): - # if build remains in same bundle, it's ok like that - # if build can be cross bundle, need to check number of ref to build - if lock: - self.env.cr.execute("""SELECT id FROM runbot_build WHERE parent_path like %s FOR UPDATE""", ['%s%%' % self.parent_path]) - self.ensure_one() - user = request.env.user if request else self.env.user - uid = user.id - build = self - message = message or 'Killing build %s, requested by %s (user #%s)' % (build.dest, user.name, uid) - build._log('_ask_kill', message) - if build.local_state == 'pending': - build._skip() - elif build.local_state in ['testing', 'running']: - build.requested_action = 'deathrow' - for child in build.children_ids: - child._ask_kill(lock=False) - - def _wake_up(self): - if self.local_state != 'done': - self._log('wake_up', 'Impossibe to wake up, state is not done') - else: - self.requested_action = 'wake_up' - - def _get_server_commit(self): - """ - returns a commit of the first repo containing server files found in commits or in build commits - the commits param is not used in code base but could be usefull for jobs and crons - """ - for commit in (self.env.context.get('defined_commit_ids') or self.params_id.commit_ids): - if commit.repo_id.server_files: - return commit - raise ValidationError('No repo found with defined server_files') - - def _get_addons_path(self): - for commit in (self.env.context.get('defined_commit_ids') or self.params_id.commit_ids): - if not commit.repo_id.manifest_files: - continue # skip repo without addons - source_path = self._docker_source_folder(commit) - for addons_path in (commit.repo_id.addons_paths or '').split(','): - if os.path.isdir(commit._source_path(addons_path)): - yield os.path.join(source_path, addons_path).strip(os.sep) - - def _get_server_info(self, commit=None): - commit = commit or self._get_server_commit() - for server_file in commit.repo_id.server_files.split(','): - if os.path.isfile(commit._source_path(server_file)): - return (commit, server_file) - _logger.error('None of %s found in commit, actual commit content:\n %s' % (commit.repo_id.server_files, os.listdir(commit._source_path()))) - raise RunbotException('No server found in %s' % commit.dname) - - def _cmd(self, python_params=None, py_version=None, local_only=True, sub_command=None): - """Return a list describing the command to start the build - """ - self.ensure_one() - build = self - python_params = python_params or [] - py_version = py_version if py_version is not None else build._get_py_version() - pres = [] - for 
commit_id in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids: - if not self.params_id.skip_requirements and os.path.isfile(commit_id._source_path('requirements.txt')): - repo_dir = self._docker_source_folder(commit_id) - requirement_path = os.path.join(repo_dir, 'requirements.txt') - pres.append([f'python{py_version}', '-m', 'pip', 'install','--user', '--progress-bar', 'off', '-r', f'{requirement_path}']) - - addons_paths = self._get_addons_path() - (server_commit, server_file) = self._get_server_info() - server_dir = self._docker_source_folder(server_commit) - - # commandline - cmd = ['python%s' % py_version] + python_params + [os.path.join(server_dir, server_file)] - if sub_command: - cmd += [sub_command] - cmd += ['--addons-path', ",".join(addons_paths)] - # options - config_path = build._server("tools/config.py") - if grep(config_path, "no-xmlrpcs"): # move that to configs ? - cmd.append("--no-xmlrpcs") - if grep(config_path, "no-netrpc"): - cmd.append("--no-netrpc") - - command = Command(pres, cmd, [], cmd_checker=build) - - # use the username of the runbot host to connect to the databases - command.add_config_tuple('db_user', '%s' % pwd.getpwuid(os.getuid()).pw_name) - - if local_only: - if grep(config_path, "--http-interface"): - command.add_config_tuple("http_interface", "127.0.0.1") - elif grep(config_path, "--xmlrpc-interface"): - command.add_config_tuple("xmlrpc_interface", "127.0.0.1") - - if grep(config_path, "log-db"): - logdb_uri = self.env['ir.config_parameter'].get_param('runbot.runbot_logdb_uri') - logdb = self.env.cr.dbname - if logdb_uri: # this looks useless - logdb = '%s' % logdb_uri - command.add_config_tuple("log_db", "%s" % logdb) - if grep(config_path, 'log-db-level'): - command.add_config_tuple("log_db_level", '25') - - if grep(config_path, "data-dir"): - datadir = build._path('datadir') - if not os.path.exists(datadir): - os.mkdir(datadir) - command.add_config_tuple("data_dir", '/data/build/datadir') - - return command - - def _cmd_check(self, cmd): - """ - Check the cmd right before creating the build command line executed in - a Docker container. If a database creation is found in the cmd, a - 'runbot.database' is created. 
- This method is intended to be called from cmd itself - """ - if '-d' in cmd: - dbname = cmd[cmd.index('-d') + 1] - self.env['runbot.database'].create({ - 'name': dbname, - 'build_id': self.id - }) - - def _next_job_values(self): - self.ensure_one() - step_ids = self.params_id.config_id.step_ids() - if not step_ids: # no job to do, build is done - return {'active_step': False, 'local_state': 'done'} - - if not self.active_step and self.local_state != 'pending': - # means that a step has been run manually without using config - return {'active_step': False, 'local_state': 'done'} - - if not self.active_step: - next_index = 0 - else: - if self.active_step not in step_ids: - self._log('run', 'Config was modified and current step does not exists anymore, skipping.', level='ERROR') - return {'active_step': False, 'local_state': 'done', 'local_result': self._get_worst_result([self.local_result, 'ko'])} - next_index = step_ids.index(self.active_step) + 1 - - while True: - if next_index >= len(step_ids): # final job, build is done - return {'active_step': False, 'local_state': 'done'} - new_step = step_ids[next_index] # job to do, state is job_state (testing or running) - if new_step.domain_filter and not self.filtered_domain(safe_eval(new_step.domain_filter)): - - self._log('run', '**Skipping** step ~~%s~~ from config **%s**' % (new_step.name, self.params_id.config_id.name), log_type='markdown', level='SEPARATOR') - next_index += 1 - continue - break - return {'active_step': new_step.id, 'local_state': new_step._step_state()} - - def _get_py_version(self): - """return the python name to use from build batch""" - (server_commit, server_file) = self._get_server_info() - server_path = server_commit._source_path(server_file) - with open(server_path, 'r') as f: - if f.readline().strip().endswith('python3'): - return '3' - return '' - - def _parse_logs(self): - """ Parse build logs to classify errors """ - BuildError = self.env['runbot.build.error'] - # only parse logs from builds in error and not already scanned - builds_to_scan = self.search([('id', 'in', self.ids), ('local_result', '=', 'ko'), ('build_error_ids', '=', False)]) - ir_logs = self.env['ir.logging'].search([('level', '=', 'ERROR'), ('type', '=', 'server'), ('build_id', 'in', builds_to_scan.ids)]) - return BuildError._parse_logs(ir_logs) - - def is_file(self, file, mode='r'): - file_path = self._path(file) - return os.path.exists(file_path) - - def read_file(self, file, mode='r'): - file_path = self._path(file) - try: - with open(file_path, mode) as f: - return f.read() - except Exception as e: - self._log('readfile', 'exception: %s' % e) - return False - - def write_file(self, file, data, mode='w'): - file_path = self._path(file) - file_dir = os.path.split(file_path)[0] - os.makedirs(file_dir, exist_ok=True) - try: - with open(file_path, mode) as f: - f.write(data) - except Exception as e: - self._log('write_file', 'exception: %s' % e) - return False - - def make_dirs(self, dir_path): - full_path = self._path(dir_path) - try: - os.makedirs(full_path, exist_ok=True) - except Exception as e: - self._log('make_dirs', 'exception: %s' % e) - return False - - def build_type_label(self): - self.ensure_one() - return dict(self.fields_get('build_type', 'selection')['build_type']['selection']).get(self.build_type, self.build_type) - - def get_formated_job_time(self): - return s2human(self.job_time) - - def get_formated_build_time(self): - return s2human(self.build_time) - - def get_formated_build_age(self): - return s2human(self.build_age) - - 
def get_color_class(self): - - if self.global_result == 'ko': - return 'danger' - if self.global_result == 'warn': - return 'warning' - - if self.global_state == 'pending': - return 'default' - if self.global_state in ('testing', 'waiting'): - return 'info' - - if self.global_result == 'ok': - return 'success' - - if self.global_result in ('skipped', 'killed', 'manually_killed'): - return 'killed' - - def _github_status(self): - """Notify github of failed/successful builds""" - for build in self: - # TODO maybe avoid to send status if build is killable (another new build exist and will send the status) - if build.parent_id: - if build.orphan_result: - _logger.info('Skipping result for orphan build %s', self.id) - else: - build.parent_id._github_status() - else: - trigger = self.params_id.trigger_id - if not trigger.ci_context: - continue - - desc = trigger.ci_description or " (runtime %ss)" % (build.job_time,) - if build.params_id.used_custom_trigger: - state = 'error' - desc = "This build used custom config. Remove custom trigger to restore default ci" - elif build.global_result in ('ko', 'warn'): - state = 'failure' - elif build.global_state in ('pending', 'testing'): - state = 'pending' - elif build.global_state in ('running', 'done'): - state = 'error' - if build.global_result == 'ok': - state = 'success' - else: - _logger.info("skipping github status for build %s ", build.id) - continue - - target_url = trigger.ci_url or "%s/runbot/build/%s" % (self.get_base_url(), build.id) - for build_commit in self.params_id.commit_link_ids: - commit = build_commit.commit_id - if 'base_' not in build_commit.match_type and commit.repo_id in trigger.repo_ids: - commit._github_status(build, trigger.ci_context, state, target_url, desc) - - def parse_config(self): - return set(findall(self._server("tools/config.py"), '--[\w-]+', )) diff --git a/runbot/models/build_config.py b/runbot/models/build_config.py deleted file mode 100644 index d7e2d6d7..00000000 --- a/runbot/models/build_config.py +++ /dev/null @@ -1,1112 +0,0 @@ -import base64 -import glob -import json -import logging -import fnmatch -import re -import shlex -import time -from unidiff import PatchSet -from ..common import now, grep, time2str, rfind, s2human, os, RunbotException -from ..container import docker_get_gateway_ip, Command -from odoo import models, fields, api -from odoo.exceptions import UserError, ValidationError -from odoo.tools.safe_eval import safe_eval, test_python_expr - -_logger = logging.getLogger(__name__) - -_re_error = r'^(?:\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ (?:ERROR|CRITICAL) )|(?:Traceback \(most recent call last\):)$' -_re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING ' - -PYTHON_DEFAULT = "# type python code here\n\n\n\n\n\n" - -class ReProxy(): - @classmethod - def match(cls, *args, **kwrags): - return re.match(*args, **kwrags) - - @classmethod - def search(cls, *args, **kwrags): - return re.search(*args, **kwrags) - - @classmethod - def compile(cls, *args, **kwrags): - return re.compile(*args, **kwrags) - - @classmethod - def findall(cls, *args, **kwrags): - return re.findall(*args, **kwrags) - - VERBOSE = re.VERBOSE - MULTILINE = re.MULTILINE - -class Config(models.Model): - _name = 'runbot.build.config' - _description = "Build config" - _inherit = "mail.thread" - - name = fields.Char('Config name', required=True, tracking=True, help="Unique name for config please use trigram as postfix for custom configs") - - description = fields.Char('Config description') - step_order_ids = 
fields.One2many('runbot.build.config.step.order', 'config_id', copy=True) - protected = fields.Boolean('Protected', default=False, tracking=True) - group = fields.Many2one('runbot.build.config', 'Configuration group', help="Group of config's and config steps") - group_name = fields.Char('Group name', related='group.name') - - @api.model_create_single - def create(self, values): - res = super(Config, self).create(values) - res._check_step_ids_order() - return res - - def write(self, values): - res = super(Config, self).write(values) - self._check_step_ids_order() - return res - - def copy(self): - # remove protection on copy - copy = super(Config, self).copy() - copy.sudo().write({'protected': False}) - return copy - - def unlink(self): - super(Config, self).unlink() - - def step_ids(self): - if self: - self.ensure_one() - return [ordered_step.step_id for ordered_step in self.step_order_ids.sorted('sequence')] - - def _check_step_ids_order(self): - install_job = False - step_ids = self.step_ids() - for step in step_ids: - if step.job_type == 'install_odoo': - install_job = True - if step.job_type == 'run_odoo': - if step != step_ids[-1]: - raise UserError('Jobs of type run_odoo should be the last one') - if not install_job: - raise UserError('Jobs of type run_odoo should be preceded by a job of type install_odoo') - self._check_recustion() - - def _check_recustion(self, visited=None): - visited = visited or [] - recursion = False - if self in visited: - recursion = True - visited.append(self) - if recursion: - raise UserError('Impossible to save config, recursion detected with path: %s' % ">".join([v.name for v in visited])) - for step in self.step_ids(): - if step.job_type == 'create_build': - for create_config in step.create_config_ids: - create_config._check_recustion(visited[:]) - - -class ConfigStepUpgradeDb(models.Model): - _name = 'runbot.config.step.upgrade.db' - _description = "Config Step Upgrade Db" - - step_id = fields.Many2one('runbot.build.config.step', 'Step') - config_id = fields.Many2one('runbot.build.config', 'Config') - db_pattern = fields.Char('Db suffix pattern') - min_target_version_id = fields.Many2one('runbot.version', "Minimal target version_id") - -TYPES = [ - ('install_odoo', 'Test odoo'), - ('run_odoo', 'Run odoo'), - ('python', 'Python code'), - ('create_build', 'Create build'), - ('configure_upgrade', 'Configure Upgrade'), - ('configure_upgrade_complement', 'Configure Upgrade Complement'), - ('test_upgrade', 'Test Upgrade'), - ('restore', 'Restore') - ] -class ConfigStep(models.Model): - _name = 'runbot.build.config.step' - _description = "Config step" - _inherit = 'mail.thread' - - # general info - name = fields.Char('Step name', required=True, tracking=True, help="Unique name for step please use trigram as postfix for custom step_ids") - domain_filter = fields.Char('Domain filter', tracking=True) - job_type = fields.Selection(TYPES, default='install_odoo', required=True, tracking=True, ondelete={t[0]: 'cascade' for t in [TYPES]}) - protected = fields.Boolean('Protected', default=False, tracking=True) - default_sequence = fields.Integer('Sequence', default=100, tracking=True) # or run after? # or in many2many rel? 
- step_order_ids = fields.One2many('runbot.build.config.step.order', 'step_id') - group = fields.Many2one('runbot.build.config', 'Configuration group', help="Group of config's and config steps") - group_name = fields.Char('Group name', related='group.name') - make_stats = fields.Boolean('Make stats', default=False) - build_stat_regex_ids = fields.Many2many('runbot.build.stat.regex', string='Stats Regexes') - # install_odoo - create_db = fields.Boolean('Create Db', default=True, tracking=True) # future - custom_db_name = fields.Char('Custom Db Name', tracking=True) # future - install_modules = fields.Char('Modules to install', help="List of module patterns to install, use * to install all available modules, prefix the pattern with dash to remove the module.", default='') - db_name = fields.Char('Db Name', compute='_compute_db_name', inverse='_inverse_db_name', tracking=True) - cpu_limit = fields.Integer('Cpu limit', default=3600, tracking=True) - coverage = fields.Boolean('Coverage', default=False, tracking=True) - paths_to_omit = fields.Char('Paths to omit from coverage', tracking=True) - flamegraph = fields.Boolean('Allow Flamegraph', default=False, tracking=True) - test_enable = fields.Boolean('Test enable', default=True, tracking=True) - test_tags = fields.Char('Test tags', help="comma separated list of test tags", tracking=True) - enable_auto_tags = fields.Boolean('Allow auto tag', default=False, tracking=True) - sub_command = fields.Char('Subcommand', tracking=True) - extra_params = fields.Char('Extra cmd args', tracking=True) - additionnal_env = fields.Char('Extra env', help='Example: foo="bar";bar="foo". Cannot contains \' ', tracking=True) - # python - python_code = fields.Text('Python code', tracking=True, default=PYTHON_DEFAULT) - python_result_code = fields.Text('Python code for result', tracking=True, default=PYTHON_DEFAULT) - ignore_triggered_result = fields.Boolean('Ignore error triggered in logs', tracking=True, default=False) - running_job = fields.Boolean('Job final state is running', default=False, help="Docker won't be killed if checked") - # create_build - create_config_ids = fields.Many2many('runbot.build.config', 'runbot_build_config_step_ids_create_config_ids_rel', string='New Build Configs', tracking=True, index=True) - number_builds = fields.Integer('Number of build to create', default=1, tracking=True) - - force_host = fields.Boolean('Use same host as parent for children', default=False, tracking=True) # future - make_orphan = fields.Boolean('No effect on the parent result', help='Created build result will not affect parent build result', default=False, tracking=True) - - # upgrade - # 1. define target - upgrade_to_master = fields.Boolean() # upgrade niglty + (future migration? no, need last master, not nightly master) - upgrade_to_current = fields.Boolean(help="If checked, only upgrade to current will be used, other options will be ignored") - upgrade_to_major_versions = fields.Boolean() # upgrade (no master) - upgrade_to_all_versions = fields.Boolean() # upgrade niglty (no master) - upgrade_to_version_ids = fields.Many2many('runbot.version', relation='runbot_upgrade_to_version_ids', string='Forced version to use as target') - # 2. 
define source from target - upgrade_from_current = fields.Boolean(help="If checked, only upgrade from current will be used, other options will be ignored. Template should be installed in the same build") - upgrade_from_previous_major_version = fields.Boolean() # 13.0 - upgrade_from_last_intermediate_version = fields.Boolean() # 13.3 - upgrade_from_all_intermediate_version = fields.Boolean() # 13.2 # 13.1 - upgrade_from_version_ids = fields.Many2many('runbot.version', relation='runbot_upgrade_from_version_ids', string='Forced version to use as source (cartesian with target)') - - upgrade_flat = fields.Boolean("Flat", help="Take all decisions in one build") - - upgrade_config_id = fields.Many2one('runbot.build.config',string='Upgrade Config', tracking=True, index=True) - upgrade_dbs = fields.One2many('runbot.config.step.upgrade.db', 'step_id', tracking=True) - - restore_download_db_suffix = fields.Char('Download db suffix') - restore_rename_db_suffix = fields.Char('Rename db suffix') - - @api.constrains('python_code') - def _check_python_code(self): - return self._check_python_field('python_code') - - @api.constrains('python_result_code') - def _check_python_result_code(self): - return self._check_python_field('python_result_code') - - def _check_python_field(self, field_name): - for step in self.sudo().filtered(field_name): - msg = test_python_expr(expr=step[field_name].strip(), mode="exec") - if msg: - raise ValidationError(msg) - - @api.onchange('sub_command') - def _onchange_number_builds(self): - if self.sub_command: - self.install_modules = '-*' - self.test_enable = False - self.create_db = False - - @api.depends('name', 'custom_db_name') - def _compute_db_name(self): - for step in self: - step.db_name = step.custom_db_name or step.name - - def _inverse_db_name(self): - for step in self: - step.custom_db_name = step.db_name - - def copy(self): - # remove protection on copy - copy = super(ConfigStep, self).copy() - copy._write({'protected': False}) - return copy - - @api.model_create_single - def create(self, values): - self._check(values) - return super(ConfigStep, self).create(values) - - def write(self, values): - self._check(values) - return super(ConfigStep, self).write(values) - - def unlink(self): - if self.protected: - raise UserError('Protected step') - super(ConfigStep, self).unlink() - - def _check(self, values): - if 'name' in values: - name_reg = r'^[a-zA-Z0-9\-_]*$' - if not re.match(name_reg, values.get('name')): - raise UserError('Name cannot contain special chars or spaces except "_" and "-"') - if not self.env.user.has_group('runbot.group_build_config_administrator'): - if (values.get('job_type') == 'python' or ('python_code' in values and values['python_code'] and values['python_code'] != PYTHON_DEFAULT)): - raise UserError('cannot create or edit config step of type python code') - if (values.get('job_type') == 'python' or ('python_result_code' in values and values['python_result_code'] and values['python_result_code'] != PYTHON_DEFAULT)): - raise UserError('cannot create or edit config step of type python code') - if (values.get('extra_params')): - reg = r'^[a-zA-Z0-9\-_ "]*$' - if not re.match(reg, values.get('extra_params')): - _logger.warning('%s tried to create a non-supported test_param %s' % (self.env.user.name, values.get('extra_params'))) - raise UserError('Invalid extra_params on config step') - - def _run(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - build.write({'job_start': now(), 'job_end': False}) # state, ...
- log_link = '' - if self._has_log(): - log_url = f'http://{build.host}' - url = f"{log_url}/runbot/static/build/{build.dest}/logs/{self.name}.txt" - log_link = f'[@icon-file-text]({url})' - build._log('run', 'Starting step **%s** from config **%s** %s' % (self.name, build.params_id.config_id.name, log_link), log_type='markdown', level='SEPARATOR') - self._run_step(build, log_path) - - def _run_step(self, build, log_path, **kwargs): - build.log_counter = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_maxlogs', 100) - run_method = getattr(self, '_run_%s' % self.job_type) - docker_params = run_method(build, log_path, **kwargs) - if docker_params: - build._docker_run(**docker_params) - - def _run_create_build(self, build, log_path): - count = 0 - config_data = build.params_id.config_data - config_ids = config_data.get('create_config_ids', self.create_config_ids) - - child_data_list = config_data.get('child_data',[{}]) - if not isinstance(child_data_list, list): - child_data_list = [child_data_list] - - for child_data in child_data_list: - for create_config in self.env['runbot.build.config'].browse(child_data.get('config_id', config_ids.ids)): - _child_data = {**child_data, 'config_id': create_config} - for _ in range(_child_data.get('number_build', self.number_builds)): - count += 1 - if count > 200: - build._logger('Too much build created') - break - child = build._add_child(_child_data, orphan=self.make_orphan) - build._log('create_build', 'created with config %s' % create_config.name, log_type='subbuild', path=str(child.id)) - - - def make_python_ctx(self, build): - return { - 'self': self, - # 'fields': fields, - # 'models': models, - 'build': build, - '_logger': _logger, - 'log_path': build._path('logs', '%s.txt' % self.name), - 'glob': glob.glob, - 'Command': Command, - 're': ReProxy, - 'grep': grep, - 'rfind': rfind, - 'json_loads': json.loads, - 'PatchSet': PatchSet, - } - - def _run_python(self, build, log_path): - eval_ctx = self.make_python_ctx(build) - try: - safe_eval(self.python_code.strip(), eval_ctx, mode="exec", nocopy=True) - return eval_ctx.get('docker_params') - except ValueError as e: - save_eval_value_error_re = r'<class \'odoo.addons.runbot.models.repo.RunbotException\'>: "(.*)" while evaluating\n.*' - message = e.args[0] - groups = re.match(save_eval_value_error_re, message) - if groups: - build._log("run", groups[1], level='ERROR') - build._kill(result='ko') - else: - raise - - def _is_docker_step(self): - if not self: - return False - self.ensure_one() - return self.job_type in ('install_odoo', 'run_odoo', 'restore', 'test_upgrade') or (self.job_type == 'python' and ('docker_params =' in self.python_code or '_run_' in self.python_code)) - - def _run_run_odoo(self, build, log_path, force=False): - if not force: - if build.parent_id: - build._log('_run_run_odoo', 'build has a parent, skip run') - return - if build.no_auto_run: - build._log('_run_run_odoo', 'build auto run is disabled, skip run') - return - - exports = build._checkout() - - # adjust job_end to record an accurate job_20 job_time - build._log('run', 'Start running build %s' % build.dest) - # run server - cmd = build._cmd(local_only=False) - - available_options = build.parse_config() - - if "--workers" in available_options: - cmd += ["--workers", "2"] - - if "--gevent-port" in available_options: - cmd += ["--gevent-port", "8070"] - - elif "--longpolling-port" in available_options: - cmd += ["--longpolling-port", "8070"] - - if "--max-cron-threads" in available_options: - cmd += 
["--max-cron-threads", "1"] - - - install_steps = [step.db_name for step in build.params_id.config_id.step_ids() if step.job_type == 'install_odoo'] - db_name = build.params_id.config_data.get('db_name') or 'all' in install_steps and 'all' or install_steps[0] - # we need to have at least one job of type install_odoo to run odoo, take the last one for db_name. - cmd += ['-d', '%s-%s' % (build.dest, db_name)] - - icp = self.env['ir.config_parameter'].sudo() - if "--proxy-mode" in available_options: - cmd += ["--proxy-mode"] - - if "--db-filter" in available_options: - cmd += ['--db-filter', '%d.*$'] - - if "--smtp" in available_options: - smtp_host = docker_get_gateway_ip() - if smtp_host: - cmd += ['--smtp', smtp_host] - - extra_params = self.extra_params or '' - if extra_params: - cmd.extend(shlex.split(extra_params)) - env_variables = self.additionnal_env.split(';') if self.additionnal_env else [] - - docker_name = build._get_docker_name() - build_port = build.port - self.env.cr.commit() # commit before docker run to be 100% sure that db state is consistent with dockers - self.invalidate_cache() - self.env['runbot.runbot']._reload_nginx() - return dict(cmd=cmd, log_path=log_path, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables) - - - def _run_install_odoo(self, build, log_path): - exports = build._checkout() - - modules_to_install = self._modules_to_install(build) - mods = ",".join(modules_to_install) - python_params = [] - py_version = build._get_py_version() - if self.coverage: - build.coverage = True - coverage_extra_params = self._coverage_params(build, modules_to_install) - python_params = ['-m', 'coverage', 'run', '--branch', '--source', '/data/build'] + coverage_extra_params - elif self.flamegraph: - python_params = ['-m', 'flamegraph', '-o', self._perfs_data_path()] - cmd = build._cmd(python_params, py_version, sub_command=self.sub_command) - # create db if needed - db_suffix = build.params_id.config_data.get('db_name') or (build.params_id.dump_db.db_suffix if not self.create_db else False) or self.db_name - db_name = '%s-%s' % (build.dest, db_suffix) - if self.create_db: - build._local_pg_createdb(db_name) - cmd += ['-d', db_name] - # list module to install - extra_params = build.params_id.extra_params or self.extra_params or '' - if mods and '-i' not in extra_params: - cmd += ['-i', mods] - config_path = build._server("tools/config.py") - - available_options = build.parse_config() - if self.test_enable: - if "--test-enable" in available_options: - cmd.extend(['--test-enable']) - else: - build._log('test_all', 'Installing modules without testing', level='WARNING') - test_tags_in_extra = '--test-tags' in extra_params - if self.test_tags or test_tags_in_extra: - if "--test-tags" in available_options: - if not test_tags_in_extra: - test_tags = self.test_tags.replace(' ', '') - if self.enable_auto_tags: - auto_tags = self.env['runbot.build.error'].disabling_tags() - test_tags = ','.join(test_tags.split(',') + auto_tags) - cmd.extend(['--test-tags', test_tags]) - else: - build._log('test_all', 'Test tags given but not supported') - elif self.enable_auto_tags and self.test_enable: - if grep(config_path, "[/module][:class]"): - auto_tags = self.env['runbot.build.error'].disabling_tags() - if auto_tags: - test_tags = ','.join(auto_tags) - cmd.extend(['--test-tags', test_tags]) - - if "--screenshots" in available_options: - cmd.add_config_tuple('screenshots', '/data/build/tests') - - if "--screencasts" in 
available_options and self.env['ir.config_parameter'].sudo().get_param('runbot.enable_screencast', False): - cmd.add_config_tuple('screencasts', '/data/build/tests') - - cmd.append('--stop-after-init') # install job should always finish - if '--log-level' not in extra_params: - cmd.append('--log-level=test') - cmd.append('--max-cron-threads=0') - - if extra_params: - cmd.extend(shlex.split(extra_params)) - - cmd.finals.extend(self._post_install_commands(build, modules_to_install, py_version)) # coverage post, extra-checks, ... - dump_dir = '/data/build/logs/%s/' % db_name - sql_dest = '%s/dump.sql' % dump_dir - filestore_path = '/data/build/datadir/filestore/%s' % db_name - filestore_dest = '%s/filestore/' % dump_dir - zip_path = '/data/build/logs/%s.zip' % db_name - cmd.finals.append(['pg_dump', db_name, '>', sql_dest]) - cmd.finals.append(['cp', '-r', filestore_path, filestore_dest]) - cmd.finals.append(['cd', dump_dir, '&&', 'zip', '-rmq9', zip_path, '*']) - infos = '{\n "db_name": "%s",\n "build_id": %s,\n "shas": [%s]\n}' % (db_name, build.id, ', '.join(['"%s"' % build_commit.commit_id.dname for build_commit in build.params_id.commit_link_ids])) - build.write_file('logs/%s/info.json' % db_name, infos) - - if self.flamegraph: - cmd.finals.append(['flamegraph.pl', '--title', 'Flamegraph %s for build %s' % (self.name, build.id), self._perfs_data_path(), '>', self._perfs_data_path(ext='svg')]) - cmd.finals.append(['gzip', '-f', self._perfs_data_path()]) # keep the data but gzip it to save disk space - max_timeout = int(self.env['ir.config_parameter'].get_param('runbot.runbot_timeout', default=10000)) - timeout = min(self.cpu_limit, max_timeout) - env_variables = self.additionnal_env.split(';') if self.additionnal_env else [] - return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables) - - def _upgrade_create_childs(self): - pass - - def _run_configure_upgrade_complement(self, build, *args): - """ - Parameters: - - upgrade_dumps_trigger_id: a configure_upgrade step - - A complement aims to test the exact opposite of an upgrade trigger. - Ignore configs and categories: only focus on versions.
- """ - param = build.params_id - version = param.version_id - builds_references = param.builds_reference_ids - builds_references_by_version_id = {b.params_id.version_id.id: b for b in builds_references} - upgrade_complement_step = build.params_id.trigger_id.upgrade_dumps_trigger_id.upgrade_step_id - version_domain = build.params_id.trigger_id.upgrade_dumps_trigger_id.get_version_domain() - valid_targets = build.browse() - next_versions = version.next_major_version_id | version.next_intermediate_version_ids - if version_domain: # filter only on version where trigger is enabled - next_versions = next_versions.filtered_domain(version_domain) - if next_versions: - for next_version in next_versions: - if version in upgrade_complement_step._get_upgrade_source_versions(next_version): - valid_targets |= (builds_references_by_version_id.get(next_version.id) or build.browse()) - - for target in valid_targets: - build._log('', 'Checking upgrade to [%s](%s)' % (target.params_id.version_id.name, target.build_url), log_type='markdown') - for upgrade_db in upgrade_complement_step.upgrade_dbs: - if not upgrade_db.min_target_version_id or upgrade_db.min_target_version_id.number <= target.params_id.version_id.number: - # note: here we don't consider the upgrade_db config here - dbs = build.database_ids.sorted('db_suffix') - for db in self._filter_upgrade_database(dbs, upgrade_db.db_pattern): - child = build._add_child({ - 'upgrade_to_build_id': target.id, - 'upgrade_from_build_id': build, # always current build - 'dump_db': db.id, - 'config_id': upgrade_complement_step.upgrade_config_id - }) - child.description = 'Testing migration from %s to %s using parent db %s' % ( - version.name, - target.params_id.version_id.name, - db.name, - ) - child._log('', 'This build tests change of schema in stable version testing upgrade to %s' % target.params_id.version_id.name) - - def _run_configure_upgrade(self, build, log_path): - """ - Source/target parameters: - - upgrade_to_current | (upgrade_to_master + (upgrade_to_major_versions | upgrade_to_all_versions)) - - upgrade_from_previous_major_version + (upgrade_from_all_intermediate_version | upgrade_from_last_intermediate_version) - - upgrade_dbs - - upgrade_to_version_ids (use instead of upgrade_to flags) - - upgrade_from_version_ids (use instead of upgrade_from flags) - - Other parameters - - upgrade_flat - - upgrade_config_id - - Create subbuilds with parameters defined for a step of type test_upgrade: - - upgrade_to_build_id - - upgrade_from_build_id - - dump_db - - config_id (upgrade_config_id) - - If upgrade_flat is False, a level of child will be create for target, source and dbs - (if there is multiple choices). - If upgrade_flat is True, all combination will be computed locally and only one level of children will be added to caller build. - - Note: - - This step should be alone in a config since this config is recursive - - A typical upgrade_config_id should have a restore step and a test_upgrade step. 
- """ - assert len(build.parent_path.split('/')) < 6 # small security to avoid recursion loop, 6 is arbitrary - param = build.params_id - end = False - target_builds = False - source_builds_by_target = {} - builds_references = param.builds_reference_ids - builds_references_by_version_id = {b.params_id.version_id.id: b for b in builds_references} - if param.upgrade_to_build_id: - target_builds = param.upgrade_to_build_id - else: - if self.upgrade_to_current: - target_builds = build - else: - target_builds = build.browse() - if self.upgrade_to_version_ids: - for version in self.upgrade_to_version_ids: - target_builds |= builds_references_by_version_id.get(version.id) or build.browse() - else: - master_build = builds_references.filtered(lambda b: b.params_id.version_id.name == 'master') - base_builds = (builds_references - master_build) - if self.upgrade_to_master: - target_builds = master_build - if self.upgrade_to_major_versions: - target_builds |= base_builds.filtered(lambda b: b.params_id.version_id.is_major) - elif self.upgrade_to_all_versions: - target_builds |= base_builds - target_builds = target_builds.sorted(lambda b: b.params_id.version_id.number) - if target_builds: - build._log('', 'Testing upgrade targeting %s' % ', '.join(target_builds.mapped('params_id.version_id.name'))) - if not target_builds: - build._log('_run_configure_upgrade', 'No reference build found with correct target in availables references, skipping. %s' % builds_references.mapped('params_id.version_id.name'), level='ERROR') - end = True - elif len(target_builds) > 1 and not self.upgrade_flat: - for target_build in target_builds: - build._add_child({'upgrade_to_build_id': target_build.id}) - end = True - if end: - return # replace this by a python job friendly solution - - for target_build in target_builds: - if param.upgrade_from_build_id: - source_builds_by_target[target_build] = param.upgrade_from_build_id - else: - if self.upgrade_from_current: - from_builds = build - else: - target_version = target_build.params_id.version_id - from_builds = self._get_upgrade_source_builds(target_version, builds_references_by_version_id) - source_builds_by_target[target_build] = from_builds - if from_builds: - build._log('', 'Defining source version(s) for %s: %s' % (target_build.params_id.version_id.name, ', '.join(source_builds_by_target[target_build].mapped('params_id.version_id.name')))) - if not from_builds: - build._log('_run_configure_upgrade', 'No source version found for %s, skipping' % target_version.name, level='INFO') - elif not self.upgrade_flat: - for from_build in from_builds: - build._add_child({'upgrade_to_build_id': target_build.id, 'upgrade_from_build_id': from_build.id}) - end = True - - if end: - return # replace this by a python job friendly solution - - assert not param.dump_db - for target, sources in source_builds_by_target.items(): - for source in sources: - valid_databases = [] - if not self.upgrade_dbs: - valid_databases = source.database_ids - for upgrade_db in self.upgrade_dbs: - if not upgrade_db.min_target_version_id or upgrade_db.min_target_version_id.number <= target.params_id.version_id.number: - config_id = upgrade_db.config_id - dump_builds = build.search([('id', 'child_of', source.id), ('params_id.config_id', '=', config_id.id), ('orphan_result', '=', False)]) - # this search is not optimal - if not dump_builds: - build._log('_run_configure_upgrade', 'No child build found with config %s in %s' % (config_id.name, source.id), level='ERROR') - dbs = 
dump_builds.database_ids.sorted('db_suffix') - valid_databases += list(self._filter_upgrade_database(dbs, upgrade_db.db_pattern)) - if not valid_databases: - build._log('_run_configure_upgrade', 'No datase found for pattern %s' % (upgrade_db.db_pattern), level='ERROR') - for db in valid_databases: - #commit_ids = build.params_id.commit_ids - #if commit_ids != target.params_id.commit_ids: - # repo_ids = commit_ids.mapped('repo_id') - # for commit_link in target.params_id.commit_link_ids: - # if commit_link.commit_id.repo_id not in repo_ids: - # additionnal_commit_links |= commit_link - # build._log('', 'Adding sources from build [%s](%s)' % (target.id, target.build_url), log_type='markdown') - - child = build._add_child({ - 'upgrade_to_build_id': target.id, - 'upgrade_from_build_id': source, - 'dump_db': db.id, - 'config_id': self.upgrade_config_id - }) - - child.description = 'Testing migration from %s to %s using db %s (%s)' % ( - source.params_id.version_id.name, - target.params_id.version_id.name, - db.name, - config_id.name - ) - # TODO log somewhere if no db at all is found for a db_suffix - - def _get_upgrade_source_versions(self, target_version): - if self.upgrade_from_version_ids: - return self.upgrade_from_version_ids - else: - versions = self.env['runbot.version'].browse() - if self.upgrade_from_previous_major_version: - versions |= target_version.previous_major_version_id - if self.upgrade_from_all_intermediate_version: - versions |= target_version.intermediate_version_ids - elif self.upgrade_from_last_intermediate_version: - if target_version.intermediate_version_ids: - versions |= target_version.intermediate_version_ids[-1] - return versions - - def _get_upgrade_source_builds(self, target_version, builds_references_by_version_id): - versions = self._get_upgrade_source_versions(target_version) - from_builds = self.env['runbot.build'].browse() - for version in versions: - from_builds |= builds_references_by_version_id.get(version.id) or self.env['runbot.build'].browse() - return from_builds.sorted(lambda b: b.params_id.version_id.number) - - def _filter_upgrade_database(self, dbs, pattern): - pat_list = pattern.split(',') if pattern else [] - for db in dbs: - if any(fnmatch.fnmatch(db.db_suffix, pat) for pat in pat_list): - yield db - - def _run_test_upgrade(self, build, log_path): - target = build.params_id.upgrade_to_build_id - commit_ids = build.params_id.commit_ids - target_commit_ids = target.params_id.commit_ids - if commit_ids != target_commit_ids: - target_repo_ids = target_commit_ids.mapped('repo_id') - for commit in commit_ids: - if commit.repo_id not in target_repo_ids: - target_commit_ids |= commit - build._log('', 'Adding sources from build [%s](%s)' % (target.id, target.build_url), log_type='markdown') - build = build.with_context(defined_commit_ids=target_commit_ids) - exports = build._checkout() - - dump_db = build.params_id.dump_db - - migrate_db_name = '%s-%s' % (build.dest, dump_db.db_suffix) # only ok if restore does not force db_suffix - - migrate_cmd = build._cmd() - migrate_cmd += ['-u all'] - migrate_cmd += ['-d', migrate_db_name] - migrate_cmd += ['--stop-after-init'] - migrate_cmd += ['--max-cron-threads=0'] - # migrate_cmd += ['--upgrades-paths', '/%s' % migration_scripts] upgrades-paths is broken, ln is created automatically in sources - - build._log('run', 'Start migration build %s' % build.dest) - timeout = self.cpu_limit - - migrate_cmd.finals.append(['psql', migrate_db_name, '-c', '"SELECT id, name, state FROM ir_module_module WHERE state NOT IN 
(\'installed\', \'uninstalled\', \'uninstallable\') AND name NOT LIKE \'test_%\' "', '>', '/data/build/logs/modules_states.txt']) - - env_variables = self.additionnal_env.split(';') if self.additionnal_env else [] - exception_env = self.env['runbot.upgrade.exception']._generate() - if exception_env: - env_variables.append(exception_env) - return dict(cmd=migrate_cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag) - - def _run_restore(self, build, log_path): - # exports = build._checkout() - params = build.params_id - - if 'dump_url' in params.config_data: - dump_url = params.config_data['dump_url'] - zip_name = dump_url.split('/')[-1] - build._log('test-migration', 'Restoring db [%s](%s)' % (zip_name, dump_url), log_type='markdown') - suffix = 'all' - else: - download_db_suffix = params.dump_db.db_suffix or self.restore_download_db_suffix - dump_build = params.dump_db.build_id or build.parent_id - assert download_db_suffix and dump_build - download_db_name = '%s-%s' % (dump_build.dest, download_db_suffix) - zip_name = '%s.zip' % download_db_name - dump_url = '%s%s' % (dump_build.http_log_url(), zip_name) - build._log('test-migration', 'Restoring dump [%s](%s) from build [%s](%s)' % (zip_name, dump_url, dump_build.id, dump_build.build_url), log_type='markdown') - restore_suffix = self.restore_rename_db_suffix or params.dump_db.db_suffix or suffix - assert restore_suffix - restore_db_name = '%s-%s' % (build.dest, restore_suffix) - - build._local_pg_createdb(restore_db_name) - cmd = ' && '.join([ - 'mkdir /data/build/restore', - 'cd /data/build/restore', - 'wget %s' % dump_url, - 'unzip -q %s' % zip_name, - 'echo "### restoring filestore"', - 'mkdir -p /data/build/datadir/filestore/%s' % restore_db_name, - 'mv filestore/* /data/build/datadir/filestore/%s' % restore_db_name, - 'echo "### restoring db"', - 'psql -q %s < dump.sql' % (restore_db_name), - 'cd /data/build', - 'echo "### cleaning"', - 'rm -r restore', - 'echo "### listing modules"', - """psql %s -c "select name from ir_module_module where state = 'installed'" -t -A > /data/build/logs/restore_modules_installed.txt""" % restore_db_name, - 'echo "### restore" "successful"', # two part string to avoid miss grep - - ]) - - return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=self.cpu_limit) - - def _reference_builds(self, bundle, trigger): - upgrade_dumps_trigger_id = trigger.upgrade_dumps_trigger_id - refs_batches = self._reference_batches(bundle, trigger) - refs_builds = refs_batches.mapped('slot_ids').filtered( - lambda slot: slot.trigger_id == upgrade_dumps_trigger_id - ).mapped('build_id') - # should we filter on active? implicit. On match type? on skipped ? - # is last_"done"_batch enough? 
- # TODO active test false and take last done/running build limit 1 -> in case of rebuild - return refs_builds - - def _is_upgrade_step(self): - return self.job_type in ('configure_upgrade', 'configure_upgrade_complement') - - def _reference_batches(self, bundle, trigger): - if self.job_type == 'configure_upgrade_complement': - return self._reference_batches_complement(bundle, trigger) - else: - return self._reference_batches_upgrade(bundle, trigger.upgrade_dumps_trigger_id.category_id.id) - - def _reference_batches_complement(self, bundle, trigger): - category_id = trigger.upgrade_dumps_trigger_id.category_id.id - version = bundle.version_id - next_versions = version.next_major_version_id | version.next_intermediate_version_ids # TODO filter on trigger version - target_versions = version.browse() - - upgrade_complement_step = trigger.upgrade_dumps_trigger_id.upgrade_step_id - - if next_versions and bundle.base_id.to_upgrade: - for next_version in next_versions: - if bundle.version_id in upgrade_complement_step._get_upgrade_source_versions(next_version): - target_versions |= next_version - return target_versions.with_context( - category_id=category_id, project_id=bundle.project_id.id - ).mapped('base_bundle_id').filtered('to_upgrade').mapped('last_done_batch') - - def _reference_batches_upgrade(self, bundle, category_id): - target_refs_bundles = self.env['runbot.bundle'] - upgrade_domain = [('to_upgrade', '=', True), ('project_id', '=', bundle.project_id.id)] - if self.upgrade_to_version_ids: - target_refs_bundles |= self.env['runbot.bundle'].search(upgrade_domain + [('version_id', 'in', self.upgrade_to_version_ids.ids)]) - else: - if self.upgrade_to_master: - target_refs_bundles |= self.env['runbot.bundle'].search(upgrade_domain + [('name', '=', 'master')]) - if self.upgrade_to_all_versions: - target_refs_bundles |= self.env['runbot.bundle'].search(upgrade_domain + [('name', '!=', 'master')]) - elif self.upgrade_to_major_versions: - target_refs_bundles |= self.env['runbot.bundle'].search(upgrade_domain + [('name', '!=', 'master'), ('version_id.is_major', '=', True)]) - - source_refs_bundles = self.env['runbot.bundle'] - - def from_versions(f_bundle): - nonlocal source_refs_bundles - if self.upgrade_from_previous_major_version: - source_refs_bundles |= f_bundle.previous_major_version_base_id - if self.upgrade_from_all_intermediate_version: - source_refs_bundles |= f_bundle.intermediate_version_base_ids - elif self.upgrade_from_last_intermediate_version: - if f_bundle.intermediate_version_base_ids: - source_refs_bundles |= f_bundle.intermediate_version_base_ids[-1] - - if self.upgrade_from_version_ids: - source_refs_bundles |= self.env['runbot.bundle'].search(upgrade_domain + [('version_id', 'in', self.upgrade_from_version_ids.ids)]) - # this is subject to discussion. 
should this be smart and filter 'from_versions' or should it be flexible and do all possibilities - else: - if self.upgrade_to_current: - from_versions(bundle) - for f_bundle in target_refs_bundles: - from_versions(f_bundle) - source_refs_bundles = source_refs_bundles.filtered('to_upgrade') - - return (target_refs_bundles | source_refs_bundles).with_context( - category_id=category_id - ).mapped('last_done_batch') - - def log_end(self, build): - if self.job_type == 'create_build': - build._logger('Step %s finished in %s' % (self.name, s2human(build.job_time))) - return - - kwargs = dict(message='Step %s finished in %s' % (self.name, s2human(build.job_time))) - if self.job_type == 'install_odoo': - kwargs['message'] += ' $$fa-download$$' - db_suffix = build.params_id.config_data.get('db_name') or self.db_name - kwargs['path'] = '%s%s-%s.zip' % (build.http_log_url(), build.dest, db_suffix) - kwargs['log_type'] = 'link' - build._log('', **kwargs) - - if self.coverage: - xml_url = '%scoverage.xml' % build.http_log_url() - html_url = 'http://%s/runbot/static/build/%s/coverage/index.html' % (build.host, build.dest) - message = 'Coverage report: [xml @icon-download](%s), [html @icon-eye](%s)' % (xml_url, html_url) - build._log('end_job', message, log_type='markdown') - - if self.flamegraph: - dat_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'log.gz') - svg_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'svg') - message = 'Flamegraph report: [data @icon-download](%s), [svg @icon-eye](%s)' % (dat_url, svg_url) - build._log('end_job', message, log_type='markdown') - - def _modules_to_install(self, build): - return set(build._get_modules_to_test(modules_patterns=self.install_modules)) - - def _post_install_commands(self, build, modules_to_install, py_version=None): - cmds = [] - if self.coverage: - py_version = py_version if py_version is not None else build._get_py_version() - # prepare coverage result - cov_path = build._path('coverage') - os.makedirs(cov_path, exist_ok=True) - cmds.append(['python%s' % py_version, "-m", "coverage", "html", "-d", "/data/build/coverage", "--ignore-errors"]) - cmds.append(['python%s' % py_version, "-m", "coverage", "xml", "-o", "/data/build/logs/coverage.xml", "--ignore-errors"]) - return cmds - - def _perfs_data_path(self, ext='log'): - return '/data/build/logs/flame_%s.%s' % (self.name, ext) - - def _coverage_params(self, build, modules_to_install): - pattern_to_omit = set() - if self.paths_to_omit: - pattern_to_omit = set(self.paths_to_omit.split(',')) - for commit in build.params_id.commit_ids: - docker_source_folder = build._docker_source_folder(commit) - for manifest_file in commit.repo_id.manifest_files.split(','): - pattern_to_omit.add('*%s' % manifest_file) - for (addons_path, module, _) in commit._get_available_modules(): - if module not in modules_to_install: - # we want to omit docker_source_folder/[addons/path/]module/* - module_path_in_docker = os.path.join(docker_source_folder, addons_path, module) - pattern_to_omit.add('%s/*' % (module_path_in_docker)) - return ['--omit', ','.join(pattern_to_omit)] - - def _make_results(self, build): - build_values = {} - log_time = self._get_log_last_write(build) - if log_time: - build_values['job_end'] = log_time - if self.job_type == 'python' and self.python_result_code and self.python_result_code != PYTHON_DEFAULT: - build_values.update(self._make_python_results(build)) - elif self.job_type in ['install_odoo', 'python']: - if self.coverage: - 
build_values.update(self._make_coverage_results(build)) - if self.test_enable or self.test_tags: - build_values.update(self._make_tests_results(build)) - elif self.job_type == 'test_upgrade': - build_values.update(self._make_upgrade_results(build)) - elif self.job_type == 'restore': - build_values.update(self._make_restore_results(build)) - - return build_values - - def _make_python_results(self, build): - eval_ctx = self.make_python_ctx(build) - safe_eval(self.python_result_code.strip(), eval_ctx, mode="exec", nocopy=True) - return_value = eval_ctx.get('return_value') - # TODO check return_value or wrap in try/except. Example: local_result set to a wrong value - if not isinstance(return_value, dict): - raise RunbotException('python_result_code must set return_value to a dict of values to write on the build') - return return_value - - def _make_coverage_results(self, build): - build_values = {} - build._log('coverage_result', 'Start getting coverage result') - cov_path = build._path('coverage/index.html') - if os.path.exists(cov_path): - with open(cov_path, 'r') as f: - data = f.read() - covgrep = re.search(r'pc_cov.>(?P<coverage>\d+)%', data) - build_values['coverage_result'] = covgrep and covgrep.group('coverage') or False - if build_values['coverage_result']: - build._log('coverage_result', 'Coverage result: %s' % build_values['coverage_result']) - else: - build._log('coverage_result', 'Coverage result not found', level='WARNING') - else: - build._log('coverage_result', 'Coverage file not found', level='WARNING') - return build_values - - def _make_upgrade_results(self, build): - build_values = {} - build._log('upgrade', 'Getting results for build %s' % build.dest) - - if build.local_result != 'ko': - checkers = [ - self._check_log, - self._check_module_loaded, - self._check_error, - self._check_module_states, - self._check_build_ended, - self._check_warning, - ] - local_result = self._get_checkers_result(build, checkers) - build_values['local_result'] = build._get_worst_result([build.local_result, local_result]) - - return build_values - - def _check_module_states(self, build): - if not build.is_file('logs/modules_states.txt'): - build._log('', '"logs/modules_states.txt" file not found.', level='ERROR') - return 'ko' - - content = build.read_file('logs/modules_states.txt') or '' - if '(0 rows)' not in content: - build._log('', 'Some modules are not in installed/uninstalled/uninstallable state after migration.
\n %s' % content) - return 'ko' - return 'ok' - - def _check_log(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - if not os.path.isfile(log_path): - build._log('_make_tests_results', "Log file not found at the end of test job", level="ERROR") - return 'ko' - return 'ok' - - def _check_module_loaded(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - if not grep(log_path, ".modules.loading: Modules loaded."): - build._log('_make_tests_results', "Modules loaded not found in logs", level="ERROR") - return 'ko' - return 'ok' - - def _check_error(self, build, regex=None): - log_path = build._path('logs', '%s.txt' % self.name) - regex = regex or _re_error - if rfind(log_path, regex): - build._log('_make_tests_results', 'Error or traceback found in logs', level="ERROR") - return 'ko' - return 'ok' - - def _check_warning(self, build, regex=None): - log_path = build._path('logs', '%s.txt' % self.name) - regex = regex or _re_warning - if rfind(log_path, regex): - build._log('_make_tests_results', 'Warning found in logs', level="WARNING") - return 'warn' - return 'ok' - - def _check_build_ended(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - if not grep(log_path, "Initiating shutdown"): - build._log('_make_tests_results', 'No "Initiating shutdown" found in logs, maybe because of cpu limit.', level="ERROR") - return 'ko' - return 'ok' - - def _check_restore_ended(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - if not grep(log_path, "### restore successful"): - build._log('_make_tests_results', 'Restore failed, check text logs for more info', level="ERROR") - return 'ko' - return 'ok' - - def _get_log_last_write(self, build): - log_path = build._path('logs', '%s.txt' % self.name) - if os.path.isfile(log_path): - return time2str(time.localtime(os.path.getmtime(log_path))) - - def _get_checkers_result(self, build, checkers): - for checker in checkers: - result = checker(build) - if result != 'ok': - return result - return 'ok' - - def _make_tests_results(self, build): - build_values = {} - build._log('run', 'Getting results for build %s' % build.dest) - - if build.local_result != 'ko': - checkers = [ - self._check_log, - self._check_module_loaded, - self._check_error, - self._check_build_ended - ] - if build.local_result != 'warn': - checkers.append(self._check_warning) - - local_result = self._get_checkers_result(build, checkers) - build_values['local_result'] = build._get_worst_result([build.local_result, local_result]) - return build_values - - def _make_restore_results(self, build): - build_values = {} - if build.local_result != 'warn': - checkers = [ - self._check_log, - self._check_restore_ended - ] - local_result = self._get_checkers_result(build, checkers) - build_values['local_result'] = build._get_worst_result([build.local_result, local_result]) - return build_values - - def _make_stats(self, build): - if not self.make_stats: # TODO garbage collect non sticky stat - return - build._log('make_stats', 'Getting stats from log file') - log_path = build._path('logs', '%s.txt' % self.name) - if not os.path.exists(log_path): - build._log('make_stats', 'Log **%s.txt** file not found' % self.name, level='INFO', log_type='markdown') - return - try: - regex_ids = self.build_stat_regex_ids - if not regex_ids: - regex_ids = regex_ids.search([('generic', '=', True)]) - stats_per_regex = regex_ids._find_in_file(log_path) - if stats_per_regex: - build_stats = [ - { - 'config_step_id': self.id, - 'build_id': build.id, - 
'category': category, - 'values': values, - } for category, values in stats_per_regex.items() - ] - self.env['runbot.build.stat'].create(build_stats) - except Exception as e: - message = '**An error occured while computing statistics of %s:**\n`%s`' % (build.job, str(e).replace('\\n', '\n').replace("\\'", "'")) - _logger.exception(message) - build._log('make_stats', message, level='INFO', log_type='markdown') - - def _step_state(self): - self.ensure_one() - if self.job_type == 'run_odoo' or (self.job_type == 'python' and self.running_job): - return 'running' - return 'testing' - - def _has_log(self): - self.ensure_one() - return self._is_docker_step() - - -class ConfigStepOrder(models.Model): - _name = 'runbot.build.config.step.order' - _description = "Config step order" - _order = 'sequence, id' - # a kind of many2many rel with sequence - - sequence = fields.Integer('Sequence', required=True) - config_id = fields.Many2one('runbot.build.config', 'Config', required=True, ondelete='cascade') - step_id = fields.Many2one('runbot.build.config.step', 'Config Step', required=True, ondelete='cascade') - - @api.onchange('step_id') - def _onchange_step_id(self): - self.sequence = self.step_id.default_sequence - - @api.model_create_single - def create(self, values): - if 'sequence' not in values and values.get('step_id'): - values['sequence'] = self.env['runbot.build.config.step'].browse(values.get('step_id')).default_sequence - if self.pool._init: # do not duplicate entry on install - existing = self.search([('sequence', '=', values.get('sequence')), ('config_id', '=', values.get('config_id')), ('step_id', '=', values.get('step_id'))]) - if existing: - return - return super(ConfigStepOrder, self).create(values) diff --git a/runbot/models/build_error.py b/runbot/models/build_error.py deleted file mode 100644 index 73a83640..00000000 --- a/runbot/models/build_error.py +++ /dev/null @@ -1,334 +0,0 @@ -# -*- coding: utf-8 -*- -import ast -import hashlib -import logging -import re - -from collections import defaultdict -from fnmatch import fnmatch -from odoo import models, fields, api -from odoo.exceptions import ValidationError - -_logger = logging.getLogger(__name__) - - -class BuildError(models.Model): - - _name = "runbot.build.error" - _description = "Build error" - - _inherit = "mail.thread" - _rec_name = "id" - - content = fields.Text('Error message', required=True) - cleaned_content = fields.Text('Cleaned error message') - summary = fields.Char('Content summary', compute='_compute_summary', store=False) - module_name = fields.Char('Module name') # name in ir_logging - file_path = fields.Char('File Path') # path in ir logging - function = fields.Char('Function name') # func name in ir logging - fingerprint = fields.Char('Error fingerprint', index=True) - random = fields.Boolean('underterministic error', tracking=True) - responsible = fields.Many2one('res.users', 'Assigned fixer', tracking=True) - team_id = fields.Many2one('runbot.team', 'Assigned team') - fixing_commit = fields.Char('Fixing commit', tracking=True) - fixing_pr_id = fields.Many2one('runbot.branch', 'Fixing PR', tracking=True) - build_ids = fields.Many2many('runbot.build', 'runbot_build_error_ids_runbot_build_rel', string='Affected builds') - bundle_ids = fields.One2many('runbot.bundle', compute='_compute_bundle_ids') - version_ids = fields.One2many('runbot.version', compute='_compute_version_ids', string='Versions', search='_search_version') - trigger_ids = fields.Many2many('runbot.trigger', compute='_compute_trigger_ids', 
string='Triggers', search='_search_trigger_ids') - active = fields.Boolean('Error is not fixed', default=True, tracking=True) - tag_ids = fields.Many2many('runbot.build.error.tag', string='Tags') - build_count = fields.Integer(compute='_compute_build_counts', string='Nb seen', store=True) - parent_id = fields.Many2one('runbot.build.error', 'Linked to', index=True) - child_ids = fields.One2many('runbot.build.error', 'parent_id', string='Child Errors', context={'active_test': False}) - children_build_ids = fields.Many2many('runbot.build', compute='_compute_children_build_ids', string='Children builds') - error_history_ids = fields.Many2many('runbot.build.error', compute='_compute_error_history_ids', string='Old errors', context={'active_test': False}) - first_seen_build_id = fields.Many2one('runbot.build', compute='_compute_first_seen_build_id', string='First Seen build') - first_seen_date = fields.Datetime(string='First Seen Date', related='first_seen_build_id.create_date') - last_seen_build_id = fields.Many2one('runbot.build', compute='_compute_last_seen_build_id', string='Last Seen build', store=True) - last_seen_date = fields.Datetime(string='Last Seen Date', related='last_seen_build_id.create_date', store=True) - test_tags = fields.Char(string='Test tags', help="Comma separated list of test_tags to use to reproduce/remove this error", tracking=True) - - @api.constrains('test_tags') - def _check_test_tags(self): - for build_error in self: - if build_error.test_tags and '-' in build_error.test_tags: - raise ValidationError('Build error test_tags should not be negated') - - @api.model_create_single - def create(self, vals): - cleaners = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')]) - content = vals.get('content') - cleaned_content = cleaners.r_sub('%', content) - vals.update({'cleaned_content': cleaned_content, - 'fingerprint': self._digest(cleaned_content) - }) - if not 'team_id' in vals and 'module_name' in vals: - vals.update({'team_id': self.env['runbot.team']._get_team(vals['module_name'])}) - return super().create(vals) - - def write(self, vals): - if 'active' in vals: - for build_error in self: - (build_error.child_ids - self).write({'active': vals['active']}) - return super(BuildError, self).write(vals) - - @api.depends('build_ids', 'child_ids.build_ids') - def _compute_build_counts(self): - for build_error in self: - build_error.build_count = len(build_error.build_ids | build_error.mapped('child_ids.build_ids')) - - @api.depends('build_ids') - def _compute_bundle_ids(self): - for build_error in self: - top_parent_builds = build_error.build_ids.mapped(lambda rec: rec and rec.top_parent) - build_error.bundle_ids = top_parent_builds.mapped('slot_ids').mapped('batch_id.bundle_id') - - @api.depends('build_ids', 'child_ids.build_ids') - def _compute_version_ids(self): - for build_error in self: - build_error.version_ids = build_error.build_ids.version_id - - @api.depends('build_ids') - def _compute_trigger_ids(self): - for build_error in self: - build_error.trigger_ids = build_error.build_ids.trigger_id - - @api.depends('content') - def _compute_summary(self): - for build_error in self: - build_error.summary = build_error.content[:50] - - @api.depends('build_ids', 'child_ids.build_ids') - def _compute_children_build_ids(self): - for build_error in self: - all_builds = build_error.build_ids | build_error.mapped('child_ids.build_ids') - build_error.children_build_ids = all_builds.sorted(key=lambda rec: rec.id, reverse=True) - - @api.depends('children_build_ids') - 
def _compute_last_seen_build_id(self): - for build_error in self: - build_error.last_seen_build_id = build_error.children_build_ids and build_error.children_build_ids[0] or False - - @api.depends('children_build_ids') - def _compute_first_seen_build_id(self): - for build_error in self: - build_error.first_seen_build_id = build_error.children_build_ids and build_error.children_build_ids[-1] or False - - @api.depends('fingerprint', 'child_ids.fingerprint') - def _compute_error_history_ids(self): - for error in self: - fingerprints = [error.fingerprint] + [rec.fingerprint for rec in error.child_ids] - error.error_history_ids = self.search([('fingerprint', 'in', fingerprints), ('active', '=', False), ('id', '!=', error.id or False)]) - - @api.model - def _digest(self, s): - """ - return a hash 256 digest of the string s - """ - return hashlib.sha256(s.encode()).hexdigest() - - @api.model - def _parse_logs(self, ir_logs): - - regexes = self.env['runbot.error.regex'].search([]) - search_regs = regexes.filtered(lambda r: r.re_type == 'filter') - cleaning_regs = regexes.filtered(lambda r: r.re_type == 'cleaning') - - hash_dict = defaultdict(list) - for log in ir_logs: - if search_regs.r_search(log.message): - continue - fingerprint = self._digest(cleaning_regs.r_sub('%', log.message)) - hash_dict[fingerprint].append(log) - - build_errors = self.env['runbot.build.error'] - # add build ids to already detected errors - existing_errors = self.env['runbot.build.error'].search([('fingerprint', 'in', list(hash_dict.keys())), ('active', '=', True)]) - build_errors |= existing_errors - for build_error in existing_errors: - for build in {rec.build_id for rec in hash_dict[build_error.fingerprint]}: - build.build_error_ids += build_error - del hash_dict[build_error.fingerprint] - - # create an error for the remaining entries - for fingerprint, logs in hash_dict.items(): - build_errors |= self.env['runbot.build.error'].create({ - 'content': logs[0].message, - 'module_name': logs[0].name, - 'file_path': logs[0].path, - 'function': logs[0].func, - 'build_ids': [(6, False, [r.build_id.id for r in logs])], - }) - - if build_errors: - window_action = { - "type": "ir.actions.act_window", - "res_model": "runbot.build.error", - "views": [[False, "tree"]], - "domain": [('id', 'in', build_errors.ids)] - } - if len(build_errors) == 1: - window_action["views"] = [[False, "form"]] - window_action["res_id"] = build_errors.id - return window_action - - def link_errors(self): - """ Link errors with the first one of the recordset - choosing parent in error with responsible, random bug and finally fisrt seen - """ - if len(self) < 2: - return - self = self.with_context(active_test=False) - build_errors = self.search([('id', 'in', self.ids)], order='responsible asc, random desc, id asc') - build_errors[1:].write({'parent_id': build_errors[0].id}) - - def clean_content(self): - cleaning_regs = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')]) - for build_error in self: - build_error.cleaned_content = cleaning_regs.r_sub('%', build_error.content) - - @api.model - def test_tags_list(self): - active_errors = self.search([('test_tags', '!=', False)]) - test_tag_list = active_errors.mapped('test_tags') - return [test_tag for error_tags in test_tag_list for test_tag in (error_tags).split(',')] - - @api.model - def disabling_tags(self): - return ['-%s' % tag for tag in self.test_tags_list()] - - def _search_version(self, operator, value): - return [('build_ids.version_id', operator, value)] - - def 
_search_trigger_ids(self, operator, value): - return [('build_ids.trigger_id', operator, value)] - -class BuildErrorTag(models.Model): - - _name = "runbot.build.error.tag" - _description = "Build error tag" - - name = fields.Char('Tag') - error_ids = fields.Many2many('runbot.build.error', string='Errors') - - -class ErrorRegex(models.Model): - - _name = "runbot.error.regex" - _description = "Build error regex" - _inherit = "mail.thread" - _rec_name = 'id' - _order = 'sequence, id' - - regex = fields.Char('Regular expression') - re_type = fields.Selection([('filter', 'Filter out'), ('cleaning', 'Cleaning')], string="Regex type") - sequence = fields.Integer('Sequence', default=100) - - def r_sub(self, replace, s): - """ replaces patterns from the recordset by replace in the given string """ - for c in self: - s = re.sub(c.regex, '%', s) - return s - - def r_search(self, s): - """ Return True if one of the regex is found in s """ - for filter in self: - if re.search(filter.regex, s): - return True - return False - - -class RunbotTeam(models.Model): - - _name = 'runbot.team' - _description = "Runbot Team" - _order = 'name, id' - - name = fields.Char('Team', required=True) - user_ids = fields.Many2many('res.users', string='Team Members', domain=[('share', '=', False)]) - dashboard_id = fields.Many2one('runbot.dashboard', string='Dashboard') - build_error_ids = fields.One2many('runbot.build.error', 'team_id', string='Team Errors', domain=[('parent_id', '=', False)]) - path_glob = fields.Char('Module Wildcards', - help='Comma separated list of `fnmatch` wildcards used to assign errors automaticaly\n' - 'Negative wildcards starting with a `-` can be used to discard some path\n' - 'e.g.: `*website*,-*website_sale*`') - upgrade_exception_ids = fields.One2many('runbot.upgrade.exception', 'team_id', string='Team Upgrade Exceptions') - - @api.model_create_single - def create(self, values): - if 'dashboard_id' not in values or values['dashboard_id'] == False: - dashboard = self.env['runbot.dashboard'].search([('name', '=', values['name'])]) - if not dashboard: - dashboard = dashboard.create({'name': values['name']}) - values['dashboard_id'] = dashboard.id - return super().create(values) - - @api.model - def _get_team(self, module_name): - for team in self.env['runbot.team'].search([('path_glob', '!=', False)]): - if any([fnmatch(module_name, pattern.strip().strip('-')) for pattern in team.path_glob.split(',') if pattern.strip().startswith('-')]): - continue - if any([fnmatch(module_name, pattern.strip()) for pattern in team.path_glob.split(',') if not pattern.strip().startswith('-')]): - return team.id - return False - - -class RunbotDashboard(models.Model): - - _name = 'runbot.dashboard' - _description = "Runbot Dashboard" - _order = 'name, id' - - name = fields.Char('Team', required=True) - team_ids = fields.One2many('runbot.team', 'dashboard_id', string='Teams') - dashboard_tile_ids = fields.Many2many('runbot.dashboard.tile', string='Dashboards tiles') - - -class RunbotDashboardTile(models.Model): - - _name = 'runbot.dashboard.tile' - _description = "Runbot Dashboard Tile" - _order = 'sequence, id' - - sequence = fields.Integer('Sequence') - name = fields.Char('Name') - dashboard_ids = fields.Many2many('runbot.dashboard', string='Dashboards') - display_name = fields.Char(compute='_compute_display_name') - project_id = fields.Many2one('runbot.project', 'Project', help='Project to monitor', required=True, - default=lambda self: self.env.ref('runbot.main_project')) - category_id = 
fields.Many2one('runbot.category', 'Category', help='Trigger Category to monitor', required=True, - default=lambda self: self.env.ref('runbot.default_category')) - trigger_id = fields.Many2one('runbot.trigger', 'Trigger', help='Trigger to monitor in chosen category') - config_id = fields.Many2one('runbot.build.config', 'Config', help='Select a sub_build with this config') - domain_filter = fields.Char('Domain Filter', help='If present, will be applied on builds', default="[('global_result', '=', 'ko')]") - custom_template_id = fields.Many2one('ir.ui.view', help='Change for a custom Dashboard card template', - domain=[('type', '=', 'qweb')], default=lambda self: self.env.ref('runbot.default_dashboard_tile_view')) - sticky_bundle_ids = fields.Many2many('runbot.bundle', compute='_compute_sticky_bundle_ids', string='Sticky Bundles') - build_ids = fields.Many2many('runbot.build', compute='_compute_build_ids', string='Builds') - - @api.depends('project_id', 'category_id', 'trigger_id', 'config_id') - def _compute_display_name(self): - for board in self: - names = [board.project_id.name, board.category_id.name, board.trigger_id.name, board.config_id.name, board.name] - board.display_name = ' / '.join([n for n in names if n]) - - @api.depends('project_id') - def _compute_sticky_bundle_ids(self): - sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True)]) - for dashboard in self: - dashboard.sticky_bundle_ids = sticky_bundles.filtered(lambda b: b.project_id == dashboard.project_id) - - @api.depends('project_id', 'category_id', 'trigger_id', 'config_id', 'domain_filter') - def _compute_build_ids(self): - for dashboard in self: - last_done_batch_ids = dashboard.sticky_bundle_ids.with_context(category_id=dashboard.category_id.id).last_done_batch - if dashboard.trigger_id: - all_build_ids = last_done_batch_ids.slot_ids.filtered(lambda s: s.trigger_id == dashboard.trigger_id).all_build_ids - else: - all_build_ids = last_done_batch_ids.all_build_ids - - domain = ast.literal_eval(dashboard.domain_filter) if dashboard.domain_filter else [] - if dashboard.config_id: - domain.append(('config_id', '=', dashboard.config_id.id)) - dashboard.build_ids = all_build_ids.filtered_domain(domain) diff --git a/runbot/models/build_stat.py b/runbot/models/build_stat.py deleted file mode 100644 index c31d76b3..00000000 --- a/runbot/models/build_stat.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging - -from odoo import models, fields, api, tools -from ..fields import JsonDictField - -_logger = logging.getLogger(__name__) - - -class BuildStat(models.Model): - _name = "runbot.build.stat" - _description = "Statistics" - _log_access = False - - _sql_constraints = [ - ( - "build_config_key_unique", - "unique (build_id, config_step_id, category)", - "Build stats must be unique for the same build step", - ) - ] - - build_id = fields.Many2one("runbot.build", "Build", index=True, ondelete="cascade") - config_step_id = fields.Many2one( - "runbot.build.config.step", "Step", ondelete="cascade" - ) - category = fields.Char("Category", index=True) - values = JsonDictField("Value") diff --git a/runbot/models/build_stat_regex.py b/runbot/models/build_stat_regex.py deleted file mode 100644 index 8e5eb9fc..00000000 --- a/runbot/models/build_stat_regex.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -from ..common import os -import re - -from odoo import models, fields, api -from odoo.exceptions import ValidationError - -VALUE_PATTERN = r"\(\?P\<value\>.+\)" # used to verify value group pattern - 
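The `runbot.build.stat.regex` model whose deleted source starts just below extracts numeric statistics from build log files: each record's regex must expose a named `value` group (that is what the `VALUE_PATTERN` constant above verifies) and may expose an optional `key` group that refines the stat name. A minimal standalone sketch of that extraction loop, outside the ORM and with a hypothetical stat name, regex and log excerpt:

    import re

    # Hypothetical stand-ins for the name/regex fields of a runbot.build.stat.regex record.
    STAT_NAME = 'queries'
    STAT_REGEX = r'module (?P<key>\w+) ran (?P<value>\d+) queries'

    # Made-up log excerpt; the real model reads a build log file from disk.
    LOG_EXCERPT = (
        'module account ran 1518 queries\n'
        'module stock ran 402 queries\n'
    )

    def extract_stats(name, regex, data):
        """Mirror the matching loop of the deleted _find_in_file(): every match must
        provide a named 'value' group; an optional 'key' group refines the stat name."""
        matches = {}
        for match in re.finditer(regex, data):
            groups = match.groupdict()
            try:
                value = float(groups['value'])
            except (KeyError, ValueError):
                continue  # the real model logs a warning and skips the match
            matches[groups.get('key', 'value')] = value
        return {name: matches}

    print(extract_stats(STAT_NAME, STAT_REGEX, LOG_EXCERPT))
    # {'queries': {'account': 1518.0, 'stock': 402.0}}

In the deleted `_find_in_file()` the per-regex dict is keyed by the record's `name` and stored as build stats; here it is simply printed.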
-_logger = logging.getLogger(__name__) - - -class BuildStatRegex(models.Model): - """ A regular expression to extract a float/int value from a log file - The regulare should contain a named group like '(?P<value>.+)'. - The result will be a key/value like {name: value} - A second named group '(?P<key>.+)' can bu used to augment the key name - like {name.key_result: value} - A 'generic' regex will be used when no regex are defined on a make_stat - step. - """ - - _name = "runbot.build.stat.regex" - _description = "Statistics regex" - _order = 'sequence,id' - - name = fields.Char("Key Name") - regex = fields.Char("Regular Expression") - description = fields.Char("Description") - generic = fields.Boolean('Generic', help='Executed when no regex on the step', default=True) - config_step_ids = fields.Many2many('runbot.build.config.step', string='Config Steps') - sequence = fields.Integer('Sequence') - - @api.constrains("name", "regex") - def _check_regex(self): - for rec in self: - try: - r = re.compile(rec.regex) - except re.error as e: - raise ValidationError("Unable to compile regular expression: %s" % e) - # verify that a named group exist in the pattern - if not re.search(VALUE_PATTERN, r.pattern): - raise ValidationError( - "The regular expresion should contain the name group pattern 'value' e.g: '(?P<value>.+)'" - ) - - def _find_in_file(self, file_path): - """ Search file regexes and write stats - returns a dict of key:values - """ - if not os.path.exists(file_path): - return {} - stats_matches = {} - with open(file_path, "r") as log_file: - data = log_file.read() - for build_stat_regex in self: - current_stat_matches = {} - for match in re.finditer(build_stat_regex.regex, data): - group_dict = match.groupdict() - try: - value = float(group_dict.get("value")) - except ValueError: - _logger.warning( - 'The matched value (%s) of "%s" cannot be converted into float', - group_dict.get("value"), build_stat_regex.regex - ) - continue - current_stat_matches[group_dict.get('key', 'value')] = value - stats_matches[build_stat_regex.name] = current_stat_matches - return stats_matches diff --git a/runbot/models/bundle.py b/runbot/models/bundle.py deleted file mode 100644 index 4578658c..00000000 --- a/runbot/models/bundle.py +++ /dev/null @@ -1,243 +0,0 @@ -import time -import logging -import datetime -import subprocess - -from collections import defaultdict -from odoo import models, fields, api, tools -from ..common import dt2time, s2human_long - -_logger = logging.getLogger(__name__) - - -class Bundle(models.Model): - _name = 'runbot.bundle' - _description = "Bundle" - - name = fields.Char('Bundle name', required=True, help="Name of the base branch") - project_id = fields.Many2one('runbot.project', required=True, index=True) - branch_ids = fields.One2many('runbot.branch', 'bundle_id') - - # custom behaviour - no_build = fields.Boolean('No build') - no_auto_run = fields.Boolean('No run') - build_all = fields.Boolean('Force all triggers') - modules = fields.Char("Modules to install", help="Comma-separated list of modules to install and test.") - - batch_ids = fields.One2many('runbot.batch', 'bundle_id') - last_batch = fields.Many2one('runbot.batch', index=True, domain=lambda self: [('category_id', '=', self.env.ref('runbot.default_category').id)]) - last_batchs = fields.Many2many('runbot.batch', 'Last batchs', compute='_compute_last_batchs') - last_done_batch = fields.Many2many('runbot.batch', 'Last batchs', compute='_compute_last_done_batch') - - sticky = fields.Boolean('Sticky', 
compute='_compute_sticky', store=True, index=True) - is_base = fields.Boolean('Is base', index=True) - defined_base_id = fields.Many2one('runbot.bundle', 'Forced base bundle', domain="[('project_id', '=', project_id), ('is_base', '=', True)]") - base_id = fields.Many2one('runbot.bundle', 'Base bundle', compute='_compute_base_id', store=True) - to_upgrade = fields.Boolean('To upgrade', compute='_compute_to_upgrade', store=True, index=False) - - version_id = fields.Many2one('runbot.version', 'Version', compute='_compute_version_id', store=True, recursive=True) - version_number = fields.Char(related='version_id.number', store=True, index=True) - - previous_major_version_base_id = fields.Many2one('runbot.bundle', 'Previous base bundle', compute='_compute_relations_base_id') - intermediate_version_base_ids = fields.Many2many('runbot.bundle', 'Intermediate base bundles', compute='_compute_relations_base_id') - - priority = fields.Boolean('Build priority', default=False) - - # Custom parameters - trigger_custom_ids = fields.One2many('runbot.bundle.trigger.custom', 'bundle_id') - host_id = fields.Many2one('runbot.host', compute="_compute_host_id", store=True) - dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, help="Use a custom Dockerfile") - commit_limit = fields.Integer("Commit limit") - file_limit = fields.Integer("File limit") - - @api.depends('name') - def _compute_host_id(self): - assigned_only = None - runbots = {} - for bundle in self: - bundle.host_id = False - elems = (bundle.name or '').split('-') - for elem in elems: - if elem.startswith('runbot'): - if elem.replace('runbot', '') == '_x': - if assigned_only is None: - assigned_only = self.env['runbot.host'].search([('assigned_only', '=', True)], limit=1) - bundle.host_id = assigned_only or False - elif elem.replace('runbot', '').isdigit(): - if elem not in runbots: - runbots[elem] = self.env['runbot.host'].search([('name', 'like', '%s%%' % elem)], limit=1) - bundle.host_id = runbots[elem] or False - - @api.depends('sticky') - def _compute_make_stats(self): - for bundle in self: - bundle.make_stats = bundle.sticky - - @api.depends('is_base') - def _compute_sticky(self): - for bundle in self: - bundle.sticky = bundle.is_base - - @api.depends('is_base') - def _compute_to_upgrade(self): - for bundle in self: - bundle.to_upgrade = bundle.is_base - - @api.depends('name', 'is_base', 'defined_base_id', 'base_id.is_base', 'project_id') - def _compute_base_id(self): - for bundle in self: - if bundle.is_base: - bundle.base_id = bundle - continue - if bundle.defined_base_id: - bundle.base_id = bundle.defined_base_id - continue - project_id = bundle.project_id.id - master_base = False - fallback = False - for bid, bname in self._get_base_ids(project_id): - if bundle.name.startswith('%s-' % bname): - bundle.base_id = self.browse(bid) - break - elif bname == 'master': - master_base = self.browse(bid) - elif not fallback or fallback.id < bid: - fallback = self.browse(bid) - else: - bundle.base_id = master_base or fallback - - @tools.ormcache('project_id') - def _get_base_ids(self, project_id): - return [(b.id, b.name) for b in self.search([('is_base', '=', True), ('project_id', '=', project_id)])] - - @api.depends('is_base', 'base_id.version_id') - def _compute_version_id(self): - for bundle in self.sorted(key='is_base', reverse=True): - if not bundle.is_base: - bundle.version_id = bundle.base_id.version_id - continue - bundle.version_id = self.env['runbot.version']._get(bundle.name) - - @api.depends('version_id') - def 
_compute_relations_base_id(self): - for bundle in self: - bundle = bundle.with_context(project_id=bundle.project_id.id) - bundle.previous_major_version_base_id = bundle.version_id.previous_major_version_id.base_bundle_id - bundle.intermediate_version_base_ids = bundle.version_id.intermediate_version_ids.mapped('base_bundle_id') - - @api.depends_context('category_id') - def _compute_last_batchs(self): - batch_ids = defaultdict(list) - if self.ids: - category_id = self.env.context.get('category_id', self.env['ir.model.data']._xmlid_to_res_id('runbot.default_category')) - self.env.cr.execute(""" - SELECT - id - FROM ( - SELECT - batch.id AS id, - row_number() OVER (PARTITION BY batch.bundle_id order by batch.id desc) AS row - FROM - runbot_bundle bundle INNER JOIN runbot_batch batch ON bundle.id=batch.bundle_id - WHERE - bundle.id in %s - AND batch.category_id = %s - ) AS bundle_batch - WHERE - row <= 4 - ORDER BY row, id desc - """, [tuple(self.ids), category_id] - ) - batchs = self.env['runbot.batch'].browse([r[0] for r in self.env.cr.fetchall()]) - for batch in batchs: - batch_ids[batch.bundle_id.id].append(batch.id) - - for bundle in self: - bundle.last_batchs = [(6, 0, batch_ids[bundle.id])] if bundle.id in batch_ids else False - - @api.depends_context('category_id') - def _compute_last_done_batch(self): - if self: - # self.env['runbot.batch'].flush() - for bundle in self: - bundle.last_done_batch = False - category_id = self.env.context.get('category_id', self.env['ir.model.data']._xmlid_to_res_id('runbot.default_category')) - self.env.cr.execute(""" - SELECT - id - FROM ( - SELECT - batch.id AS id, - row_number() OVER (PARTITION BY batch.bundle_id order by batch.id desc) AS row - FROM - runbot_bundle bundle INNER JOIN runbot_batch batch ON bundle.id=batch.bundle_id - WHERE - bundle.id in %s - AND batch.state = 'done' - AND batch.category_id = %s - ) AS bundle_batch - WHERE - row = 1 - ORDER BY row, id desc - """, [tuple(self.ids), category_id] - ) - batchs = self.env['runbot.batch'].browse([r[0] for r in self.env.cr.fetchall()]) - for batch in batchs: - batch.bundle_id.last_done_batch = batch - - def _url(self): - self.ensure_one() - return "/runbot/bundle/%s" % self.id - - - def create(self, values_list): - res = super().create(values_list) - if res.is_base: - model = self.browse() - model._get_base_ids.clear_cache(model) - return res - - def write(self, values): - super().write(values) - if 'is_base' in values: - model = self.browse() - model._get_base_ids.clear_cache(model) - - def _force(self, category_id=None): - self.ensure_one() - if self.last_batch.state == 'preparing': - return - values = { - 'last_update': fields.Datetime.now(), - 'bundle_id': self.id, - 'state': 'preparing', - } - if category_id: - values['category_id'] = category_id - new = self.env['runbot.batch'].create(values) - self.last_batch = new - return new - - def consistency_warning(self): - if self.defined_base_id: - return [('info', 'This bundle has a forced base: %s' % self.defined_base_id.name)] - warnings = [] - if not self.base_id: - warnings.append(('warning', 'No base defined on this bundle')) - else: - for branch in self.branch_ids: - if branch.is_pr and branch.target_branch_name != self.base_id.name: - if branch.target_branch_name.startswith(self.base_id.name): - warnings.append(('info', 'PR %s targeting a non base branch: %s' % (branch.dname, branch.target_branch_name))) - else: - warnings.append(('warning' if branch.alive else 'info', 'PR %s targeting wrong version: %s (expecting %s)' % (branch.dname, 
branch.target_branch_name, self.base_id.name))) - elif not branch.is_pr and not branch.name.startswith(self.base_id.name) and not self.defined_base_id: - warnings.append(('warning', 'Branch %s not starting with version name (%s)' % (branch.dname, self.base_id.name))) - return warnings - - def branch_groups(self): - self.branch_ids.sorted(key=lambda b: (b.remote_id.repo_id.sequence, b.remote_id.repo_id.id, b.is_pr)) - branch_groups = {repo: [] for repo in self.branch_ids.mapped('remote_id.repo_id').sorted('sequence')} - for branch in self.branch_ids.sorted(key=lambda b: (b.is_pr)): - branch_groups[branch.remote_id.repo_id].append(branch) - return branch_groups diff --git a/runbot/models/codeowner.py b/runbot/models/codeowner.py deleted file mode 100644 index d0cc1343..00000000 --- a/runbot/models/codeowner.py +++ /dev/null @@ -1,41 +0,0 @@ -import ast -import re - -from odoo import models, fields, api -from odoo.exceptions import ValidationError - - -class Codeowner(models.Model): - _name = 'runbot.codeowner' - _description = "Notify github teams based on filenames regex" - _inherit = "mail.thread" - - project_id = fields.Many2one('runbot.project', required=True) - regex = fields.Char('Regular Expression', help='Regex to match full file paths', required=True, tracking=True) - github_teams = fields.Char(help='Comma separated list of github teams to notify', required=True, tracking=True) - team_id = fields.Many2one('runbot.team', help='Not mandatory runbot team') - version_domain = fields.Char('Version Domain', help='Codeowner only applies to the filtered versions') - - @api.constrains('regex') - def _validate_regex(self): - for rec in self: - try: - r = re.compile(rec.regex) - except re.error as e: - raise ValidationError("Unable to compile regular expression: %s" % e) - - @api.constrains('version_domain') - def _validate_version_domain(self): - for rec in self: - try: - self._match_version(self.env.ref('runbot.bundle_master').version_id) - except Exception as e: - raise ValidationError("Unable to validate version_domain: %s" % e) - - def _get_version_domain(self): - """ Helper to get the evaluated version domain """ - self.ensure_one() - return ast.literal_eval(self.version_domain) if self.version_domain else [] - - def _match_version(self, version): - return version.filtered_domain(self._get_version_domain()) diff --git a/runbot/models/commit.py b/runbot/models/commit.py deleted file mode 100644 index ae565ea0..00000000 --- a/runbot/models/commit.py +++ /dev/null @@ -1,238 +0,0 @@ - -import subprocess - -from ..common import os, RunbotException -import glob -import shutil - -from odoo import models, fields, api, registry -import logging - -_logger = logging.getLogger(__name__) - - -class Commit(models.Model): - _name = 'runbot.commit' - _description = "Commit" - - _sql_constraints = [ - ( - "commit_unique", - "unique (name, repo_id, rebase_on_id)", - "Commit must be unique to ensure correct duplicate matching", - ) - ] - name = fields.Char('SHA') - repo_id = fields.Many2one('runbot.repo', string='Repo group') - date = fields.Datetime('Commit date') - author = fields.Char('Author') - author_email = fields.Char('Author Email') - committer = fields.Char('Committer') - committer_email = fields.Char('Committer Email') - subject = fields.Text('Subject') - dname = fields.Char('Display name', compute='_compute_dname') - rebase_on_id = fields.Many2one('runbot.commit', 'Rebase on commit') - - def _get(self, name, repo_id, vals=None, rebase_on_id=False): - commit = self.search([('name', '=', name), 
('repo_id', '=', repo_id), ('rebase_on_id', '=', rebase_on_id)]) - if not commit: - commit = self.env['runbot.commit'].create({**(vals or {}), 'name': name, 'repo_id': repo_id, 'rebase_on_id': rebase_on_id}) - return commit - - def _rebase_on(self, commit): - if self == commit: - return self - return self._get(self.name, self.repo_id.id, self.read()[0], commit.id) - - def _get_available_modules(self): - for manifest_file_name in self.repo_id.manifest_files.split(','): # '__manifest__.py' '__openerp__.py' - for addons_path in (self.repo_id.addons_paths or '').split(','): # '' 'addons' 'odoo/addons' - sep = os.path.join(addons_path, '*') - for manifest_path in glob.glob(self._source_path(sep, manifest_file_name)): - module = os.path.basename(os.path.dirname(manifest_path)) - yield (addons_path, module, manifest_file_name) - - def export(self, build): - """Export a git repo into a sources""" - # TODO add automated tests - self.ensure_one() - if not self.env['runbot.commit.export'].search([('build_id', '=', build.id), ('commit_id', '=', self.id)]): - self.env['runbot.commit.export'].create({'commit_id': self.id, 'build_id': build.id}) - export_path = self._source_path() - - if os.path.isdir(export_path): - _logger.info('git export: exporting to %s (already exists)', export_path) - return export_path - - - _logger.info('git export: exporting to %s (new)', export_path) - os.makedirs(export_path) - - self.repo_id._fetch(self.name) - export_sha = self.name - if self.rebase_on_id: - export_sha = self.rebase_on_id.name - self.rebase_on_id.repo_id._fetch(export_sha) - - p1 = subprocess.Popen(['git', '--git-dir=%s' % self.repo_id.path, 'archive', export_sha], stderr=subprocess.PIPE, stdout=subprocess.PIPE) - p2 = subprocess.Popen(['tar', '--mtime', self.date.strftime('%Y-%m-%d %H:%M:%S'), '-xC', export_path], stdin=p1.stdout, stdout=subprocess.PIPE) - p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. - (_, err) = p2.communicate() - p1.poll() # fill the returncode - if p1.returncode: - _logger.info("git export: removing corrupted export %r", export_path) - shutil.rmtree(export_path) - raise RunbotException("Git archive failed for %s with error code %s. (%s)" % (self.name, p1.returncode, p1.stderr.read().decode())) - if err: - _logger.info("git export: removing corrupted export %r", export_path) - shutil.rmtree(export_path) - raise RunbotException("Export for %s failed. (%s)" % (self.name, err)) - - if self.rebase_on_id: - # we could be smart here and detect if merge_base == commit, in witch case checkouting base_commit is enough. Since we don't have this info - # and we are exporting in a custom folder anyway, lets - _logger.info('Applying patch for %s', self.name) - p1 = subprocess.Popen(['git', '--git-dir=%s' % self.repo_id.path, 'diff', '%s...%s' % (export_sha, self.name)], stderr=subprocess.PIPE, stdout=subprocess.PIPE) - p2 = subprocess.Popen(['patch', '-p0', '-d', export_path], stdin=p1.stdout, stdout=subprocess.PIPE) - p1.stdout.close() - (message, err) = p2.communicate() - p1.poll() - if err: - shutil.rmtree(export_path) - raise RunbotException("Apply patch failed for %s...%s. (%s)" % (export_sha, self.name, err)) - if p1.returncode or p2.returncode: - shutil.rmtree(export_path) - raise RunbotException("Apply patch failed for %s...%s with error code %s+%s. 
(%s)" % (export_sha, self.name, p1.returncode, p2.returncode, message)) - - # migration scripts link if necessary - icp = self.env['ir.config_parameter'] - ln_param = icp.get_param('runbot_migration_ln', default='') - migration_repo_id = int(icp.get_param('runbot_migration_repo_id', default=0)) - if ln_param and migration_repo_id and self.repo_id.server_files: - scripts_dir = self.env['runbot.repo'].browse(migration_repo_id).name - try: - os.symlink('/data/build/%s' % scripts_dir, self._source_path(ln_param)) - except FileNotFoundError: - _logger.warning('Impossible to create migration symlink') - - return export_path - - def read_source(self, file, mode='r'): - file_path = self._source_path(file) - try: - with open(file_path, mode) as f: - return f.read() - except: - return False - - def _source_path(self, *path): - export_name = self.name - if self.rebase_on_id: - export_name = '%s_%s' % (self.name, self.rebase_on_id.name) - return os.path.join(self.env['runbot.runbot']._root(), 'sources', self.repo_id.name, export_name, *path) - - @api.depends('name', 'repo_id.name') - def _compute_dname(self): - for commit in self: - commit.dname = '%s:%s' % (commit.repo_id.name, commit.name[:8]) - - def _github_status(self, build, context, state, target_url, description=None): - self.ensure_one() - Status = self.env['runbot.commit.status'] - last_status = Status.search([('commit_id', '=', self.id), ('context', '=', context)], order='id desc', limit=1) - if last_status and last_status.state == state: - _logger.info('Skipping already sent status %s:%s for %s', context, state, self.name) - return - last_status = Status.create({ - 'build_id': build.id if build else False, - 'commit_id': self.id, - 'context': context, - 'state': state, - 'target_url': target_url, - 'description': description or context, - 'to_process': True, - }) - - -class CommitLink(models.Model): - _name = 'runbot.commit.link' - _description = "Build commit" - - commit_id = fields.Many2one('runbot.commit', 'Commit', required=True, index=True) - # Link info - match_type = fields.Selection([('new', 'New head of branch'), ('head', 'Head of branch'), ('base_head', 'Found on base branch'), ('base_match', 'Found on base branch')]) # HEAD, DEFAULT - branch_id = fields.Many2one('runbot.branch', string='Found in branch') # Shouldn't be use for anything else than display - - base_commit_id = fields.Many2one('runbot.commit', 'Base head commit', index=True) - merge_base_commit_id = fields.Many2one('runbot.commit', 'Merge Base commit', index=True) - base_behind = fields.Integer('# commits behind base') - base_ahead = fields.Integer('# commits ahead base') - file_changed = fields.Integer('# file changed') - diff_add = fields.Integer('# line added') - diff_remove = fields.Integer('# line removed') - - -class CommitStatus(models.Model): - _name = 'runbot.commit.status' - _description = 'Commit status' - _order = 'id desc' - - commit_id = fields.Many2one('runbot.commit', string='Commit', required=True, index=True) - context = fields.Char('Context', required=True) - state = fields.Char('State', required=True, copy=True) - build_id = fields.Many2one('runbot.build', string='Build', index=True) - target_url = fields.Char('Url') - description = fields.Char('Description') - sent_date = fields.Datetime('Sent Date') - to_process = fields.Boolean('Status was not processed yet', index=True) - - def _send_to_process(self): - commits_status = self.search([('to_process', '=', True)], order='create_date DESC, id DESC') - if commits_status: - _logger.info('Sending %s 
commit status', len(commits_status)) - commits_status._send() - - def _send(self): - session_cache = {} - processed = set() - for commit_status in self.sorted(lambda cs: (cs.create_date, cs.id), reverse=True): # ensure most recent are processed first - commit_status.to_process = False - # only send the last status for each commit+context - key = (commit_status.context, commit_status.commit_id.name) - if key not in processed: - processed.add(key) - status = { - 'context': commit_status.context, - 'state': commit_status.state, - 'target_url': commit_status.target_url, - 'description': commit_status.description, - } - for remote in commit_status.commit_id.repo_id.remote_ids.filtered('send_status'): - if not remote.token: - _logger.warning('No token on remote %s, skipping status', remote.mapped("name")) - else: - if remote.token not in session_cache: - session_cache[remote.token] = remote._make_github_session() - session = session_cache[remote.token] - _logger.info( - "github updating %s status %s to %s in repo %s", - status['context'], commit_status.commit_id.name, status['state'], remote.name) - remote._github('/repos/:owner/:repo/statuses/%s' % commit_status.commit_id.name, - status, - ignore_errors=True, - session=session - ) - commit_status.sent_date = fields.Datetime.now() - else: - _logger.info('Skipping outdated status for %s %s', commit_status.context, commit_status.commit_id.name) - - - -class CommitExport(models.Model): - _name = 'runbot.commit.export' - _description = 'Commit export' - - build_id = fields.Many2one('runbot.build', index=True) - commit_id = fields.Many2one('runbot.commit') - - host = fields.Char(related='build_id.host', store=True) diff --git a/runbot/models/custom_trigger.py b/runbot/models/custom_trigger.py deleted file mode 100644 index cdb4955a..00000000 --- a/runbot/models/custom_trigger.py +++ /dev/null @@ -1,97 +0,0 @@ -import json - -from odoo import models, fields, api -from ..fields import JsonDictField - -class BundleTriggerCustomization(models.Model): - _name = 'runbot.bundle.trigger.custom' - _description = 'Custom trigger' - - trigger_id = fields.Many2one('runbot.trigger') - start_mode = fields.Selection([('disabled', 'Disabled'), ('auto', 'Auto'), ('force', 'Force')], required=True, default='auto') - bundle_id = fields.Many2one('runbot.bundle') - config_id = fields.Many2one('runbot.build.config') - extra_params = fields.Char("Custom parameters") - config_data = JsonDictField("Config data") - - _sql_constraints = [ - ( - "bundle_custom_trigger_unique", - "unique (bundle_id, trigger_id)", - "Only one custom trigger per trigger per bundle is allowed", - ) - ] - -class CustomTriggerWizard(models.TransientModel): - _name = 'runbot.trigger.custom.wizard' - _description = 'Custom trigger Wizard' - - bundle_id = fields.Many2one('runbot.bundle', "Bundle") - project_id = fields.Many2one(related='bundle_id.project_id', string='Project') - trigger_id = fields.Many2one('runbot.trigger', domain="[('project_id', '=', project_id)]") - config_id = fields.Many2one('runbot.build.config', string="Config id", default=lambda self: self.env.ref('runbot.runbot_build_config_custom_multi')) - - config_data = JsonDictField("Config data") - - number_build = fields.Integer('Number builds for config multi', default=10) - - child_extra_params = fields.Char('Extra params for children', default='--test-tags /module.test_method') - child_dump_url = fields.Char('Dump url for children') - child_config_id = fields.Many2one('runbot.build.config', 'Config for children', default=lambda self: 
self.env.ref('runbot.runbot_build_config_restore_and_test')) - - warnings = fields.Text('Warnings', readonly=True) - - @api.onchange('child_extra_params', 'child_dump_url', 'child_config_id', 'number_build', 'config_id', 'trigger_id') - def _onchange_warnings(self): - for wizard in self: - _warnings = [] - if wizard._get_existing_trigger(): - _warnings.append(f'A custom trigger already exists for trigger {wizard.trigger_id.name} and will be unlinked') - - if wizard.child_dump_url or wizard.child_extra_params or wizard.child_config_id or wizard.number_build: - if not any(step.job_type == 'create_build' for step in wizard.config_id.step_ids()): - _warnings.append('Some multi builds params are given but config as no create step') - - if wizard.child_dump_url and not any(step.job_type == 'restore' for step in wizard.child_config_id.step_ids()): - _warnings.append('A dump_url is defined but child config has no restore step') - - if not wizard.child_dump_url and any(step.job_type == 'restore' for step in wizard.child_config_id.step_ids()): - _warnings.append('Child config has a restore step but no dump_url is given') - - if not wizard.trigger_id.manual: - _warnings.append("This custom trigger will replace an existing non manual trigger. The ci won't be sent anymore") - - wizard.warnings = '\n'.join(_warnings) - - @api.onchange('number_build', 'child_extra_params', 'child_dump_url', 'child_config_id') - def _onchange_config_data(self): - for wizard in self: - wizard.config_data = self._get_config_data() - - def _get_config_data(self): - config_data = {} - if self.number_build: - config_data['number_build'] = self.number_build - child_data = {} - if self.child_extra_params: - child_data['extra_params'] = self.child_extra_params - if self.child_dump_url: - child_data['config_data'] = {'dump_url': self.child_dump_url} - if self.child_config_id: - child_data['config_id'] = self.child_config_id.id - if child_data: - config_data['child_data'] = child_data - return config_data - - def _get_existing_trigger(self): - return self.env['runbot.bundle.trigger.custom'].search([('bundle_id', '=', self.bundle_id.id), ('trigger_id', '=', self.trigger_id.id)]) - - def submit(self): - self.ensure_one() - self._get_existing_trigger().unlink() - self.env['runbot.bundle.trigger.custom'].create({ - 'bundle_id': self.bundle_id.id, - 'trigger_id': self.trigger_id.id, - 'config_id': self.config_id.id, - 'config_data': self.config_data, - }) diff --git a/runbot/models/database.py b/runbot/models/database.py deleted file mode 100644 index 98948f1a..00000000 --- a/runbot/models/database.py +++ /dev/null @@ -1,23 +0,0 @@ -import logging -from odoo import models, fields, api -_logger = logging.getLogger(__name__) - - -class Database(models.Model): - _name = 'runbot.database' - _description = "Database" - - name = fields.Char('Host name', required=True) - build_id = fields.Many2one('runbot.build', index=True, required=True) - db_suffix = fields.Char(compute='_compute_db_suffix') - - def _compute_db_suffix(self): - for record in self: - record.db_suffix = record.name.replace('%s-' % record.build_id.dest, '') - - @api.model_create_single - def create(self, values): - res = self.search([('name', '=', values['name']), ('build_id', '=', values['build_id'])]) - if res: - return res - return super().create(values) diff --git a/runbot/models/dockerfile.py b/runbot/models/dockerfile.py deleted file mode 100644 index f08d8475..00000000 --- a/runbot/models/dockerfile.py +++ /dev/null @@ -1,55 +0,0 @@ -import logging -import re -from odoo 
import models, fields, api -from odoo.addons.base.models.qweb import QWebException - -_logger = logging.getLogger(__name__) - - -class Dockerfile(models.Model): - _name = 'runbot.dockerfile' - _inherit = [ 'mail.thread' ] - _description = "Dockerfile" - - name = fields.Char('Dockerfile name', required=True, help="Name of Dockerfile") - image_tag = fields.Char(compute='_compute_image_tag', store=True) - template_id = fields.Many2one('ir.ui.view', string='Docker Template', domain=[('type', '=', 'qweb')], context={'default_type': 'qweb', 'default_arch_base': '<t></t>'}) - arch_base = fields.Text(related='template_id.arch_base', readonly=False) - dockerfile = fields.Text(compute='_compute_dockerfile', tracking=True) - to_build = fields.Boolean('To Build', help='Build Dockerfile. Check this when the Dockerfile is ready.', default=False) - version_ids = fields.One2many('runbot.version', 'dockerfile_id', string='Versions') - description = fields.Text('Description') - view_ids = fields.Many2many('ir.ui.view', compute='_compute_view_ids') - project_ids = fields.One2many('runbot.project', 'dockerfile_id', string='Default for Projects') - bundle_ids = fields.One2many('runbot.bundle', 'dockerfile_id', string='Used in Bundles') - - _sql_constraints = [('runbot_dockerfile_name_unique', 'unique(name)', 'A Dockerfile with this name already exists')] - - @api.returns('self', lambda value: value.id) - def copy(self, default=None): - copied_record = super().copy(default={'name': '%s (copy)' % self.name, 'to_build': False}) - copied_record.template_id = self.template_id.copy() - copied_record.template_id.name = '%s (copy)' % copied_record.template_id.name - copied_record.template_id.key = '%s (copy)' % copied_record.template_id.key - return copied_record - - @api.depends('template_id.arch_base') - def _compute_dockerfile(self): - for rec in self: - try: - res = rec.template_id._render() if rec.template_id else '' - rec.dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip() - except QWebException: - rec.dockerfile = '' - - @api.depends('name') - def _compute_image_tag(self): - for rec in self: - if rec.name: - rec.image_tag = 'odoo:%s' % re.sub(r'[ /:\(\)\[\]]', '', rec.name) - - @api.depends('template_id') - def _compute_view_ids(self): - for rec in self: - keys = re.findall(r'<t.+t-call="(.+)".+', rec.arch_base or '') - rec.view_ids = self.env['ir.ui.view'].search([('type', '=', 'qweb'), ('key', 'in', keys)]).ids diff --git a/runbot/models/event.py b/runbot/models/event.py deleted file mode 100644 index 01269e32..00000000 --- a/runbot/models/event.py +++ /dev/null @@ -1,230 +0,0 @@ -# -*- coding: utf-8 -*- - -import logging - -from collections import defaultdict - -from ..common import pseudo_markdown -from odoo import models, fields, tools -from odoo.exceptions import UserError - -_logger = logging.getLogger(__name__) - -TYPES = [(t, t.capitalize()) for t in 'client server runbot subbuild link markdown'.split()] - - -class runbot_event(models.Model): - - _inherit = "ir.logging" - _order = 'id' - - build_id = fields.Many2one('runbot.build', 'Build', index=True, ondelete='cascade') - active_step_id = fields.Many2one('runbot.build.config.step', 'Active step', index=True) - type = fields.Selection(selection_add=TYPES, string='Type', required=True, index=True, ondelete={t[0]: 'cascade' for t in TYPES}) - error_id = fields.Many2one('runbot.build.error', compute='_compute_known_error') # remember to never store this field - dbname = fields.Char(string='Database Name', index=False) - - - def init(self): - 
parent_class = super(runbot_event, self) - if hasattr(parent_class, 'init'): - parent_class.init() - - self._cr.execute(""" -CREATE OR REPLACE FUNCTION runbot_set_logging_build() RETURNS TRIGGER AS $runbot_set_logging_build$ -BEGIN - IF (NEW.build_id IS NULL AND NEW.dbname IS NOT NULL AND NEW.dbname != current_database()) THEN - NEW.build_id := split_part(NEW.dbname, '-', 1)::integer; - SELECT active_step INTO NEW.active_step_id FROM runbot_build WHERE runbot_build.id = NEW.build_id; - END IF; - IF (NEW.build_id IS NOT NULL) AND (NEW.type = 'server') THEN - DECLARE - counter INTEGER; - BEGIN - UPDATE runbot_build b - SET log_counter = log_counter - 1 - WHERE b.id = NEW.build_id; - SELECT log_counter - INTO counter - FROM runbot_build - WHERE runbot_build.id = NEW.build_id; - IF (counter = 0) THEN - NEW.message = 'Log limit reached (full logs are still available in the log file)'; - NEW.level = 'SEPARATOR'; - NEW.func = ''; - NEW.type = 'runbot'; - RETURN NEW; - ELSIF (counter < 0) THEN - RETURN NULL; - END IF; - END; - END IF; - IF (NEW.build_id IS NOT NULL AND UPPER(NEW.level) NOT IN ('INFO', 'SEPARATOR')) THEN - BEGIN - UPDATE runbot_build b - SET triggered_result = CASE WHEN UPPER(NEW.level) = 'WARNING' THEN 'warn' - ELSE 'ko' - END - WHERE b.id = NEW.build_id; - END; - END IF; -RETURN NEW; -END; -$runbot_set_logging_build$ language plpgsql; - -DROP TRIGGER IF EXISTS runbot_new_logging ON ir_logging; -CREATE TRIGGER runbot_new_logging BEFORE INSERT ON ir_logging -FOR EACH ROW EXECUTE PROCEDURE runbot_set_logging_build(); - - """) - - def _markdown(self): - """ Apply pseudo markdown parser for message. - """ - self.ensure_one() - return pseudo_markdown(self.message) - - - def _compute_known_error(self): - cleaning_regexes = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')]) - fingerprints = defaultdict(list) - for ir_logging in self: - ir_logging.error_id = False - if ir_logging.level == 'ERROR' and ir_logging.type == 'server': - fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes.r_sub('%', ir_logging.message))].append(ir_logging) - for build_error in self.env['runbot.build.error'].search([('fingerprint', 'in', list(fingerprints.keys()))]): - for ir_logging in fingerprints[build_error.fingerprint]: - ir_logging.error_id = build_error.id - -class RunbotErrorLog(models.Model): - _name = 'runbot.error.log' - _description = "Error log" - _auto = False - _order = 'id desc' - - id = fields.Many2one('ir.logging', string='Log', readonly=True) - name = fields.Char(string='Module', readonly=True) - message = fields.Text(string='Message', readonly=True) - summary = fields.Text(string='Summary', readonly=True) - log_type = fields.Char(string='Type', readonly=True) - log_create_date = fields.Datetime(string='Log create date', readonly=True) - func = fields.Char(string='Method', readonly=True) - path = fields.Char(string='Path', readonly=True) - line = fields.Char(string='Line', readonly=True) - build_id = fields.Many2one('runbot.build', string='Build', readonly=True) - dest = fields.Char(string='Build dest', readonly=True) - local_state = fields.Char(string='Local state', readonly=True) - local_result = fields.Char(string='Local result', readonly=True) - global_state = fields.Char(string='Global state', readonly=True) - global_result = fields.Char(string='Global result', readonly=True) - bu_create_date = fields.Datetime(string='Build create date', readonly=True) - host = fields.Char(string='Host', readonly=True) - parent_id = fields.Many2one('runbot.build', 
string='Parent build', readonly=True) - top_parent_id = fields.Many2one('runbot.build', string="Top parent", readonly=True) - bundle_ids = fields.Many2many('runbot.bundle', compute='_compute_bundle_id', search='_search_bundle', string='Bundle', readonly=True) - sticky = fields.Boolean(string='Bundle Sticky', compute='_compute_bundle_id', search='_search_sticky', readonly=True) - build_url = fields.Char(compute='_compute_build_url', readonly=True) - - def _compute_repo_short_name(self): - for l in self: - l.repo_short_name = '%s/%s' % (l.repo_id.owner, l.repo_id.repo_name) - - def _compute_build_url(self): - for l in self: - l.build_url = '/runbot/build/%s' % l.build_id.id - - def action_goto_build(self): - self.ensure_one() - return { - "type": "ir.actions.act_url", - "url": "runbot/build/%s" % self.build_id.id, - "target": "new", - } - - def _compute_bundle_id(self): - slots = self.env['runbot.batch.slot'].search([('build_id', 'in', self.mapped('top_parent_id').ids)]) - for l in self: - l.bundle_ids = slots.filtered(lambda rec: rec.build_id.id == l.top_parent_id.id).batch_id.bundle_id - l.sticky = any(l.bundle_ids.filtered('sticky')) - - def _search_bundle(self, operator, value): - query = """ - SELECT id - FROM runbot_build as build - WHERE EXISTS( - SELECT * FROM runbot_batch_slot as slot - JOIN - runbot_batch batch ON batch.id = slot.batch_id - JOIN - runbot_bundle bundle ON bundle.id = batch.bundle_id - %s - """ - if operator in ('ilike', '=', 'in'): - value = '%%%s%%' % value if operator == 'ilike' else value - col_name = 'id' if operator == 'in' else 'name' - where_condition = "WHERE slot.build_id = build.id AND bundle.%s %s any(%%s));" if operator == 'in' else "WHERE slot.build_id = build.id AND bundle.%s %s %%s);" - operator = '=' if operator == 'in' else operator - where_condition = where_condition % (col_name, operator) - query = query % where_condition - self.env.cr.execute(query, (value,)) - build_ids = [t[0] for t in self.env.cr.fetchall()] - return [('top_parent_id', 'in', build_ids)] - - raise UserError('Operator `%s` not implemented for bundle search' % operator) - - def search_count(self, args): - return 4242 # hack to speed up the view - - def _search_sticky(self, operator, value): - if operator == '=': - self.env.cr.execute(""" - SELECT id - FROM runbot_build as build - WHERE EXISTS( - SELECT * FROM runbot_batch_slot as slot - JOIN - runbot_batch batch ON batch.id = slot.batch_id - JOIN - runbot_bundle bundle ON bundle.id = batch.bundle_id - WHERE - bundle.sticky = %s AND slot.build_id = build.id); - """, (value,)) - build_ids = [t[0] for t in self.env.cr.fetchall()] - return [('top_parent_id', 'in', build_ids)] - return [] - - def _parse_logs(self): - BuildError = self.env['runbot.build.error'] - return BuildError._parse_logs(self) - - def init(self): - """ Create an SQL view for ir.logging """ - tools.drop_view_if_exists(self._cr, 'runbot_error_log') - self._cr.execute(""" CREATE VIEW runbot_error_log AS ( - SELECT - l.id AS id, - l.name AS name, - l.message AS message, - left(l.message, 50) as summary, - l.type AS log_type, - l.create_date AS log_create_date, - l.func AS func, - l.path AS path, - l.line AS line, - bu.id AS build_id, - bu.dest AS dest, - bu.local_state AS local_state, - bu.local_result AS local_result, - bu.global_state AS global_state, - bu.global_result AS global_result, - bu.create_date AS bu_create_date, - bu.host AS host, - bu.parent_id AS parent_id, - split_part(bu.parent_path, '/',1)::int AS top_parent_id - FROM - ir_logging AS l - JOIN - 
runbot_build bu ON l.build_id = bu.id - WHERE - l.level = 'ERROR' - )""") diff --git a/runbot/models/host.py b/runbot/models/host.py deleted file mode 100644 index 8c879a11..00000000 --- a/runbot/models/host.py +++ /dev/null @@ -1,146 +0,0 @@ -import logging -import getpass -from odoo import models, fields, api -from odoo.tools import config -from ..common import fqdn, local_pgadmin_cursor, os -from ..container import docker_build -_logger = logging.getLogger(__name__) - -forced_host_name = None - -class Host(models.Model): - _name = 'runbot.host' - _description = "Host" - _order = 'id' - _inherit = 'mail.thread' - - name = fields.Char('Host name', required=True) - disp_name = fields.Char('Display name') - active = fields.Boolean('Active', default=True, tracking=True) - last_start_loop = fields.Datetime('Last start') - last_end_loop = fields.Datetime('Last end') - last_success = fields.Datetime('Last success') - assigned_only = fields.Boolean('Only accept assigned build', default=False, tracking=True) - nb_worker = fields.Integer( - 'Number of max paralel build', - default=lambda self: self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_workers', default=2), - tracking=True - ) - nb_testing = fields.Integer(compute='_compute_nb') - nb_running = fields.Integer(compute='_compute_nb') - last_exception = fields.Char('Last exception') - exception_count = fields.Integer('Exception count') - psql_conn_count = fields.Integer('SQL connections count', default=0) - - def _compute_nb(self): - groups = self.env['runbot.build'].read_group( - [('host', 'in', self.mapped('name')), ('local_state', 'in', ('testing', 'running'))], - ['host', 'local_state'], - ['host', 'local_state'], - lazy=False - ) - count_by_host_state = {host.name: {} for host in self} - for group in groups: - count_by_host_state[group['host']][group['local_state']] = group['__count'] - for host in self: - host.nb_testing = count_by_host_state[host.name].get('testing', 0) - host.nb_running = count_by_host_state[host.name].get('running', 0) - - @api.model_create_single - def create(self, values): - if 'disp_name' not in values: - values['disp_name'] = values['name'] - return super().create(values) - - def _bootstrap_db_template(self): - """ boostrap template database if needed """ - icp = self.env['ir.config_parameter'] - db_template = icp.get_param('runbot.runbot_db_template', default='template0') - if db_template and db_template != 'template0': - with local_pgadmin_cursor() as local_cr: - local_cr.execute("""SELECT datname FROM pg_catalog.pg_database WHERE datname = '%s';""" % db_template) - res = local_cr.fetchone() - if not res: - local_cr.execute("""CREATE DATABASE "%s" TEMPLATE template0 LC_COLLATE 'C' ENCODING 'unicode'""" % db_template) - # TODO UPDATE pg_database set datallowconn = false, datistemplate = true (but not enough privileges) - - def _bootstrap(self): - """ Create needed directories in static """ - dirs = ['build', 'nginx', 'repo', 'sources', 'src', 'docker'] - static_path = self._get_work_path() - static_dirs = {d: os.path.join(static_path, d) for d in dirs} - for dir, path in static_dirs.items(): - os.makedirs(path, exist_ok=True) - self._bootstrap_db_template() - - def _docker_build(self): - """ build docker images needed by locally pending builds""" - _logger.info('Building docker images...') - self.ensure_one() - static_path = self._get_work_path() - self.clear_caches() # needed to ensure that content is updated on all hosts - for dockerfile in self.env['runbot.dockerfile'].search([('to_build', '=', 
True)]): - self._docker_build_dockerfile(dockerfile, static_path) - - def _docker_build_dockerfile(self, dockerfile, workdir): - _logger.info('Building %s, %s', dockerfile.name, hash(str(dockerfile.dockerfile))) - docker_build_path = os.path.join(workdir, 'docker', dockerfile.image_tag) - os.makedirs(docker_build_path, exist_ok=True) - - user = getpass.getuser() - - docker_append = f""" - RUN groupadd -g {os.getgid()} {user} \\ - && useradd -u {os.getuid()} -g {user} -G audio,video {user} \\ - && mkdir /home/{user} \\ - && chown -R {user}:{user} /home/{user} - USER {user} - ENV COVERAGE_FILE /data/build/.coverage - """ - - with open(os.path.join(docker_build_path, 'Dockerfile'), 'w') as Dockerfile: - Dockerfile.write(dockerfile.dockerfile + docker_append) - - docker_build_success, msg = docker_build(docker_build_path, dockerfile.image_tag) - if not docker_build_success: - dockerfile.to_build = False - dockerfile.message_post(body=f'Build failure:\n{msg}') - self.env['runbot.runbot'].warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}') - - def _get_work_path(self): - return os.path.abspath(os.path.join(os.path.dirname(__file__), '../static')) - - @api.model - def _get_current(self): - name = self._get_current_name() - return self.search([('name', '=', name)]) or self.create({'name': name}) - - @api.model - def _get_current_name(self): - return config.get('forced_host_name') or fqdn() - - def get_running_max(self): - icp = self.env['ir.config_parameter'] - return int(icp.get_param('runbot.runbot_running_max', default=5)) - - def set_psql_conn_count(self): - _logger.info('Updating psql connection count...') - self.ensure_one() - with local_pgadmin_cursor() as local_cr: - local_cr.execute("SELECT sum(numbackends) FROM pg_stat_database;") - res = local_cr.fetchone() - self.psql_conn_count = res and res[0] or 0 - - def _total_testing(self): - return sum(host.nb_testing for host in self) - - def _total_workers(self): - return sum(host.nb_worker for host in self) - - def disable(self): - """ Reserve host if possible """ - self.ensure_one() - nb_hosts = self.env['runbot.host'].search_count([]) - nb_reserved = self.env['runbot.host'].search_count([('assigned_only', '=', True)]) - if nb_reserved < (nb_hosts / 2): - self.assigned_only = True diff --git a/runbot/models/ir_cron.py b/runbot/models/ir_cron.py deleted file mode 100644 index 04e59eb2..00000000 --- a/runbot/models/ir_cron.py +++ /dev/null @@ -1,13 +0,0 @@ -import odoo -from dateutil.relativedelta import relativedelta - -from odoo import models, fields - -odoo.service.server.SLEEP_INTERVAL = 5 -odoo.addons.base.models.ir_cron._intervalTypes['seconds'] = lambda interval: relativedelta(seconds=interval) - - -class ir_cron(models.Model): - _inherit = "ir.cron" - - interval_type = fields.Selection(selection_add=[('seconds', 'Seconds')]) diff --git a/runbot/models/ir_ui_view.py b/runbot/models/ir_ui_view.py deleted file mode 100644 index 08e4ba3e..00000000 --- a/runbot/models/ir_ui_view.py +++ /dev/null @@ -1,15 +0,0 @@ -from ..common import s2human, s2human_long -from odoo import models -from odoo.http import request - - -class IrUiView(models.Model): - _inherit = ["ir.ui.view"] - - def _prepare_qcontext(self): - qcontext = super(IrUiView, self)._prepare_qcontext() - - if request and getattr(request, 'is_frontend', False): - qcontext['s2human'] = s2human - qcontext['s2human_long'] = s2human_long - return qcontext diff --git a/runbot/models/project.py b/runbot/models/project.py deleted file mode 100644 index 
10fb9585..00000000 --- a/runbot/models/project.py +++ /dev/null @@ -1,24 +0,0 @@ -from odoo import models, fields - - -class Project(models.Model): - _name = 'runbot.project' - _description = 'Project' - _order = 'sequence, id' - - name = fields.Char('Project name', required=True) - group_ids = fields.Many2many('res.groups', string='Required groups') - keep_sticky_running = fields.Boolean('Keep last sticky builds running') - trigger_ids = fields.One2many('runbot.trigger', 'project_id', string='Triggers') - dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, help="Project Default Dockerfile") - repo_ids = fields.One2many('runbot.repo', 'project_id', string='Repos') - sequence = fields.Integer('Sequence') - - -class Category(models.Model): - _name = 'runbot.category' - _description = 'Trigger category' - - name = fields.Char("Name") - icon = fields.Char("Font awesome icon") - view_id = fields.Many2one('ir.ui.view', "Link template") diff --git a/runbot/models/repo.py b/runbot/models/repo.py deleted file mode 100644 index c593d735..00000000 --- a/runbot/models/repo.py +++ /dev/null @@ -1,579 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import json -import logging -import re -import subprocess -import time - -import requests - -from pathlib import Path - -from odoo import models, fields, api -from ..common import os, RunbotException -from odoo.exceptions import UserError -from odoo.tools.safe_eval import safe_eval - -_logger = logging.getLogger(__name__) - - -def _sanitize(name): - for i in '@:/': - name = name.replace(i, '_') - return name - - -class Trigger(models.Model): - """ - List of repo parts that must be part of the same bundle - """ - - _name = 'runbot.trigger' - _inherit = 'mail.thread' - _description = 'Triggers' - - _order = 'sequence, id' - - sequence = fields.Integer('Sequence') - name = fields.Char("Name") - description = fields.Char("Description", help="Informative description") - project_id = fields.Many2one('runbot.project', string="Project id", required=True) # main/security/runbot - repo_ids = fields.Many2many('runbot.repo', relation='runbot_trigger_triggers', string="Triggers", domain="[('project_id', '=', project_id)]") - dependency_ids = fields.Many2many('runbot.repo', relation='runbot_trigger_dependencies', string="Dependencies") - config_id = fields.Many2one('runbot.build.config', string="Config", required=True) - batch_dependent = fields.Boolean('Batch Dependent', help="Force adding batch in build parameters to make it unique and give access to bundle") - - ci_context = fields.Char("Ci context", default='ci/runbot', tracking=True) - category_id = fields.Many2one('runbot.category', default=lambda self: self.env.ref('runbot.default_category', raise_if_not_found=False)) - version_domain = fields.Char(string="Version domain") - hide = fields.Boolean('Hide trigger on main page') - manual = fields.Boolean('Only start trigger manually', default=False) - - upgrade_dumps_trigger_id = fields.Many2one('runbot.trigger', string='Template/complement trigger', tracking=True) - upgrade_step_id = fields.Many2one('runbot.build.config.step', compute="_compute_upgrade_step_id", store=True) - ci_url = fields.Char("ci url") - ci_description = fields.Char("ci description") - has_stats = fields.Boolean('Has a make_stats config step', compute="_compute_has_stats", store=True) - - team_ids = fields.Many2many('runbot.team', string="Runbot Teams", help="Teams responsible of this trigger, mainly usefull for nightly") - active = fields.Boolean("Active", default=True) - - 
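repo.py, whose deleted source begins above, routes repo and remote names through the small `_sanitize()` helper before reusing them as on-disk directory names (`Repo.path`) or git remote names (`Remote.remote_name`). A standalone sketch of that normalization, using hypothetical names and a hypothetical static root instead of `runbot.runbot._root()`:

    import os

    def _sanitize(name):
        # Same helper as at the top of the deleted repo.py: replace characters that
        # are unsafe in directory and git remote names.
        for i in '@:/':
            name = name.replace(i, '_')
        return name

    # Hypothetical root; the real value comes from self.env['runbot.runbot']._root().
    ROOT = '/tmp/runbot_static'

    print(_sanitize('odoo/odoo'))                                    # odoo_odoo (as used for Remote.remote_name)
    print(os.path.join(ROOT, 'repo', _sanitize('odoo/enterprise')))  # /tmp/runbot_static/repo/odoo_enterprise (as used for Repo.path)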
@api.depends('config_id.step_order_ids.step_id.make_stats') - def _compute_has_stats(self): - for trigger in self: - trigger.has_stats = any(trigger.config_id.step_order_ids.step_id.mapped('make_stats')) - - @api.depends('upgrade_dumps_trigger_id', 'config_id', 'config_id.step_order_ids.step_id.job_type') - def _compute_upgrade_step_id(self): - for trigger in self: - trigger.upgrade_step_id = False - if trigger.upgrade_dumps_trigger_id: - trigger.upgrade_step_id = self._upgrade_step_from_config(trigger.config_id) - - def _upgrade_step_from_config(self, config): - upgrade_step = next((step_order.step_id for step_order in config.step_order_ids if step_order.step_id._is_upgrade_step()), False) - if not upgrade_step: - raise UserError('Upgrade trigger should have a config with step of type Configure Upgrade') - return upgrade_step - - def _reference_builds(self, bundle): - self.ensure_one() - if self.upgrade_step_id: # this is an upgrade trigger, add corresponding builds - custom_config = next((trigger_custom.config_id for trigger_custom in bundle.trigger_custom_ids if trigger_custom.trigger_id == self), False) - step = self._upgrade_step_from_config(custom_config) if custom_config else self.upgrade_step_id - refs_builds = step._reference_builds(bundle, self) - return [(4, b.id) for b in refs_builds] - return [] - - def get_version_domain(self): - if self.version_domain: - return safe_eval(self.version_domain) - return [] - - -class Remote(models.Model): - """ - Regroups repo and it duplicates (forks): odoo+odoo-dev for each repo - """ - _name = 'runbot.remote' - _description = 'Remote' - _order = 'sequence, id' - _inherit = 'mail.thread' - - name = fields.Char('Url', required=True, tracking=True) - repo_id = fields.Many2one('runbot.repo', required=True, tracking=True) - - owner = fields.Char(compute='_compute_base_infos', string='Repo Owner', store=True, readonly=True, tracking=True) - repo_name = fields.Char(compute='_compute_base_infos', string='Repo Name', store=True, readonly=True, tracking=True) - repo_domain = fields.Char(compute='_compute_base_infos', string='Repo domain', store=True, readonly=True, tracking=True) - - base_url = fields.Char(compute='_compute_base_url', string='Base URL', readonly=True, tracking=True) - - short_name = fields.Char('Short name', compute='_compute_short_name', tracking=True) - remote_name = fields.Char('Remote name', compute='_compute_remote_name', tracking=True) - - sequence = fields.Integer('Sequence', tracking=True) - fetch_heads = fields.Boolean('Fetch branches', default=True, tracking=True) - fetch_pull = fields.Boolean('Fetch PR', default=False, tracking=True) - send_status = fields.Boolean('Send status', default=False, tracking=True) - - token = fields.Char("Github token", groups="runbot.group_runbot_admin") - - @api.depends('name') - def _compute_base_infos(self): - for remote in self: - name = re.sub('.+@', '', remote.name) - name = re.sub('^https://', '', name) # support https repo style - name = re.sub('.git$', '', name) - name = name.replace(':', '/') - s = name.split('/') - remote.repo_domain = s[-3] - remote.owner = s[-2] - remote.repo_name = s[-1] - - @api.depends('repo_domain', 'owner', 'repo_name') - def _compute_base_url(self): - for remote in self: - remote.base_url = '%s/%s/%s' % (remote.repo_domain, remote.owner, remote.repo_name) - - @api.depends('name', 'base_url') - def _compute_short_name(self): - for remote in self: - remote.short_name = '/'.join(remote.base_url.split('/')[-2:]) - - def _compute_remote_name(self): - for remote in 
self: - remote.remote_name = _sanitize(remote.short_name) - - def create(self, values_list): - remote = super().create(values_list) - if not remote.repo_id.main_remote_id: - remote.repo_id.main_remote_id = remote - remote._cr.postcommit.add(remote.repo_id._update_git_config) - return remote - - def write(self, values): - res = super().write(values) - self._cr.postcommit.add(self.repo_id._update_git_config) - return res - - def _make_github_session(self): - session = requests.Session() - if self.token: - session.auth = (self.token, 'x-oauth-basic') - session.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'}) - return session - - def _github(self, url, payload=None, ignore_errors=False, nb_tries=2, recursive=False, session=None): - generator = self.sudo()._github_generator(url, payload=payload, ignore_errors=ignore_errors, nb_tries=nb_tries, recursive=recursive, session=session) - if recursive: - return generator - result = list(generator) - return result[0] if result else False - - def _github_generator(self, url, payload=None, ignore_errors=False, nb_tries=2, recursive=False, session=None): - """Return a http request to be sent to github""" - for remote in self: - if remote.owner and remote.repo_name and remote.repo_domain: - url = url.replace(':owner', remote.owner) - url = url.replace(':repo', remote.repo_name) - url = 'https://api.%s%s' % (remote.repo_domain, url) - session = session or remote._make_github_session() - while url: - if recursive: - _logger.info('Getting page %s', url) - try_count = 0 - while try_count < nb_tries: - try: - if payload: - response = session.post(url, data=json.dumps(payload)) - else: - response = session.get(url) - response.raise_for_status() - if try_count > 0: - _logger.info('Success after %s tries', (try_count + 1)) - if recursive: - link = response.headers.get('link') - url = False - if link: - url = {link.split(';')[1]: link.split(';')[0] for link in link.split(',')}.get(' rel="next"') - if url: - url = url.strip('<> ') - yield response.json() - break - else: - yield response.json() - return - except requests.HTTPError: - try_count += 1 - if try_count < nb_tries: - time.sleep(2) - else: - if ignore_errors: - _logger.exception('Ignored github error %s %r (try %s/%s)', url, payload, try_count, nb_tries) - url = False - else: - raise - - -class Repo(models.Model): - - _name = 'runbot.repo' - _description = "Repo" - _order = 'sequence, id' - _inherit = 'mail.thread' - - name = fields.Char("Name", tracking=True) # odoo/enterprise/upgrade/security/runbot/design_theme - identity_file = fields.Char("Identity File", help="Identity file to use with git/ssh", groups="runbot.group_runbot_admin") - main_remote_id = fields.Many2one('runbot.remote', "Main remote", tracking=True) - remote_ids = fields.One2many('runbot.remote', 'repo_id', "Remotes") - project_id = fields.Many2one('runbot.project', required=True, tracking=True, - help="Default bundle project to use when pushing on this repos", - default=lambda self: self.env.ref('runbot.main_project', raise_if_not_found=False)) - # -> not verry usefull, remove it? (iterate on projects or contraints triggers: - # all trigger where a repo is used must be in the same project. 
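    # The comment above suggests enforcing that every trigger using this repo stays in
    # the repo's project. A hedged sketch of such a constraint, not part of the removed
    # module (assumes runbot.trigger exposes `project_id` and `name`, and that
    # `from odoo.exceptions import ValidationError` is available at module level):
    @api.constrains('trigger_ids', 'project_id')
    def _check_triggers_same_project(self):
        for repo in self:
            foreign = repo.trigger_ids.filtered(
                lambda t: t.project_id and t.project_id != repo.project_id)
            if foreign:
                raise ValidationError(
                    "Triggers %s must belong to project %s"
                    % (', '.join(foreign.mapped('name')), repo.project_id.name))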
- modules = fields.Char("Modules to install", help="Comma-separated list of modules to install and test.", tracking=True) - server_files = fields.Char('Server files', help='Comma separated list of possible server files', tracking=True) # odoo-bin,openerp-server,openerp-server.py - manifest_files = fields.Char('Manifest files', help='Comma separated list of possible manifest files', default='__manifest__.py', tracking=True) - addons_paths = fields.Char('Addons paths', help='Comma separated list of possible addons path', default='', tracking=True) - - sequence = fields.Integer('Sequence', tracking=True) - path = fields.Char(compute='_get_path', string='Directory', readonly=True) - mode = fields.Selection([('disabled', 'Disabled'), - ('poll', 'Poll'), - ('hook', 'Hook')], - default='poll', - string="Mode", required=True, help="hook: Wait for webhook on /runbot/hook/<id> i.e. github push event", tracking=True) - hook_time = fields.Float('Last hook time', compute='_compute_hook_time') - last_processed_hook_time = fields.Float('Last processed hook time') - get_ref_time = fields.Float('Last refs db update', compute='_compute_get_ref_time') - trigger_ids = fields.Many2many('runbot.trigger', relation='runbot_trigger_triggers', readonly=True) - single_version = fields.Many2one('runbot.version', "Single version", help="Limit the repo to a single version for non versionned repo") - forbidden_regex = fields.Char('Forbidden regex', help="Regex that forid bundle creation if branch name is matching", tracking=True) - invalid_branch_message = fields.Char('Forbidden branch message', tracking=True) - - def _compute_get_ref_time(self): - self.env.cr.execute(""" - SELECT repo_id, time FROM runbot_repo_reftime - WHERE id IN ( - SELECT max(id) FROM runbot_repo_reftime - WHERE repo_id = any(%s) GROUP BY repo_id - ) - """, [self.ids]) - times = dict(self.env.cr.fetchall()) - for repo in self: - repo.get_ref_time = times.get(repo.id, 0) - - def _compute_hook_time(self): - self.env.cr.execute(""" - SELECT repo_id, time FROM runbot_repo_hooktime - WHERE id IN ( - SELECT max(id) FROM runbot_repo_hooktime - WHERE repo_id = any(%s) GROUP BY repo_id - ) - """, [self.ids]) - times = dict(self.env.cr.fetchall()) - - for repo in self: - repo.hook_time = times.get(repo.id, 0) - - def set_hook_time(self, value): - for repo in self: - self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id}) - self.invalidate_cache() - - def set_ref_time(self, value): - for repo in self: - self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id}) - self.invalidate_cache() - - def _gc_times(self): - self.env.cr.execute(""" - DELETE from runbot_repo_reftime WHERE id NOT IN ( - SELECT max(id) FROM runbot_repo_reftime GROUP BY repo_id - ) - """) - self.env.cr.execute(""" - DELETE from runbot_repo_hooktime WHERE id NOT IN ( - SELECT max(id) FROM runbot_repo_hooktime GROUP BY repo_id - ) - """) - - @api.depends('name') - def _get_path(self): - """compute the server path of repo from the name""" - root = self.env['runbot.runbot']._root() - for repo in self: - repo.path = os.path.join(root, 'repo', _sanitize(repo.name)) - - def _git(self, cmd, errors='strict'): - """Execute a git command 'cmd'""" - self.ensure_one() - config_args = [] - if self.identity_file: - config_args = ['-c', 'core.sshCommand=ssh -i %s/.ssh/%s' % (str(Path.home()), self.identity_file)] - cmd = ['git', '-C', self.path] + config_args + cmd - _logger.info("git command: %s", ' '.join(cmd)) - return subprocess.check_output(cmd, 
stderr=subprocess.STDOUT).decode(errors=errors) - - def _fetch(self, sha): - if not self._hash_exists(sha): - self._update(force=True) - if not self._hash_exists(sha): - for remote in self.remote_ids: - try: - self._git(['fetch', remote.remote_name, sha]) - _logger.info('Success fetching specific head %s on %s', sha, remote) - break - except subprocess.CalledProcessError: - pass - if not self._hash_exists(sha): - raise RunbotException("Commit %s is unreachable. Did you force push the branch?" % sha) - - def _hash_exists(self, commit_hash): - """ Verify that a commit hash exists in the repo """ - self.ensure_one() - try: - self._git(['cat-file', '-e', commit_hash]) - except subprocess.CalledProcessError: - return False - return True - - def _is_branch_forbidden(self, branch_name): - self.ensure_one() - if self.forbidden_regex: - return re.match(self.forbidden_regex, branch_name) - return False - - def _get_fetch_head_time(self): - self.ensure_one() - fname_fetch_head = os.path.join(self.path, 'FETCH_HEAD') - if os.path.exists(fname_fetch_head): - return os.path.getmtime(fname_fetch_head) - return 0 - - def _get_refs(self, max_age=30, ignore=None): - """Find new refs - :return: list of tuples with following refs informations: - name, sha, date, author, author_email, subject, committer, committer_email - """ - self.ensure_one() - get_ref_time = round(self._get_fetch_head_time(), 4) - commit_limit = time.time() - 60*60*24*max_age - if not self.get_ref_time or get_ref_time > self.get_ref_time: - try: - self.set_ref_time(get_ref_time) - fields = ['refname', 'objectname', 'committerdate:unix', 'authorname', 'authoremail', 'subject', 'committername', 'committeremail'] - fmt = "%00".join(["%(" + field + ")" for field in fields]) - cmd = ['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/*/heads/*'] - if any(remote.fetch_pull for remote in self.remote_ids): - cmd.append('refs/*/pull/*') - git_refs = self._git(cmd) - git_refs = git_refs.strip() - if not git_refs: - return [] - refs = [tuple(field for field in line.split('\x00')) for line in git_refs.split('\n')] - refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch'].match_is_base(r[0].split('/')[-1])] - if ignore: - refs = [r for r in refs if r[0].split('/')[-1] not in ignore] - return refs - except Exception: - _logger.exception('Fail to get refs for repo %s', self.name) - self.env['runbot.runbot'].warning('Fail to get refs for repo %s', self.name) - return [] - - def _find_or_create_branches(self, refs): - """Parse refs and create branches that does not exists yet - :param refs: list of tuples returned by _get_refs() - :return: dict {branch.name: branch.id} - The returned structure contains all the branches from refs newly created - or older ones. 
- """ - - # FIXME WIP - names = [r[0].split('/')[-1] for r in refs] - branches = self.env['runbot.branch'].search([('name', 'in', names), ('remote_id', 'in', self.remote_ids.ids)]) - ref_branches = {branch.ref(): branch for branch in branches} - new_branch_values = [] - for ref_name, sha, date, author, author_email, subject, committer, committer_email in refs: - if not ref_branches.get(ref_name): - # format example: - # refs/ruodoo-dev/heads/12.0-must-fail - # refs/ruodoo/pull/1 - _, remote_name, branch_type, name = ref_name.split('/') - remote_id = self.remote_ids.filtered(lambda r: r.remote_name == remote_name).id - if not remote_id: - _logger.warning('Remote %s not found', remote_name) - continue - new_branch_values.append({'remote_id': remote_id, 'name': name, 'is_pr': branch_type == 'pull'}) - # TODO catch error for pr info. It may fail for multiple raison. closed? external? check corner cases - _logger.info('new branch %s found in %s', name, self.name) - if new_branch_values: - _logger.info('Creating new branches') - new_branches = self.env['runbot.branch'].create(new_branch_values) - for branch in new_branches: - ref_branches[branch.ref()] = branch - return ref_branches - - def _find_new_commits(self, refs, ref_branches): - """Find new commits in bare repo - :param refs: list of tuples returned by _get_refs() - :param ref_branches: dict structure {branch.name: branch.id} - described in _find_or_create_branches - """ - self.ensure_one() - - for ref_name, sha, date, author, author_email, subject, committer, committer_email in refs: - branch = ref_branches[ref_name] - if branch.head_name != sha: # new push on branch - _logger.info('repo %s branch %s new commit found: %s', self.name, branch.name, sha) - - commit = self.env['runbot.commit']._get(sha, self.id, { - 'author': author, - 'author_email': author_email, - 'committer': committer, - 'committer_email': committer_email, - 'subject': subject, - 'date': datetime.datetime.fromtimestamp(int(date)), - }) - branch.head = commit - if not branch.alive: - if branch.is_pr: - _logger.info('Recomputing infos of dead pr %s', branch.name) - branch._compute_branch_infos() - else: - branch.alive = True - - if branch.reference_name and branch.remote_id and branch.remote_id.repo_id._is_branch_forbidden(branch.reference_name): - message = "This branch name is incorrect. 
Branch name should be prefixed with a valid version" - message = branch.remote_id.repo_id.invalid_branch_message or message - branch.head._github_status(False, "Branch naming", 'failure', False, message) - - bundle = branch.bundle_id - if bundle.no_build: - continue - - if bundle.last_batch.state != 'preparing': - preparing = self.env['runbot.batch'].create({ - 'last_update': fields.Datetime.now(), - 'bundle_id': bundle.id, - 'state': 'preparing', - }) - bundle.last_batch = preparing - - if bundle.last_batch.state == 'preparing': - bundle.last_batch._new_commit(branch) - - def _update_batches(self, force=False, ignore=None): - """ Find new commits in physical repos""" - updated = False - for repo in self: - if repo.remote_ids and self._update(poll_delay=30 if force else 60*5): - max_age = int(self.env['ir.config_parameter'].get_param('runbot.runbot_max_age', default=30)) - ref = repo._get_refs(max_age, ignore=ignore) - ref_branches = repo._find_or_create_branches(ref) - repo._find_new_commits(ref, ref_branches) - updated = True - return updated - - def _update_git_config(self): - """ Update repo git config file """ - for repo in self: - if repo.mode == 'disabled': - _logger.info(f'skipping disabled repo {repo.name}') - continue - if os.path.isdir(os.path.join(repo.path, 'refs')): - git_config_path = os.path.join(repo.path, 'config') - template_params = {'repo': repo} - git_config = self.env['ir.ui.view']._render_template("runbot.git_config", template_params) - with open(git_config_path, 'w') as config_file: - config_file.write(str(git_config)) - _logger.info('Config updated for repo %s' % repo.name) - else: - _logger.info('Repo not cloned, skiping config update for %s' % repo.name) - - def _git_init(self): - """ Clone the remote repo if needed """ - self.ensure_one() - repo = self - if not os.path.isdir(os.path.join(repo.path, 'refs')): - _logger.info("Initiating repository '%s' in '%s'" % (repo.name, repo.path)) - git_init = subprocess.run(['git', 'init', '--bare', repo.path], stderr=subprocess.PIPE) - if git_init.returncode: - _logger.warning('Git init failed with code %s and message: "%s"', git_init.returncode, git_init.stderr) - return - self._update_git_config() - return True - - def _update_git(self, force=False, poll_delay=5*60): - """ Update the git repo on FS """ - self.ensure_one() - repo = self - if not repo.remote_ids: - return False - if not os.path.isdir(os.path.join(repo.path)): - os.makedirs(repo.path) - force = self._git_init() or force - - fname_fetch_head = os.path.join(repo.path, 'FETCH_HEAD') - if not force and os.path.isfile(fname_fetch_head): - fetch_time = os.path.getmtime(fname_fetch_head) - if repo.mode == 'hook': - if not repo.hook_time or (repo.last_processed_hook_time and repo.hook_time <= repo.last_processed_hook_time): - return False - repo.last_processed_hook_time = repo.hook_time - if repo.mode == 'poll': - if (time.time() < fetch_time + poll_delay): - return False - - _logger.info('Updating repo %s', repo.name) - return self._update_fetch_cmd() - - def _update_fetch_cmd(self): - # Extracted from update_git to be easily overriden in external module - self.ensure_one() - try_count = 0 - success = False - delay = 0 - while not success and try_count < 5: - time.sleep(delay) - try: - self._git(['fetch', '-p', '--all', ]) - success = True - except subprocess.CalledProcessError as e: - try_count += 1 - delay = delay * 1.5 if delay else 0.5 - if try_count > 4: - message = 'Failed to fetch repo %s: %s' % (self.name, e.output.decode()) - host = 
self.env['runbot.host']._get_current() - host.message_post(body=message) - self.env['runbot.runbot'].warning('Host %s got reserved because of fetch failure' % host.name) - _logger.exception(message) - host.disable() - return success - - def _update(self, force=False, poll_delay=5*60): - """ Update the physical git reposotories on FS""" - self.ensure_one() - try: - return self._update_git(force, poll_delay) - except Exception: - _logger.exception('Fail to update repo %s', self.name) - -class RefTime(models.Model): - _name = 'runbot.repo.reftime' - _description = "Repo reftime" - _log_access = False - - time = fields.Float('Time', index=True, required=True) - repo_id = fields.Many2one('runbot.repo', 'Repository', required=True, ondelete='cascade') - - -class HookTime(models.Model): - _name = 'runbot.repo.hooktime' - _description = "Repo hooktime" - _log_access = False - - time = fields.Float('Time') - repo_id = fields.Many2one('runbot.repo', 'Repository', required=True, ondelete='cascade') diff --git a/runbot/models/res_config_settings.py b/runbot/models/res_config_settings.py deleted file mode 100644 index c66ce58b..00000000 --- a/runbot/models/res_config_settings.py +++ /dev/null @@ -1,111 +0,0 @@ -# -*- coding: utf-8 -*- -import re - -from .. import common -from odoo import api, fields, models -from odoo.exceptions import UserError - - -class ResConfigSettings(models.TransientModel): - _inherit = 'res.config.settings' - - runbot_workers = fields.Integer('Default number of workers') - runbot_containers_memory = fields.Float('Memory limit for containers (in GiB)') - runbot_memory_bytes = fields.Float('Bytes', compute='_compute_memory_bytes') - runbot_running_max = fields.Integer('Maximum number of running builds') - runbot_timeout = fields.Integer('Max allowed step timeout (in seconds)') - runbot_starting_port = fields.Integer('Starting port for running builds') - runbot_max_age = fields.Integer('Max commit age (in days)') - runbot_logdb_uri = fields.Char('Runbot URI for build logs', - help='postgres://user:password@host/db formated uri to give to a build to log in database. 
Should be a user with limited access rights (ir_logging, runbot_build)') - runbot_update_frequency = fields.Integer('Update frequency (in seconds)') - runbot_template = fields.Char('Postgresql template', help="Postgresql template to use when creating DB's") - runbot_message = fields.Text('Frontend warning message', help="Will be displayed on the frontend when not empty") - runbot_default_odoorc = fields.Text('Default odoorc for builds') - runbot_upgrade_exception_message = fields.Text('Upgrade exception message', help='Template to auto-generate a github message when creating an upgrade exception') - runbot_do_fetch = fields.Boolean('Discover new commits') - runbot_do_schedule = fields.Boolean('Schedule builds') - runbot_is_base_regex = fields.Char('Regex is_base') - - runbot_db_gc_days = fields.Integer( - 'Days before gc', - default=30, - config_parameter='runbot.db_gc_days', - help="Time after the build finished (running time included) to wait before droping db and non log files") - runbot_db_gc_days_child = fields.Integer( - 'Days before gc of child', - default=15, - config_parameter='runbot.db_gc_days_child', - help='Children should have a lower gc delay since the database usually comes from the parent or a multibuild') - runbot_full_gc_days = fields.Integer( - 'Days before directory removal', - default=365, - config_parameter='runbot.full_gc_days', - help='Number of days to wait after to first gc to completely remove build directory (remaining test/log files)') - - runbot_pending_warning = fields.Integer('Pending warning limit', default=5, config_parameter='runbot.pending.warning') - runbot_pending_critical = fields.Integer('Pending critical limit', default=5, config_parameter='runbot.pending.critical') - - # TODO other icp - # runbot.runbot_maxlogs 100 - # migration db - # ln path - - @api.model - def get_values(self): - res = super(ResConfigSettings, self).get_values() - get_param = self.env['ir.config_parameter'].sudo().get_param - res.update(runbot_workers=int(get_param('runbot.runbot_workers', default=2)), - runbot_containers_memory=float(get_param('runbot.runbot_containers_memory', default=0)), - runbot_running_max=int(get_param('runbot.runbot_running_max', default=5)), - runbot_timeout=int(get_param('runbot.runbot_timeout', default=10000)), - runbot_starting_port=int(get_param('runbot.runbot_starting_port', default=2000)), - runbot_max_age=int(get_param('runbot.runbot_max_age', default=30)), - runbot_logdb_uri=get_param('runbot.runbot_logdb_uri', default=False), - runbot_update_frequency=int(get_param('runbot.runbot_update_frequency', default=10)), - runbot_template=get_param('runbot.runbot_db_template'), - runbot_message=get_param('runbot.runbot_message', default=''), - runbot_default_odoorc=get_param('runbot.runbot_default_odoorc'), - runbot_upgrade_exception_message=get_param('runbot.runbot_upgrade_exception_message'), - runbot_do_fetch=get_param('runbot.runbot_do_fetch', default=False), - runbot_do_schedule=get_param('runbot.runbot_do_schedule', default=False), - runbot_is_base_regex=get_param('runbot.runbot_is_base_regex', default='') - ) - return res - - def set_values(self): - super(ResConfigSettings, self).set_values() - set_param = self.env['ir.config_parameter'].sudo().set_param - set_param("runbot.runbot_workers", self.runbot_workers) - set_param("runbot.runbot_containers_memory", self.runbot_containers_memory) - set_param("runbot.runbot_running_max", self.runbot_running_max) - set_param("runbot.runbot_timeout", self.runbot_timeout) - 
set_param("runbot.runbot_starting_port", self.runbot_starting_port) - set_param("runbot.runbot_max_age", self.runbot_max_age) - set_param("runbot.runbot_logdb_uri", self.runbot_logdb_uri) - set_param('runbot.runbot_update_frequency', self.runbot_update_frequency) - set_param('runbot.runbot_db_template', self.runbot_template) - set_param('runbot.runbot_message', self.runbot_message) - set_param('runbot.runbot_default_odoorc', self.runbot_default_odoorc) - set_param('runbot.runbot_upgrade_exception_message', self.runbot_upgrade_exception_message) - set_param('runbot.runbot_do_fetch', self.runbot_do_fetch) - set_param('runbot.runbot_do_schedule', self.runbot_do_schedule) - set_param('runbot.runbot_is_base_regex', self.runbot_is_base_regex) - - @api.onchange('runbot_is_base_regex') - def _on_change_is_base_regex(self): - """ verify that the base_regex is valid - """ - if self.runbot_is_base_regex: - try: - re.compile(self.runbot_is_base_regex) - except re.error: - raise UserError("The regex is invalid") - - @api.depends('runbot_containers_memory') - def _compute_memory_bytes(self): - for rec in self: - if rec.runbot_containers_memory > 0: - rec.runbot_memory_bytes = rec.runbot_containers_memory * 1024 ** 3 - else: - rec.runbot_memory_bytes = 0 diff --git a/runbot/models/res_users.py b/runbot/models/res_users.py deleted file mode 100644 index 3077c28e..00000000 --- a/runbot/models/res_users.py +++ /dev/null @@ -1,10 +0,0 @@ - -# Part of Odoo. See LICENSE file for full copyright and licensing details. - -from odoo import fields, models - - -class ResUsers(models.Model): - _inherit = 'res.users' - - runbot_team_ids = fields.Many2many('runbot.team', string="Runbot Teams") diff --git a/runbot/models/runbot.py b/runbot/models/runbot.py deleted file mode 100644 index 487ea669..00000000 --- a/runbot/models/runbot.py +++ /dev/null @@ -1,404 +0,0 @@ -import time -import logging -import glob -import random -import re -import signal -import subprocess -import shutil - -from contextlib import contextmanager -from requests.exceptions import HTTPError -from subprocess import CalledProcessError - -from ..common import fqdn, dest_reg, os -from ..container import docker_ps, docker_stop - -from odoo import models, fields -from odoo.osv import expression -from odoo.tools import config -from odoo.modules.module import get_module_resource - -_logger = logging.getLogger(__name__) - -# after this point, not realy a repo buisness -class Runbot(models.AbstractModel): - _name = 'runbot.runbot' - _description = 'Base runbot model' - - def _commit(self): - self.env.cr.commit() - self.env.cache.invalidate() - self.env.clear() - - def _root(self): - """Return root directory of repository""" - default = os.path.join(os.path.dirname(__file__), '../static') - return os.path.abspath(default) - - def _scheduler(self, host): - self._gc_testing(host) - self._commit() - for build in self._get_builds_with_requested_actions(host): - build._process_requested_actions() - self._commit() - for build in self._get_builds_to_schedule(host): - build._schedule() - self._commit() - self._assign_pending_builds(host, host.nb_worker, [('build_type', '!=', 'scheduled')]) - self._commit() - self._assign_pending_builds(host, host.nb_worker-1 or host.nb_worker) - self._commit() - for build in self._get_builds_to_init(host): - build._init_pendings(host) - self._commit() - self._gc_running(host) - self._commit() - self._reload_nginx() - - def build_domain_host(self, host, domain=None): - domain = domain or [] - return [('host', '=', host.name)] + 
domain - - def _get_builds_with_requested_actions(self, host): - return self.env['runbot.build'].search(self.build_domain_host(host, [('requested_action', 'in', ['wake_up', 'deathrow'])])) - - def _get_builds_to_schedule(self, host): - return self.env['runbot.build'].search(self.build_domain_host(host, [('local_state', 'in', ['testing', 'running'])])) - - def _assign_pending_builds(self, host, nb_worker, domain=None): - if host.assigned_only or nb_worker <= 0: - return - domain_host = self.build_domain_host(host) - reserved_slots = self.env['runbot.build'].search_count(domain_host + [('local_state', 'in', ('testing', 'pending'))]) - assignable_slots = (nb_worker - reserved_slots) - if assignable_slots > 0: - allocated = self._allocate_builds(host, assignable_slots, domain) - if allocated: - - _logger.info('Builds %s where allocated to runbot', allocated) - - def _get_builds_to_init(self, host): - domain_host = self.build_domain_host(host) - used_slots = self.env['runbot.build'].search_count(domain_host + [('local_state', '=', 'testing')]) - available_slots = host.nb_worker - used_slots - if available_slots <= 0: - return self.env['runbot.build'] - return self.env['runbot.build'].search(domain_host + [('local_state', '=', 'pending')], limit=available_slots) - - def _gc_running(self, host): - running_max = host.get_running_max() - domain_host = self.build_domain_host(host) - Build = self.env['runbot.build'] - cannot_be_killed_ids = Build.search(domain_host + [('keep_running', '=', True)]).ids - sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True), ('project_id.keep_sticky_running', '=', True)]) - cannot_be_killed_ids += [ - build.id - for build in sticky_bundles.mapped('last_batchs.slot_ids.build_id') - if build.host == host.name - ][:running_max] - build_ids = Build.search(domain_host + [('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids - Build.browse(build_ids)[running_max:]._kill() - - def _gc_testing(self, host): - """garbage collect builds that could be killed""" - # decide if we need room - Build = self.env['runbot.build'] - domain_host = self.build_domain_host(host) - testing_builds = Build.search(domain_host + [('local_state', 'in', ['testing', 'pending']), ('requested_action', '!=', 'deathrow')]) - used_slots = len(testing_builds) - available_slots = host.nb_worker - used_slots - nb_pending = Build.search_count([('local_state', '=', 'pending'), ('host', '=', False)]) - if available_slots > 0 or nb_pending == 0: - return - - for build in testing_builds: - if build.killable: - build.top_parent._ask_kill(message='Build automatically killed, new build found.') - - def _allocate_builds(self, host, nb_slots, domain=None): - if nb_slots <= 0: - return [] - non_allocated_domain = [('local_state', '=', 'pending'), ('host', '=', False)] - if domain: - non_allocated_domain = expression.AND([non_allocated_domain, domain]) - e = expression.expression(non_allocated_domain, self.env['runbot.build']) - query = e.query - query.order = '"runbot_build".parent_path' - select_query, select_params = query.select() - # self-assign to be sure that another runbot batch cannot self assign the same builds - query = """UPDATE - runbot_build - SET - host = %%s - WHERE - runbot_build.id IN ( - %s - FOR UPDATE OF runbot_build SKIP LOCKED - LIMIT %%s - ) - RETURNING id""" % select_query - self.env.cr.execute(query, [host.name] + select_params + [nb_slots]) - return self.env.cr.fetchall() - - def _reload_nginx(self): - env = self.env - settings 
= {} - settings['port'] = config.get('http_port') - settings['runbot_static'] = os.path.join(get_module_resource('runbot', 'static'), '') - settings['base_url'] = self.get_base_url() - nginx_dir = os.path.join(self._root(), 'nginx') - settings['nginx_dir'] = nginx_dir - settings['re_escape'] = re.escape - host_name = self.env['runbot.host']._get_current_name() - settings['host_name'] = self.env['runbot.host']._get_current_name() - - settings['builds'] = env['runbot.build'].search([('local_state', '=', 'running'), ('host', '=', host_name)]) - - nginx_config = env['ir.ui.view']._render_template("runbot.nginx_config", settings) - os.makedirs(nginx_dir, exist_ok=True) - content = None - nginx_conf_path = os.path.join(nginx_dir, 'nginx.conf') - content = '' - if os.path.isfile(nginx_conf_path): - with open(nginx_conf_path, 'r') as f: - content = f.read() - if content != nginx_config: - _logger.info('reload nginx') - with open(nginx_conf_path, 'w') as f: - f.write(str(nginx_config)) - try: - pid = int(open(os.path.join(nginx_dir, 'nginx.pid')).read().strip(' \n')) - os.kill(pid, signal.SIGHUP) - except Exception: - _logger.info('start nginx') - if subprocess.call(['/usr/sbin/nginx', '-p', nginx_dir, '-c', 'nginx.conf']): - # obscure nginx bug leaving orphan worker listening on nginx port - if not subprocess.call(['pkill', '-f', '-P1', 'nginx: worker']): - _logger.warning('failed to start nginx - orphan worker killed, retrying') - subprocess.call(['/usr/sbin/nginx', '-p', nginx_dir, '-c', 'nginx.conf']) - else: - _logger.warning('failed to start nginx - failed to kill orphan worker - oh well') - - def _get_cron_period(self): - """ Compute a randomized cron period with a 2 min margin below - real cron timeout from config. - """ - cron_limit = config.get('limit_time_real_cron') - req_limit = config.get('limit_time_real') - cron_timeout = cron_limit if cron_limit > -1 else req_limit - return cron_timeout / 2 - - def _cron(self): - """ - This method is the default cron for new commit discovery and build sheduling. 
- The cron runs for a long time to avoid spamming logs - """ - pull_info_failures = {} - start_time = time.time() - timeout = self._get_cron_period() - get_param = self.env['ir.config_parameter'].get_param - update_frequency = int(get_param('runbot.runbot_update_frequency', default=10)) - runbot_do_fetch = get_param('runbot.runbot_do_fetch') - runbot_do_schedule = get_param('runbot.runbot_do_schedule') - host = self.env['runbot.host']._get_current() - host.set_psql_conn_count() - host.last_start_loop = fields.Datetime.now() - self._commit() - # Bootstrap - host._bootstrap() - if runbot_do_schedule: - host._docker_build() - self._source_cleanup() - self.env['runbot.build']._local_cleanup() - self._docker_cleanup() - _logger.info('Starting loop') - if runbot_do_schedule or runbot_do_fetch: - while time.time() - start_time < timeout: - if runbot_do_fetch: - self._fetch_loop_turn(host, pull_info_failures) - if runbot_do_schedule: - sleep_time = self._scheduler_loop_turn(host, update_frequency) - self.sleep(sleep_time) - else: - self.sleep(update_frequency) - self._commit() - - host.last_end_loop = fields.Datetime.now() - - def sleep(self, t): - time.sleep(t) - - def _fetch_loop_turn(self, host, pull_info_failures, default_sleep=1): - with self.manage_host_exception(host) as manager: - repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')]) - processing_batch = self.env['runbot.batch'].search([('state', 'in', ('preparing', 'ready'))], order='id asc') - preparing_batch = processing_batch.filtered(lambda b: b.state == 'preparing') - self._commit() - for repo in repos: - try: - repo._update_batches(force=bool(preparing_batch), ignore=pull_info_failures) - self._commit() # commit is mainly here to avoid to lose progression in case of fetch failure or concurrent update - except HTTPError as e: - # Sometimes a pr pull info can fail. - # - Most of the time it is only temporary and it will be successfull on next try. - # - In some rare case the pr will always fail (github inconsistency) The pr exists in git (for-each-ref) but not on github api. - # For this rare case, we store the pr in memory in order to unstuck other pr/branches update. - # We consider that this error should not remain, in this case github needs to fix this inconsistency. - # Another solution would be to create the pr with fake pull info. This idea is not the best one - # since we want to avoid to have many pr with fake pull_info in case of temporary failure of github services. - # With this solution, the pr will be retried once every cron loop (~10 minutes). - # We dont except to have pr with this kind of persistent failure more than every few mounths/years. 
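# A hedged, standalone illustration of the bookkeeping described above (not part of
# the removed module): a failing PR is stamped with the time it was first seen,
# ignored while that entry is fresh, and dropped after ~15 minutes so it gets retried
# on a later loop, e.g.:
#     pull_info_failures[pr_number] = time.time()               # record the failure
#     skip = pr_number in pull_info_failures                    # ignore while fresh
#     for pr in [p for p, t in pull_info_failures.items()
#                if t + 15 * 60 < time.time()]:
#         del pull_info_failures[pr]                            # expire, allow retry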
- self.env.cr.rollback() - self.env.clear() - pull_number = e.response.url.split('/')[-1] - pull_info_failures[pull_number] = time.time() - self.warning('Pr pull info failed for %s', pull_number) - self._commit() - - if processing_batch: - for batch in processing_batch: - if batch._process(): - self._commit() - self._commit() - - self.env['runbot.commit.status']._send_to_process() - self._commit() - - # cleanup old pull_info_failures - for pr_number, t in pull_info_failures.items(): - if t + 15*60 < time.time(): - _logger.warning('Removing %s from pull_info_failures', pr_number) - del pull_info_failures[pr_number] - - - return manager.get('sleep', default_sleep) - - def _scheduler_loop_turn(self, host, default_sleep=5): - _logger.info('Scheduling...') - with self.manage_host_exception(host) as manager: - self._scheduler(host) - return manager.get('sleep', default_sleep) - - @contextmanager - def manage_host_exception(self, host): - res = {} - try: - yield res - host.last_success = fields.Datetime.now() - self._commit() - except Exception as e: - self.env.cr.rollback() - self.env.clear() - _logger.exception(e) - message = str(e) - if host.last_exception == message: - host.exception_count += 1 - else: - host.last_exception = str(e) - host.exception_count = 1 - self._commit() - res['sleep'] = random.uniform(0, 3) - else: - if host.last_exception: - host.last_exception = "" - host.exception_count = 0 - - def _source_cleanup(self): - try: - if self.pool._init: - return - _logger.info('Source cleaning') - - host_name = self.env['runbot.host']._get_current_name() - cannot_be_deleted_path = set() - for commit in self.env['runbot.commit.export'].search([('host', '=', host_name)]).mapped('commit_id'): - cannot_be_deleted_path.add(commit._source_path()) - - - # the following part won't be usefull anymore once runbot.commit.export is populated - cannot_be_deleted_builds = self.env['runbot.build'].search([('host', '=', host_name), ('local_state', '!=', 'done')]) - cannot_be_deleted_builds |= cannot_be_deleted_builds.mapped('params_id.builds_reference_ids') - for build in cannot_be_deleted_builds: - for build_commit in build.params_id.commit_link_ids: - cannot_be_deleted_path.add(build_commit.commit_id._source_path()) - - to_delete = set() - to_keep = set() - repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')]) - for repo in repos: - repo_source = os.path.join(self._root(), 'sources', repo.name, '*') - for source_dir in glob.glob(repo_source): - if source_dir not in cannot_be_deleted_path: - to_delete.add(source_dir) - else: - to_keep.add(source_dir) - - # we are comparing cannot_be_deleted_path with to keep to sensure that the algorithm is working, we want to avoid to erase file by mistake - # note: it is possible that a parent_build is in testing without checkouting sources, but it should be exceptions - if to_delete: - if cannot_be_deleted_path != to_keep: - _logger.warning('Inconsistency between sources and database: \n%s \n%s' % (cannot_be_deleted_path-to_keep, to_keep-cannot_be_deleted_path)) - to_delete = list(to_delete) - to_keep = list(to_keep) - cannot_be_deleted_path = list(cannot_be_deleted_path) - for source_dir in to_delete: - _logger.info('Deleting source: %s' % source_dir) - assert 'static' in source_dir - shutil.rmtree(source_dir) - _logger.info('%s/%s source folder where deleted (%s kept)' % (len(to_delete), len(to_delete+to_keep), len(to_keep))) - except: - _logger.exception('An exception occured while cleaning sources') - pass - - def _docker_cleanup(self): - 
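# Stops containers still running for builds already in the 'done' state and logs any
# container whose name does not match the expected dest format.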
_logger.info('Docker cleaning') - docker_ps_result = docker_ps() - - containers = {} - ignored = [] - for dc in docker_ps_result: - build = self.env['runbot.build']._build_from_dest(dc) - if build: - containers[build.id] = dc - if containers: - candidates = self.env['runbot.build'].search([('id', 'in', list(containers.keys())), ('local_state', '=', 'done')]) - for c in candidates: - _logger.info('container %s found running with build state done', containers[c.id]) - docker_stop(containers[c.id], c._path()) - ignored = {dc for dc in docker_ps_result if not dest_reg.match(dc)} - if ignored: - _logger.info('docker (%s) not deleted because not dest format', list(ignored)) - - def _git_gc(self, host): - """ - cleanup and optimize git repositories on the host - """ - for repo in self.env['runbot.repo'].search([]): - try: - repo._git(['gc', '--prune=all', '--quiet']) - except CalledProcessError as e: - message = f'git gc failed for {repo.name} on {host.name} with exit status {e.returncode} and message "{e.output[:60]} ..."' - self.warning(message) - - def warning(self, message, *args): - if args: - message = message % args - existing = self.env['runbot.warning'].search([('message', '=', message)], limit=1) - if existing: - existing.count += 1 - else: - return self.env['runbot.warning'].create({'message': message}) - - -class RunbotWarning(models.Model): - """ - Generic Warnings for runbot - """ - _order = 'write_date desc, id desc' - - _name = 'runbot.warning' - _description = 'Generic Runbot Warning' - - message = fields.Char("Warning", index=True) - count = fields.Integer("Count", default=1) diff --git a/runbot/models/upgrade.py b/runbot/models/upgrade.py deleted file mode 100644 index d8918ccd..00000000 --- a/runbot/models/upgrade.py +++ /dev/null @@ -1,70 +0,0 @@ -import re -from odoo import models, fields -from odoo.exceptions import UserError - - -class UpgradeExceptions(models.Model): - _name = 'runbot.upgrade.exception' - _description = 'Upgrade exception' - - active = fields.Boolean('Active', default=True) - elements = fields.Text('Elements') - bundle_id = fields.Many2one('runbot.bundle', index=True) - info = fields.Text('Info') - team_id = fields.Many2one('runbot.team', 'Assigned team', index=True) - message = fields.Text('Upgrade exception message', compute="_compute_message") - - def _compute_message(self): - message_layout = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_upgrade_exception_message') - for exception in self: - exception.message = message_layout.format(exception=exception, base_url=exception.get_base_url()) - - def _generate(self): - exceptions = self.search([]) - if exceptions: - return 'suppress_upgrade_warnings=%s' % (','.join(exceptions.mapped('elements'))).replace(' ', '').replace('\n', ',') - return False - - -class UpgradeRegex(models.Model): - _name = 'runbot.upgrade.regex' - _description = 'Upgrade regex' - - active = fields.Boolean('Active', default=True) - prefix = fields.Char('Type') - regex = fields.Char('Regex') - - -class BuildResult(models.Model): - _inherit = 'runbot.build' - - def _parse_upgrade_errors(self): - ir_logs = self.env['ir.logging'].search([('level', 'in', ('ERROR', 'WARNING', 'CRITICAL')), ('type', '=', 'server'), ('build_id', 'in', self.ids)]) - - upgrade_regexes = self.env['runbot.upgrade.regex'].search([]) - exception = {} - for log in ir_logs: - for upgrade_regex in upgrade_regexes: - m = re.search(upgrade_regex.regex, log.message) - if m: - exception['%s:%s' % (upgrade_regex.prefix, m.groups()[0])] = None - exception = 
list(exception) - if exception: - bundle = False - batches = self.top_parent.slot_ids.mapped('batch_id') - if batches: - bundle = batches[0].bundle_id.id - res = { - 'name': 'Upgrade Exception', - 'type': 'ir.actions.act_window', - 'res_model': 'runbot.upgrade.exception', - 'view_mode': 'form', - 'context': { - 'default_elements': '\n'.join(exception), - 'default_bundle_id': bundle, - 'default_info': 'Automatically generated from build %s' % self.id - } - } - return res - else: - raise UserError('Nothing found here') diff --git a/runbot/models/user.py b/runbot/models/user.py deleted file mode 100644 index 1c14c773..00000000 --- a/runbot/models/user.py +++ /dev/null @@ -1,10 +0,0 @@ - -from odoo import models, fields - - -class User(models.Model): - _inherit = 'res.users' - - # Add default action_id - action_id = fields.Many2one('ir.actions.actions', - default=lambda self: self.env.ref('runbot.open_view_warning_tree', raise_if_not_found=False)) diff --git a/runbot/models/version.py b/runbot/models/version.py deleted file mode 100644 index e381e5c0..00000000 --- a/runbot/models/version.py +++ /dev/null @@ -1,105 +0,0 @@ -import logging -import re -from odoo import models, fields, api, tools - - -_logger = logging.getLogger(__name__) - - -class Version(models.Model): - _name = 'runbot.version' - _description = "Version" - _order = 'sequence desc, number desc,id' - - name = fields.Char('Version name') - number = fields.Char('Version number', compute='_compute_version_number', store=True, help="Usefull to sort by version") - sequence = fields.Integer('sequence') - is_major = fields.Char('Is major version', compute='_compute_version_number', store=True) - - base_bundle_id = fields.Many2one('runbot.bundle', compute='_compute_base_bundle_id') - - previous_major_version_id = fields.Many2one('runbot.version', compute='_compute_version_relations') - intermediate_version_ids = fields.Many2many('runbot.version', compute='_compute_version_relations') - next_major_version_id = fields.Many2one('runbot.version', compute='_compute_version_relations') - next_intermediate_version_ids = fields.Many2many('runbot.version', compute='_compute_version_relations') - - dockerfile_id = fields.Many2one('runbot.dockerfile', default=lambda self: self.env.ref('runbot.docker_default', raise_if_not_found=False)) - - @api.depends('name') - def _compute_version_number(self): - for version in self: - if version.name == 'master': - version.number = '~' - version.is_major = False - else: - # max version number with this format: 99.99 - version.number = '.'.join([elem.zfill(2) for elem in re.sub(r'[^0-9\.]', '', version.name or '').split('.')]) - version.is_major = all(elem == '00' for elem in version.number.split('.')[1:]) - - @api.model_create_multi - def create(self, vals_list): - model = self.browse() - model._get_id.clear_cache(model) - return super().create(vals_list) - - def _get(self, name): - return self.browse(self._get_id(name)) - - @tools.ormcache('name') - def _get_id(self, name): - version = self.search([('name', '=', name)]) - if not version: - version = self.create({ - 'name': name, - }) - return version.id - - @api.depends('is_major', 'number') - def _compute_version_relations(self): - all_versions = self.search([], order='sequence, number') - for version in self: - version.previous_major_version_id = next( - ( - v - for v in reversed(all_versions) - if v.is_major and v.number < version.number and v.sequence <= version.sequence # TODO FIXME, make version comparable? 
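# note: `number` is zero-padded per component by _compute_version_number above
# (e.g. '13.00'), so these plain string comparisons order like version numbers;
# 'master' maps to '~', which sorts after any digit.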
- ), self.browse()) - if version.previous_major_version_id: - version.intermediate_version_ids = all_versions.filtered( - lambda v, current=version: v.number > current.previous_major_version_id.number and v.number < current.number and v.sequence <= current.sequence and v.sequence >= current.previous_major_version_id.sequence - ) - else: - version.intermediate_version_ids = all_versions.filtered( - lambda v, current=version: v.number < current.number and v.sequence <= current.sequence - ) - version.next_major_version_id = next( - ( - v - for v in all_versions - if (v.is_major or v.name == 'master') and v.number > version.number and v.sequence >= version.sequence - ), self.browse()) - if version.next_major_version_id: - version.next_intermediate_version_ids = all_versions.filtered( - lambda v, current=version: v.number < current.next_major_version_id.number and v.number > current.number and v.sequence <= current.next_major_version_id.sequence and v.sequence >= current.sequence - ) - else: - version.next_intermediate_version_ids = all_versions.filtered( - lambda v, current=version: v.number > current.number and v.sequence >= current.sequence - ) - - # @api.depends('base_bundle_id.is_base', 'base_bundle_id.version_id', 'base_bundle_id.project_id') - @api.depends_context('project_id') - def _compute_base_bundle_id(self): - project_id = self.env.context.get('project_id') - if not project_id: - _logger.warning("_compute_base_bundle_id: no project_id in context") - project_id = self.env.ref('runbot.main_project').id - - bundles = self.env['runbot.bundle'].search([ - ('version_id', 'in', self.ids), - ('is_base', '=', True), - ('project_id', '=', project_id) - ]) - bundle_by_version = {bundle.version_id.id: bundle for bundle in bundles} - for version in self: - version.base_bundle_id = bundle_by_version.get(version.id) diff --git a/runbot/security/ir.model.access.csv b/runbot/security/ir.model.access.csv deleted file mode 100644 index c2227dc8..00000000 --- a/runbot/security/ir.model.access.csv +++ /dev/null @@ -1,118 +0,0 @@ -id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink -access_runbot_remote,runbot_remote,runbot.model_runbot_remote,group_user,1,0,0,0 -access_runbot_branch,runbot_branch,runbot.model_runbot_branch,group_user,1,0,0,0 -access_runbot_build,runbot_build,runbot.model_runbot_build,group_user,1,0,0,0 -access_runbot_remote_admin,runbot_remote_admin,runbot.model_runbot_remote,runbot.group_runbot_admin,1,1,1,1 -access_runbot_branch_admin,runbot_branch_admin,runbot.model_runbot_branch,runbot.group_runbot_admin,1,1,1,1 -access_runbot_build_admin,runbot_build_admin,runbot.model_runbot_build,runbot.group_runbot_admin,1,1,1,1 -access_irlogging,log by runbot users,base.model_ir_logging,group_user,0,0,1,0 - -access_runbot_build_config_step_user,runbot_build_config_step_user,runbot.model_runbot_build_config_step,group_user,1,0,0,0 -access_runbot_build_config_step_manager,runbot_build_config_step_manager,runbot.model_runbot_build_config_step,runbot.group_build_config_user,1,1,1,1 - -access_runbot_build_config_user,runbot_build_config_user,runbot.model_runbot_build_config,group_user,1,0,0,0 -access_runbot_build_config_manager,runbot_build_config_manager,runbot.model_runbot_build_config,runbot.group_build_config_user,1,1,1,1 - -access_runbot_build_config_step_order_user,runbot_build_config_step_order_user,runbot.model_runbot_build_config_step_order,group_user,1,0,0,0 
-access_runbot_build_config_step_order_manager,runbot_build_config_step_order_manager,runbot.model_runbot_build_config_step_order,runbot.group_build_config_user,1,1,1,1 - -access_runbot_config_step_upgrade_db_user,runbot_config_step_upgrade_db_user,runbot.model_runbot_config_step_upgrade_db,group_user,1,0,0,0 -access_runbot_config_step_upgrade_db_manager,runbot_config_step_upgrade_db_manager,runbot.model_runbot_config_step_upgrade_db,runbot.group_build_config_user,1,1,1,1 - -access_runbot_build_error_user,runbot_build_error_user,runbot.model_runbot_build_error,group_user,1,0,0,0 -access_runbot_build_error_admin,runbot_build_error_admin,runbot.model_runbot_build_error,runbot.group_runbot_admin,1,1,1,1 -access_runbot_build_error_manager,runbot_build_error_manager,runbot.model_runbot_build_error,runbot.group_runbot_error_manager,1,1,1,1 -access_runbot_build_error_tag_user,runbot_build_error_tag_user,runbot.model_runbot_build_error_tag,group_user,1,0,0,0 -access_runbot_build_error_tag_admin,runbot_build_error_tag_admin,runbot.model_runbot_build_error_tag,runbot.group_runbot_admin,1,1,1,1 -access_runbot_build_error_tag_manager,runbot_build_error_tag_manager,runbot.model_runbot_build_error_tag,runbot.group_runbot_error_manager,1,1,1,1 -access_runbot_team_admin,runbot_team_admin,runbot.model_runbot_team,runbot.group_runbot_admin,1,1,1,1 -access_runbot_team_user,runbot_team_user,runbot.model_runbot_team,group_user,1,0,0,0 -access_runbot_dashboard_admin,runbot_dashboard_admin,runbot.model_runbot_dashboard,runbot.group_runbot_admin,1,1,1,1 -access_runbot_dashboard_user,runbot_dashboard_user,runbot.model_runbot_dashboard,group_user,1,0,0,0 -access_runbot_dashboard_tile_admin,runbot_dashboard_tile_admin,runbot.model_runbot_dashboard_tile,runbot.group_runbot_admin,1,1,1,1 -access_runbot_dashboard_tile_user,runbot_dashboard_tile_user,runbot.model_runbot_dashboard_tile,group_user,1,0,0,0 - -access_runbot_error_regex_user,runbot_error_regex_user,runbot.model_runbot_error_regex,group_user,1,0,0,0 -access_runbot_error_regex_manager,runbot_error_regex_manager,runbot.model_runbot_error_regex,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_host_user,runbot_host_user,runbot.model_runbot_host,group_user,1,0,0,0 -access_runbot_host_manager,runbot_host_manager,runbot.model_runbot_host,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_error_log_user,runbot_error_log_user,runbot.model_runbot_error_log,group_user,1,0,0,0 -access_runbot_error_log_manager,runbot_error_log_manager,runbot.model_runbot_error_log,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_repo_hooktime,runbot_repo_hooktime,runbot.model_runbot_repo_hooktime,group_user,1,0,0,0 -access_runbot_repo_referencetime,runbot_repo_referencetime,runbot.model_runbot_repo_reftime,group_user,1,0,0,0 - -access_runbot_build_stat_user,runbot_build_stat_user,runbot.model_runbot_build_stat,group_user,1,0,0,0 -access_runbot_build_stat_admin,runbot_build_stat_admin,runbot.model_runbot_build_stat,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_build_stat_regex_user,access_runbot_build_stat_regex_user,runbot.model_runbot_build_stat_regex,runbot.group_user,1,0,0,0 -access_runbot_build_stat_regex_admin,access_runbot_build_stat_regex_admin,runbot.model_runbot_build_stat_regex,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_trigger_user,access_runbot_trigger_user,runbot.model_runbot_trigger,runbot.group_user,1,0,0,0 -access_runbot_trigger_runbot_admin,access_runbot_trigger_runbot_admin,runbot.model_runbot_trigger,runbot.group_runbot_admin,1,1,1,1 - 
-access_runbot_repo_user,access_runbot_repo_user,runbot.model_runbot_repo,runbot.group_user,1,0,0,0 -access_runbot_repo_runbot_admin,access_runbot_repo_runbot_admin,runbot.model_runbot_repo,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_commit_user,access_runbot_commit_user,runbot.model_runbot_commit,runbot.group_user,1,0,0,0 - -access_runbot_build_params_user,access_runbot_build_params_user,runbot.model_runbot_build_params,runbot.group_user,1,0,0,0 -access_runbot_build_params_runbot_admin,access_runbot_build_params_runbot_admin,runbot.model_runbot_build_params,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_commit_link_user,access_runbot_commit_link_user,runbot.model_runbot_commit_link,runbot.group_user,1,0,0,0 -access_runbot_commit_link_runbot_admin,access_runbot_commit_link_runbot_admin,runbot.model_runbot_commit_link,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_version_user,access_runbot_version_user,runbot.model_runbot_version,runbot.group_user,1,0,0,0 -access_runbot_version_runbot_admin,access_runbot_version_runbot_admin,runbot.model_runbot_version,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_project_user,access_runbot_project_user,runbot.model_runbot_project,runbot.group_user,1,0,0,0 -access_runbot_project_runbot_admin,access_runbot_project_runbot_admin,runbot.model_runbot_project,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_bundle_user,access_runbot_bundle_user,runbot.model_runbot_bundle,runbot.group_user,1,0,0,0 -access_runbot_bundle_runbot_admin,access_runbot_bundle_runbot_admin,runbot.model_runbot_bundle,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_batch_user,access_runbot_batch_user,runbot.model_runbot_batch,runbot.group_user,1,0,0,0 -access_runbot_batch_runbot_admin,access_runbot_batch_runbot_admin,runbot.model_runbot_batch,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_batch_slot_user,access_runbot_batch_slot_user,runbot.model_runbot_batch_slot,runbot.group_user,1,0,0,0 -access_runbot_batch_slot_runbot_admin,access_runbot_batch_slot_runbot_admin,runbot.model_runbot_batch_slot,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_ref_log_runbot_user,access_runbot_ref_log_runbot_user,runbot.model_runbot_ref_log,runbot.group_user,1,0,0,0 -access_runbot_ref_log_runbot_admin,access_runbot_ref_log_runbot_admin,runbot.model_runbot_ref_log,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_commit_status_runbot_user,access_runbot_commit_status_runbot_user,runbot.model_runbot_commit_status,runbot.group_user,1,0,0,0 -access_runbot_commit_status_runbot_admin,access_runbot_commit_status_runbot_admin,runbot.model_runbot_commit_status,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_bundle_trigger_custom_runbot_user,access_runbot_bundle_trigger_custom_runbot_user,runbot.model_runbot_bundle_trigger_custom,runbot.group_user,1,0,0,0 -access_runbot_bundle_trigger_custom_runbot_admin,access_runbot_bundle_trigger_custom_runbot_admin,runbot.model_runbot_bundle_trigger_custom,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_category_runbot_user,access_runbot_category_runbot_user,runbot.model_runbot_category,runbot.group_user,1,0,0,0 -access_runbot_category_runbot_admin,access_runbot_category_runbot_admin,runbot.model_runbot_category,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_batch_log_runbot_user,access_runbot_batch_log_runbot_user,runbot.model_runbot_batch_log,runbot.group_user,1,0,0,0 - -access_runbot_warning_user,access_runbot_warning_user,runbot.model_runbot_warning,runbot.group_user,1,0,0,0 
-access_runbot_warning_admin,access_runbot_warning_admin,runbot.model_runbot_warning,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_database_user,access_runbot_database_user,runbot.model_runbot_database,runbot.group_user,1,0,0,0 -access_runbot_database_admin,access_runbot_database_admin,runbot.model_runbot_database,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_upgrade_regex_user,access_runbot_upgrade_regex_user,runbot.model_runbot_upgrade_regex,runbot.group_user,1,0,0,0 -access_runbot_upgrade_regex_admin,access_runbot_upgrade_regex_admin,runbot.model_runbot_upgrade_regex,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_upgrade_exception_user,access_runbot_upgrade_exception_user,runbot.model_runbot_upgrade_exception,runbot.group_user,1,0,0,0 -access_runbot_upgrade_exception_admin,access_runbot_upgrade_exception_admin,runbot.model_runbot_upgrade_exception,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_dockerfile_user,access_runbot_dockerfile_user,runbot.model_runbot_dockerfile,runbot.group_user,1,0,0,0 -access_runbot_dockerfile_admin,access_runbot_dockerfile_admin,runbot.model_runbot_dockerfile,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_codeowner_admin,runbot_codeowner_admin,runbot.model_runbot_codeowner,runbot.group_runbot_admin,1,1,1,1 -access_runbot_codeowner_user,runbot_codeowner_user,runbot.model_runbot_codeowner,group_user,1,0,0,0 - -access_runbot_commit_export_admin,runbot_commit_export_admin,runbot.model_runbot_commit_export,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_trigger_custom_wizard,access_runbot_trigger_custom_wizard,model_runbot_trigger_custom_wizard,runbot.group_runbot_admin,1,1,1,1 - -access_runbot_build_stat_regex_wizard,access_runbot_build_stat_regex_wizard,model_runbot_build_stat_regex_wizard,runbot.group_runbot_admin,1,1,1,1 diff --git a/runbot/security/ir.rule.csv b/runbot/security/ir.rule.csv deleted file mode 100644 index 0e6bd929..00000000 --- a/runbot/security/ir.rule.csv +++ /dev/null @@ -1,14 +0,0 @@ -id,name,model_id/id,groups/id,domain_force,perm_read,perm_create,perm_write,perm_unlink - - -rule_project,"limited to groups",model_runbot_project,group_user,"['|', ('group_ids', '=', False), ('group_ids', 'in', [g.id for g in user.groups_id])]",1,1,1,1 -rule_project_mgmt,"manager can see all",model_runbot_project,group_runbot_admin,"[(1, '=', 1)]",1,1,1,1 - -rule_repo,"limited to groups",model_runbot_repo,group_user,"['|', ('project_id.group_ids', '=', False), ('project_id.group_ids', 'in', [g.id for g in user.groups_id])]",1,1,1,1 -rule_repo_mgmt,"manager can see all",model_runbot_repo,group_runbot_admin,"[(1, '=', 1)]",1,1,1,1 -rule_branch,"limited to groups",model_runbot_branch,group_user,"['|', ('remote_id.repo_id.project_id.group_ids', '=', False), ('remote_id.repo_id.project_id.group_ids', 'in', [g.id for g in user.groups_id])]",1,1,1,1 -rule_branch_mgmt,"manager can see all",model_runbot_branch,group_runbot_admin,"[(1, '=', 1)]",1,1,1,1 -rule_commit,"limited to groups",model_runbot_commit,group_user,"['|', ('repo_id.project_id.group_ids', '=', False), ('repo_id.project_id.group_ids', 'in', [g.id for g in user.groups_id])]",1,1,1,1 -rule_commit_mgmt,"manager can see all",model_runbot_commit,group_runbot_admin,"[(1, '=', 1)]",1,1,1,1 -rule_build,"limited to groups",model_runbot_build,group_user,"['|', ('params_id.project_id.group_ids', '=', False), ('params_id.project_id.group_ids', 'in', [g.id for g in user.groups_id])]",1,1,1,1 -rule_build_mgmt,"manager can see all",model_runbot_build,group_runbot_admin,"[(1, '=', 1)]",1,1,1,1 
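As a side note on the record rules above: a minimal sketch, assuming an Odoo shell with `env` available, of how the group-based domain of `rule_project` can be evaluated by hand (the browsed user id is hypothetical):

    from odoo.tools.safe_eval import safe_eval

    # domain taken from rule_project above; the rule engine injects `user`
    # into the evaluation context, which we mimic here
    rule_domain = ("['|', ('group_ids', '=', False), "
                   "('group_ids', 'in', [g.id for g in user.groups_id])]")

    user = env['res.users'].browse(42)  # hypothetical non-admin user
    domain = safe_eval(rule_domain, {'user': user})
    # sudo() bypasses ACLs, so only the hand-applied rule domain filters the result
    visible = env['runbot.project'].sudo().search(domain)
    print(visible.mapped('name'))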
diff --git a/runbot/security/runbot_security.xml b/runbot/security/runbot_security.xml deleted file mode 100644 index ca0a3e6b..00000000 --- a/runbot/security/runbot_security.xml +++ /dev/null @@ -1,123 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <record model="ir.module.category" id="module_project"> - <field name="name">Runbot</field> - </record> - - <record id="group_user" model="res.groups"> - <field name="name">User</field> - <field name="category_id" ref="module_project"/> - <!-- as public user is inactive, it wont be automatically added - to this group via implied groups. add it manually --> - <field name="users" eval="[(4, ref('base.public_user'))]"/> - </record> - - <record id="base.group_public" model="res.groups"> - <field name="implied_ids" eval="[(4, ref('runbot.group_user'))]"/> - </record> - - <record id="base.group_user" model="res.groups"> - <field name="implied_ids" eval="[(4, ref('runbot.group_user'))]"/> - </record> - - <record id="base.group_portal" model="res.groups"> - <field name="implied_ids" eval="[(4, ref('runbot.group_user'))]"/> - </record> - - <record model="ir.module.category" id="build_config_project"> - <field name="name">Build Config</field> - </record> - - <record id="group_build_config_user" model="res.groups"> - <field name="name">Build config user</field> - <field name="category_id" ref="build_config_project"/> - </record> - - <record id="group_build_config_manager" model="res.groups"> - <field name="name">Build config manager</field> - <field name="category_id" ref="build_config_project"/> - <field name="implied_ids" eval="[(4, ref('runbot.group_build_config_user'))]"/> - </record> - - <record id="group_build_config_administrator" model="res.groups"> - <field name="name">Build config administrator</field> - <field name="category_id" ref="build_config_project"/> - <field name="implied_ids" eval="[(4, ref('runbot.group_build_config_manager'))]"/> - <field name="users" eval="[(4, ref('base.user_root'))]"/> - </record> - - <record id="group_runbot_error_manager" model="res.groups"> - <field name="name">Build error manager</field> - <field name="category_id" ref="module_project"/> - </record> - - <record id="group_runbot_admin" model="res.groups"> - <field name="name">Runbot administrator</field> - <field name="category_id" ref="module_project"/> - <field name="users" eval="[(4, ref('base.user_root')), (4, ref('base.user_admin'))]"/> - <field name="implied_ids" eval="[(4, ref('runbot.group_user')), (4, ref('runbot.group_build_config_administrator'))]"/> - </record> - - <!-- config access rules--> - <record id="runbot_build_config_access_administrator" model="ir.rule"> - <field name="name">All config can be edited by config admin</field> - <field name="groups" eval="[(4, ref('group_build_config_administrator'))]"/> - <field name="model_id" ref="model_runbot_build_config"/> - <field name="domain_force">[(1, '=', 1)]</field> - <field name="perm_write" eval="True"/> - <field name="perm_unlink" eval="True"/> - <field name="perm_read" eval="False"/> - <field name="perm_create" eval="False"/> - </record> - - <record id="runbot_build_config_access_manager" model="ir.rule"> - <field name="name">Own config can be edited by user</field> - <field name="groups" eval="[(4, ref('group_build_config_manager'))]"/> - <field name="model_id" ref="model_runbot_build_config"/> - <field name="domain_force">[('protected', '=', False)]</field> - <field name="perm_write" eval="True"/> - <field name="perm_unlink" eval="True"/> - <field name="perm_read" 
eval="False"/> - <field name="perm_create" eval="True"/> - </record> - - <record id="runbot_build_config_access_user" model="ir.rule"> - <field name="name">All config can be edited by config admin</field> - <field name="groups" eval="[(4, ref('group_build_config_user'))]"/> - <field name="model_id" ref="model_runbot_build_config"/> - <field name="domain_force">[('create_uid', '=', user.id)]</field> - <field name="perm_write" eval="True"/> - <field name="perm_unlink" eval="True"/> - <field name="perm_read" eval="False"/> - <field name="perm_create" eval="True"/> - </record> - - - <!-- step access rules--> - <record id="runbot_build_config_step_access_administrator" model="ir.rule"> - <field name="name">All config step can be edited by config admin</field> - <field name="groups" eval="[(4, ref('group_build_config_administrator'))]"/> - <field name="model_id" ref="model_runbot_build_config_step"/> - <field name="domain_force">[(1, '=', 1)]</field> - <field name="perm_read" eval="False"/> - </record> - - <record id="runbot_build_config_step_access_manager" model="ir.rule"> - <field name="name">Unprotected config step can be edited by manager</field> - <field name="groups" eval="[(4, ref('group_build_config_manager'))]"/> - <field name="model_id" ref="model_runbot_build_config_step"/> - <field name="domain_force">[('protected', '=', False)]</field> - <field name="perm_read" eval="False"/> - </record> - - <record id="runbot_build_config_step_access_user" model="ir.rule"> - <field name="name">Own config step can be edited by user</field> - <field name="groups" eval="[(4, ref('group_build_config_user'))]"/> - <field name="model_id" ref="model_runbot_build_config_step"/> - <field name="domain_force">[('protected', '=', False), ('create_uid', '=', user.id)]</field> - <field name="perm_read" eval="False"/> - </record> - - </data> -</odoo> diff --git a/runbot/static/src/css/runbot.css b/runbot/static/src/css/runbot.css deleted file mode 100644 index 442bf07b..00000000 --- a/runbot/static/src/css/runbot.css +++ /dev/null @@ -1,329 +0,0 @@ -body { - margin: 0; - font-size: 0.875rem; - font-weight: 400; - line-height: 1.5; - color: #212529; - text-align: left; - background-color: white; -} - -form { - margin: 0; -} - -table { - font-size: 0.875rem; -} - -.fa { - line-height: inherit; /* reset fa icon line height to body height*/ -} - -a { - color: #00A09D; - text-decoration: none; -} - -a:hover { - color: #005452; -} - -a.slots_infos:hover { - text-decoration: none; -} - - -.breadcrumb-item.active a { - color: #6c757d; -} - -.breadcrumb { - background-color: inherit; - margin-bottom: 0; -} - -.build_details { - padding: 5px; -} - -.separator { - border-top: 2px solid #666; -} - -[data-toggle="collapse"] .fa:before { - content: "\f139"; -} - -[data-toggle="collapse"].collapsed .fa:before { - content: "\f13a"; -} - -body, .table { - font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; - color: #444; -} - -.btn-default { - background-color: #fff; - color: #444; - border-color: #ccc; -} - -.btn-default:hover { - background-color: #ccc; - color: #444; - border-color: #ccc; -} - -.btn-sm, .btn-group-sm > .btn { - padding: 0.25rem 0.5rem; - font-size: 0.89rem; - line-height: 1.5; - border-radius: 0.2rem; -} - -.btn-ssm, .btn-group-ssm > .btn { - padding: 0.22rem 0.4rem; - font-size: 0.82rem; - line-height: 1; - border-radius: 0.2rem; -} - -.killed, .bg-killed, .bg-killed-light { - background-color: #aaa; -} - -.dropdown-toggle:after { - content: none; -} - -.one_line { - white-space: nowrap; - overflow: 
hidden; - text-overflow: ellipsis; -} - -.batch_tile { - padding: 6px; -} - -.branch_time { - float: right; - margin-left: 10px; -} - -:root { - --info-light: #d9edf7; -} - -.bg-success-light { - background-color: #dff0d8; -} - -.bg-danger-light { - background-color: #f2dede; -} - -.bg-info-light { - background-color: var(--info-light); -} - -.bg-warning-light { - background-color: #fff9e6; -} - -.text-info { - color: #096b72 !important; -} - -.build_subject_buttons { - display: flex; -} - -.build_buttons { - margin-left: auto; -} - -.bg-killed { - background-color: #aaa; -} - -.badge-killed { - background-color: #aaa; -} - -.table-condensed td { - padding: 0.25rem; -} - -.line-through { - text-decoration: line-through; -} - -.badge-light { - border: 1px solid #AAA; -} - -.slot_button_group { - display: flex; - padding: 0 1px; -} - -.slot_button_group .btn { - flex: 0 0 25px; -} - -.slot_button_group .btn.slot_name { - width: 40px; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - flex: 1 1 auto; - text-align: left; -} - -.batch_header { - padding: 6px; -} - -.batch_header:hover { - background-color: rgba(0, 0, 0, 0.1) -} - -.header_hover { - visibility: hidden; -} - -.batch_header:hover .header_hover { - visibility: visible; -} - -.batch_slots { - display: flex; - flex-wrap: wrap; - padding: 6px; -} - -.batch_commits { - background-color: white; -} - -.batch_commits { - padding: 2px; -} - -.match_type_new { - background-color: var(--info-light); -} - -.batch_row .slot_container { - flex: 1 0 200px; - padding: 0 4px; -} - -.batch_row .slot_filler { - width: 100px; - height: 0px; - flex: 1 0 200px; - padding: 0 4px; -} - -.bundle_row { - border-bottom: 1px solid var(--gray); -} - -.bundle_row .batch_commits { - font-size: 80%; -} - -.bundle_row .slot_container { - flex: 1 0 50%; -} - -.bundle_row .slot_filler { - flex: 1 0 50%; -} - -.bundle_row .more .batch_commits { - display: block; -} - -/*.bundle_row .nomore .batch_commits { - display: none; - padding: 8px; -} - -.bundle_row .nomore.batch_tile:hover .batch_commits { - display: block; - position: absolute; - bottom: 1px; - transform: translateY(100%); - z-index: 100; - border: 1px solid rgba(0, 0, 0, 0.125); - border-radius: 0.2rem; - box-sizing: border-box; - margin-left: -1px; -}*/ - -.chart-legend { - max-height: calc(100vh - 160px); - overflow-y: scroll; - overflow-x: hidden; - cursor: pointer; - padding: 5px; -} - -.chart-legend .label { - margin-left: 5px; - font-weight: bold; -} - -.chart-legend .disabled .color { - visibility: hidden; -} - -.chart-legend .disabled .label { - font-weight: normal; - text-decoration: line-through; - margin-left: 5px; -} - -.chart-legend ul { - list-style-type: none; - margin: 0; - padding: 0; -} - -.limited-height { - max-height: 180px; - overflow: scroll; - -ms-overflow-style: none; - scrollbar-width: none; -} - -.limited-height > hr { - margin: 2px 0px; -} - -.limited-height:before { - content: ''; - width: 100%; - height: 30px; - position: absolute; - left: 0; - bottom: 0; - background: linear-gradient(transparent 0px, white 27px); -} - -.limited-height::-webkit-scrollbar { - display: none; -} - -.limited-height-toggle:hover { - background-color: #DDD; -} - -.o_runbot_team_searchbar .nav { - margin-left: 0px !important; -} diff --git a/runbot/static/src/js/json_field.js b/runbot/static/src/js/json_field.js deleted file mode 100644 index 520b5997..00000000 --- a/runbot/static/src/js/json_field.js +++ /dev/null @@ -1,72 +0,0 @@ -odoo.define('runbot.json_field', function 
(require) { -"use strict"; - -var basic_fields = require('web.basic_fields'); -var relational_fields = require('web.relational_fields'); -var registry = require('web.field_registry'); -var field_utils = require('web.field_utils'); -var dom = require('web.dom'); - - -var FieldJson = basic_fields.FieldChar.extend({ - init: function () { - this._super.apply(this, arguments); - - if (this.mode === 'edit') { - this.tagName = 'textarea'; - } - this.autoResizeOptions = {parent: this}; - }, - - start: function () { - if (this.mode === 'edit') { - dom.autoresize(this.$el, this.autoResizeOptions); - } - return this._super(); - }, - _onKeydown: function (ev) { - if (ev.which === $.ui.keyCode.ENTER) { - ev.stopPropagation(); - return; - } - this._super.apply(this, arguments); - }, - -}); - -registry.add('jsonb', FieldJson) - - -var FrontendUrl = relational_fields.FieldMany2One.extend({ - isQuickEditable: false, - events: _.extend({'click .external_link': '_stopPropagation'}, relational_fields.FieldMany2One.prototype.events), - init() { - this._super.apply(this, arguments); - if (this.value) { - const model = this.value.model.split('.').slice(1).join('_'); - const res_id = this.value.res_id; - this.route = '/runbot/' + model+ '/' + res_id; - } else { - this.route = false; - } - }, - _renderReadonly: function () { - this._super.apply(this, arguments); - var link = '' - if (this.route) { - link = ' <a href="'+this.route+'" ><i class="external_link fa fa-fw o_button_icon fa-external-link "/></a>' - } - this.$el.html('<span>' + this.$el.html() + link + '<span>') - }, - _stopPropagation: function(event) { - event.stopPropagation() - } -}); -registry.add('frontend_url', FrontendUrl) - -function stringify(obj) { - return JSON.stringify(obj, null, '\t') -} -field_utils.format.jsonb = stringify; -field_utils.parse.jsonb = JSON.parse; -}); \ No newline at end of file diff --git a/runbot/static/src/js/runbot.js b/runbot/static/src/js/runbot.js deleted file mode 100644 index fe09e18d..00000000 --- a/runbot/static/src/js/runbot.js +++ /dev/null @@ -1,32 +0,0 @@ -(function($) { - "use strict"; - $(function () { - $(document).on('click', '[data-runbot]', function (e) { - e.preventDefault(); - var data = $(this).data(); - var operation = data.runbot; - if (!operation) { - return; - } - var xhr = new XMLHttpRequest(); - xhr.addEventListener('load', function () { - if (operation == 'rebuild' && window.location.href.split('?')[0].endsWith('/build/' + data.runbotBuild)){ - window.location.href = window.location.href.replace('/build/' + data.runbotBuild, '/build/' + xhr.responseText); - } else { - window.location.reload(); - } - }); - xhr.open('POST', '/runbot/build/' + data.runbotBuild + '/' + operation); - xhr.send(); - }); - }); -})(jQuery); - - -function copyToClipboard(text) { - if (!navigator.clipboard) { - console.error('Clipboard not supported'); - return; - } - navigator.clipboard.writeText(text); -} diff --git a/runbot/static/src/js/stats.js b/runbot/static/src/js/stats.js deleted file mode 100644 index e33a41b8..00000000 --- a/runbot/static/src/js/stats.js +++ /dev/null @@ -1,292 +0,0 @@ - -var config = { - type: 'line', - options: { - - animation: { - duration: 0 - }, - legend: { - display: false, - }, - responsive: true, - tooltips: { - mode: 'point' - }, - scales: { - xAxes: [{ - display: true, - scaleLabel: { - display: true, - labelString: 'Builds' - } - }], - yAxes: [{ - display: true, - scaleLabel: { - display: true, - labelString: 'Value' - }, - }] - } - } -}; - -config.options.onClick = function(event, 
activeElements) { - if (activeElements.length === 0){ - var x_label_index = this.scales['x-axis-0'].getValueForPixel(event.x); - var build_id = config.data.labels[x_label_index] - if (event.layerY > this.chartArea.bottom && event.layerY < this.chartArea.bottom + this.scales['x-axis-0'].height){ - config.searchParams['center_build_id'] = build_id; - fetchUpdateChart(); - } - return; - } - window.open('/runbot/build/stats/' + config.data.labels[activeElements[0]._index]); -}; - -function fetch(path, data, then) { - const xhttp = new XMLHttpRequest(); - xhttp.onreadystatechange = function() { - if (this.readyState == 4 && this.status == 200) { - const res = JSON.parse(this.responseText); - then(res.result); - } - }; - xhttp.open("POST", path); - xhttp.setRequestHeader('Content-Type', 'application/json'); - xhttp.send(JSON.stringify({params:data})); -}; - -function random_color(name){ - var colors = ['#004acd', '#3658c3', '#4a66ba', '#5974b2', '#6581aa', '#6f8fa3', '#7a9c9d', '#85a899', '#91b596', '#a0c096', '#fdaf56', '#f89a59', '#f1865a', '#e87359', '#dc6158', '#ce5055', '#bf4150', '#ad344b', '#992a45', '#84243d']; - var sum = 0; - for (var i = 0; i < name.length; i++) { - sum += name.charCodeAt(i); - } - sum = sum % colors.length; - color = colors[sum]; - - return color -}; - - -function process_chart_data(){ - if (! config.result || Object.keys(config.result).length == 0) - { - config.data = { - labels:[], - datasets: [], - } - return - } - - var aggregate = document.getElementById('display_aggregate_selector').value; - var aggregates = {}; - - - var builds = Object.keys(config.result); - var newer_build_stats = config.result[builds.slice(-1)[0]]; - var older_build_stats = config.result[builds[0]]; - var keys = Object.keys(newer_build_stats) ; - if (aggregate != 'sum') { - keys.splice(keys.indexOf('Aggregate Sum')); - } - if (aggregate != 'average') { - keys.splice(keys.indexOf('Aggregate Average')); - } - var mode = document.getElementById('mode_selector').value; - - var sort_values = {} - for (key of keys) { - sort_value = NaN - if (mode == 'normal') { - sort_value = newer_build_stats[key] - } else if (mode == 'alpha') { - sort_value = key - } else if (mode == 'change_count') { - sort_value = 0 - previous = undefined - for (build of builds) { - res = config.result[build] - value = res[key] - if (previous !== undefined && value !== undefined && previous != value) { - sort_value +=1 - } - previous = value - } - } - else { - if (mode == "difference") { - var previous_value = 0; - if (older_build_stats[key] !== undefined) { - previous_value = older_build_stats[key] - } - sort_value = Math.abs(newer_build_stats[key] - previous_value) - } - } - sort_values[key] = sort_value - } - keys.sort((m1, m2) => sort_values[m2] - sort_values[m1]); - - if (config.searchParams.nb_dataset != -1) { - visible_keys = new Set(keys.slice(0, config.searchParams.nb_dataset)); - } else { - visible_keys = new Set(config.searchParams.visible_keys.split('-')) - } - console.log(visible_keys); - function display_value(key, build_stats){ - if (build_stats[key] === undefined) - return NaN; - if (mode == 'normal' || mode == 'alpha') - return build_stats[key] - var previous_value = 0; - if (older_build_stats[key] !== undefined) { - previous_value = older_build_stats[key] - } - return build_stats[key] - previous_value - } - - config.data = { - labels: builds, - datasets: keys.map(function (key){ - return { - label: key, - data: builds.map(build => display_value(key, config.result[build])), - borderColor: random_color(key), - 
backgroundColor: 'rgba(0, 0, 0, 0)', - lineTension: 0, - hidden: !visible_keys.has(key), - } - }) - }; -} - -function fetchUpdateChart() { - var chart_spinner = document.getElementById('chart_spinner'); - chart_spinner.style.visibility = 'visible'; - fetch_params = compute_fetch_params(); - console.log('fetch') - fetch('/runbot/stats/', fetch_params, function(result) { - config.result = result; - Object.values(config.result).forEach(v => v['Aggregate Sum'] = Object.values(v).reduce((a, b) => a + b, 0)) - Object.values(config.result).forEach(v => v['Aggregate Average'] = Object.values(v).reduce((a, b) => a + b, 0)/Object.values(v).length) - chart_spinner.style.visibility = 'hidden'; - updateChart() - }); -} - -function generateLegend() { - var legend = $("<ul></ul>"); - for (data of config.data.datasets) { - var legendElement = $(`<li><span class="color" style="border: 2px solid ${data.borderColor};"></span><span class="label" title="${data.label}">${data.label}<span></li>`) - if (data.hidden){ - legendElement.addClass('disabled') - } - legend.append(legendElement) - } - $("#js-legend").html(legend); - $("#js-legend > ul > li").on("click",function(e){ - var index = $(this).index(); - //$(this).toggleClass("disabled") - var curr = window.statsChart.data.datasets[index]; - curr.hidden = !curr.hidden; - config.searchParams.nb_dataset=-1; - config.searchParams.visible_keys = window.statsChart.data.datasets.filter(dataset => !dataset.hidden).map(dataset => dataset.label).join('-') - updateChart(); - }) -} - -function updateForm() { - for([key, value] of Object.entries(config.searchParams)){ - var selector = document.getElementById(key + '_selector'); - if (selector != null){ - selector.value = value; - selector.onchange = function(){ - var id = this.id.replace('_selector', ''); - config.searchParams[this.id.replace('_selector', '')] = this.value; - if (localParams.indexOf(id) == -1){ - fetchUpdateChart(); - } else { - updateChart() - } - } - } - } - let display_forward = config.result && config.searchParams.center_build_id != 0 && (config.searchParams.center_build_id !== Object.keys(config.result).slice(-1)[0]) - document.getElementById("forward_button").style.visibility = display_forward ? "visible":"hidden"; - document.getElementById("fast_forward_button").style.visibility = display_forward ? "visible":"hidden"; - let display_backward = config.result && (config.searchParams.center_build_id !== Object.keys(config.result)[0]) - document.getElementById("backward_button").style.visibility = display_backward ? "visible":"hidden"; -} - -function updateChart(){ - updateForm() - updateUrl(); - process_chart_data(); - if (! 
window.statsChart) { - var ctx = document.getElementById('canvas').getContext('2d'); - window.statsChart = new Chart(ctx, config); - } else { - window.statsChart.update(); - } - generateLegend(); -} - -function compute_fetch_params(){ - return { - ...config.searchParams, - bundle_id: document.getElementById('bundle_id').value, - trigger_id: document.getElementById('trigger_id').value, - } -}; - -function updateUrl(){ - window.location.hash = new URLSearchParams(config.searchParams).toString(); -} - -async function waitForChart() { - - function loop(resolve) { - if (window.Chart) { - resolve(); - } else { - setTimeout(loop.bind(null, resolve),10); - } - } - return new Promise((resolve) => { - loop(resolve); - }) -} - -window.onload = function() { - config.searchParams = { - limit: 25, - center_build_id: 0, - key_category: 'module_loading_queries', - mode: 'normal', - nb_dataset: 20, - display_aggregate: 'none', - visible_keys: '', - }; - localParams = ['display_aggregate', 'mode', 'nb_dataset', 'visible_keys'] - - for([key, value] of new URLSearchParams(window.location.hash.replace("#","?"))){ - config.searchParams[key] = value; - } - - document.getElementById('backward_button').onclick = function(){ - config.searchParams['center_build_id'] = Object.keys(config.result)[0]; - fetchUpdateChart(); - } - document.getElementById('forward_button').onclick = function(){ - config.searchParams['center_build_id'] = Object.keys(config.result).slice(-1)[0]; - fetchUpdateChart(); - } - document.getElementById('fast_forward_button').onclick = function(){ - config.searchParams['center_build_id'] = 0; - fetchUpdateChart(); - } - - waitForChart().then(fetchUpdateChart); -}; diff --git a/runbot/templates/badge.xml b/runbot/templates/badge.xml deleted file mode 100644 index 98f3f289..00000000 --- a/runbot/templates/badge.xml +++ /dev/null @@ -1,47 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.badge_default"><?xml version="1.0"?> - <svg t-attf-xmlns="http://www.w3.org/2000/svg" t-attf-width="{{left.width + right.width}}" height="18"> - <!-- from https://github.com/badges/shields/tree/master/templates --> - <linearGradient id="smooth" x2="0" y2="100%"> - <stop offset="0" stop-color="#fff" stop-opacity=".7"/> - <stop offset=".1" stop-color="#aaa" stop-opacity=".1"/> - <stop offset=".9" stop-color="#000" stop-opacity=".3"/> - <stop offset="1" stop-color="#000" stop-opacity=".5"/> - </linearGradient> - <rect rx="4" t-attf-width="{{ left.width + right.width }}" height="18" t-att-fill="left.color"/> - <rect rx="4" t-att-x="left.width" t-att-width="right.width" height="18" t-att-fill="right.color"/> - <rect t-att-x="left.width" width="4" height="18" t-att-fill="right.color"/> - <rect rx="4" t-attf-width="{{ left.width + right.width }}" height="18" fill="url(#smooth)"/> - <g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"> - <text t-attf-x="{{left.width/2+1}}" y="13" fill="#010101" fill-opacity=".3"><t t-esc="left.text"/></text> - <text t-attf-x="{{left.width/2+1}}" y="12"><t t-esc="left.text"/></text> - <text t-attf-x="{{left.width+right.width/2-1}}" y="13" fill="#010101" fill-opacity=".3"><t t-esc="right.text"/></text> - <text t-attf-x="{{left.width+right.width/2-1}}" y="12"><t t-esc="right.text"/></text> - </g> - </svg> - </template> - <template id="runbot.badge_flat"><?xml version="1.0"?> - <svg t-attf-xmlns="http://www.w3.org/2000/svg" t-attf-width="{{left.width + right.width}}" height="20"> - <!-- from 
https://github.com/badges/shields/tree/master/templates --> - <linearGradient id="smooth" x2="0" y2="100%"> - <stop offset="0" stop-color="#fff" stop-opacity=".1"/> - <stop offset=".1" stop-color="#fff" stop-opacity=".1"/> - <stop offset=".9" stop-color="#fff" stop-opacity=".1"/> - <stop offset="1" stop-color="#fff" stop-opacity=".1"/> - </linearGradient> - <rect rx="3" t-attf-width="{{ left.width + right.width }}" height="20" t-att-fill="left.color"/> - <rect rx="3" t-att-x="left.width" t-att-width="right.width" height="20" t-att-fill="right.color"/> - <rect t-att-x="left.width" width="4" height="20" t-att-fill="right.color"/> - <rect rx="3" t-attf-width="{{ left.width + right.width }}" height="20" fill="url(#smooth)"/> - <g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"> - <text t-attf-x="{{left.width/2+1}}" y="15" fill="#010101" fill-opacity=".3"><t t-esc="left.text"/></text> - <text t-attf-x="{{left.width/2+1}}" y="14"><t t-esc="left.text"/></text> - <text t-attf-x="{{left.width+right.width/2-1}}" y="15" fill="#010101" fill-opacity=".3"><t t-esc="right.text"/></text> - <text t-attf-x="{{left.width+right.width/2-1}}" y="14"><t t-esc="right.text"/></text> - </g> - </svg> - </template> - </data> -</odoo> diff --git a/runbot/templates/batch.xml b/runbot/templates/batch.xml deleted file mode 100644 index 1e74b35f..00000000 --- a/runbot/templates/batch.xml +++ /dev/null @@ -1,154 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.batch"> - <t t-call="runbot.layout"> - <div class="row"> - <div class="col-lg-6"> - <table class="table table-stripped"> - <tr> - <td>Bundle</td> - <td> - <a t-esc="batch.bundle_id.name" t-attf-href="/runbot/bundle/{{batch.bundle_id.id}}"/> - </td> - </tr> - <tr t-if="batch.category_id.id != default_category"> - <td>Category</td> - <td t-esc="batch.category_id.name"></td> - </tr> - <tr> - <td>Version</td> - <td t-esc="batch.slot_ids[0].params_id.version_id.name if batch.slot_ids else batch.bundle_id.version_id.name"/> - </tr> - <tr> - <td>Create date</td> - <td t-esc="batch.create_date"/> - </tr> - <tr t-if="more"> - <td>Last update</td> - <td> - <t t-esc="batch.last_update"/> - <span class="badge badge-info" t-esc="s2human(batch.last_update - batch.create_date)"/> - </td> - </tr> - <tr t-att-class="'bg-info-light' if batch.state=='preparing' else 'bg-success-light' if not any(log.level != 'INFO' for log in batch.log_ids) else 'bg-warning-light'"> - <td>Commits</td> - <td> - <div t-foreach="batch.commit_link_ids.sorted(key=lambda lc: (lc.commit_id.repo_id.sequence, lc.commit_id.repo_id.id))" t-as="commit_link"> - <t t-set="commit" t-value="commit_link.commit_id"/> - <span/> - <a t-attf-href="/runbot/commit/#{commit.id}"> - <i class="fa fa-fw fa-hashtag" t-if="commit_link.match_type == 'new'" title="This commit is a new head"/> - <i class="fa fa-fw fa-link" t-if="commit_link.match_type == 'head'" title="This commit is an existing head from bundle branches"/> - <i class="fa fa-fw fa-code-fork" t-if="commit_link.match_type == 'base_match'" title="This commit is matched from a base batch with matching merge_base"/> - <i class="fa fa-fw fa-clock-o" t-if="commit_link.match_type == 'base_head'" title="This commit is the head of a base branch"/> - <span class="label" t-esc="commit.dname"/> - </a> - <a t-att-href="'https://%s/commit/%s' % (commit_link.branch_id.remote_id.base_url, commit_link.commit_id.name)" class="badge badge-light" title="View Commit on Github"><i class="fa 
fa-github"/></a> - <small t-if="commit_link.match_type and commit_link.match_type.startswith('base')"> - from base: - <span t-esc="commit_link.branch_id.name"/> - <br/> - </small> - <small t-else=""> - found in branch - <span t-esc="commit_link.branch_id.name"/> - <t t-if="batch.state != 'preparing'"> - <span t-esc="'+%s' % commit_link.diff_add" class="text-success"/> - <span t-esc="'-%s' % commit_link.diff_remove" class="text-danger"/> - <span class="text-info"> - ( - <span t-esc="commit_link.file_changed"/> - <i class="fa fa-file"/> - ) - <span io="behind">( - <span t-esc="'%s ahead' % commit_link.base_ahead" class="text-success"/> - , - <span t-esc="'%s behind' % commit_link.base_behind" class="text-danger"/> - )</span> - </span> - </t> - <br/> - <t t-if="more"> - Base head: - <span t-esc="commit_link.base_commit_id.name"/> - <br/> - Merge base: - <span t-esc="commit_link.merge_base_commit_id.name"/> - <br/> - </t> - </small> - <b t-if="commit.rebase_on_id">Automatic rebase on <t t-esc="commit.rebase_on_id.name"/><br/></b> - <t t-if="more or not (commit_link.match_type and commit_link.match_type.startswith('base'))"> - Subject: - <span t-esc="commit.subject"/> - <br/> - Author: - <span t-esc="commit.author"/> - ( - <span t-esc="commit.author_email"/> - ) - <br/> - <t t-if="commit.author != commit.committer"> - Committer: - <span t-esc="commit.committer"/> - ( - <span t-esc="commit.committer_email"/> - ) - <br/> - </t> - Commit date: - <span t-esc="commit.date"/> - <br/> - </t> - <hr/> - </div> - </td> - </tr> - </table> - </div> - <div class="col-lg-6"> - <table class="table table-stripped"> - <tr> - <td>Builds</td> - <td> - <t t-foreach="batch.slot_ids.filtered(lambda s: not s.trigger_id.manual)" t-as="slot"> - <t t-call="runbot.slot_button"/> - </t> - </td> - </tr> - <tr> - <td>Manual</td> - <td> - <t t-foreach="batch.slot_ids.filtered(lambda s: s.trigger_id.manual)" t-as="slot"> - <t t-if="slot.build_id or (not slot.trigger_id.team_ids) or (user_id in slot.trigger_id.team_ids.user_ids)"> - <t t-call="runbot.slot_button"/> - </t> - </t> - </td> - </tr> - <tr t-if="more"> - <td>Old builds</td> - <td> - <t t-foreach="batch.with_context(active_test=False).slot_ids.filtered(lambda s: not s.active)" t-as="slot"> - <s> - <t t-call="runbot.slot_button"/> - </s> - </t> - </td> - </tr> - </table> - </div> - </div> - <t t-foreach="batch.log_ids" t-as="log"> - <t t-set="logclass" t-value="dict(ERROR='danger', WARNING='warning', INFO='info').get(log.level, 'warning')"/> - <div t-attf-class="alert alert-{{logclass}}"> - <b t-esc="log.level"/> - -- - <t t-out="log._markdown()"/> - </div> - </t> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/branch.xml b/runbot/templates/branch.xml deleted file mode 100644 index e01809c9..00000000 --- a/runbot/templates/branch.xml +++ /dev/null @@ -1,81 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.branch"> - <t t-call='runbot.layout'> - <div class="container-fluid"> - <div class="row"> - <div class='col-md-12'> - <div class="navbar navbar-default"> - <h3> - <span class="text-muted"><t t-esc="branch.remote_id.short_name"/>:</span><t t-esc="branch.name"/> <i t-if="not branch.alive" title="deleted/closed" class="fa fa-ban text-danger"/> - <div class="btn-group" role="group"> - <a t-att-href="branch.branch_url" class="btn btn-sm text-left" title="View Branch on Github"><i class="fa fa-github"/></a> - <a groups="runbot.group_runbot_admin" class="btn btn-sm fa fa-list text-left" 
t-attf-href="/web/#id={{branch.id}}&view_type=form&model=runbot.branch" target="new" title="View Branch in Backend"/> - </div> - </h3> - </div> - <table class="table table-condensed table-responsive table-stripped"> - <tr> - <td>Remote:</td> - <td t-esc="branch.remote_id.name"></td> - </tr> - <tr> - <td>Head:</td> - <td t-esc="branch.head_name"></td> - </tr> - <tr> - <td>Bundle:</td> - <td> - <small> - <div class="btn-toolbar mb-1" role="toolbar"> - <div class="btn-group btn-group-ssm w-100" role="group"> - <a t-attf-href="/runbot/bundle/{{branch.bundle_id.id}}" t-esc="branch.bundle_id.name" class="btn btn-default text-left" title="View Bundle Details"/> - </div> - </div> - </small> - </td> - </tr> - <t t-if="branch.is_pr"> - <tr t-if="pr_branch"> - <td>Pull Head Name</td> - <td><a t-attf-href="/runbot/branch/{{pr_branch.id}}" t-esc="branch.pull_head_name" title="View PR Details"/></td> - </tr> - <tr> - <td>Target Branch</td> - <td t-esc="branch.target_branch_name"></td> - </tr> - </t> - <t t-elif="branch_pr"> - <tr> - <td>Pull Request:</td> - <td><a t-attf-href="/runbot/branch/{{branch_pr.id}}" t-esc="branch_pr.name" title="View Branch Details"/></td> - </tr> - </t> - </table> - <table t-if="branch.reflog_ids" class="table table-condensed table-stripped" style="table-layout: initial;"> - <thead> - <tr> - <th>Ref Date</th> - <th>SHA</th> - <th>Commit Date</th> - <th>Author</th> - <th>Subject</th> - </tr> - </thead> - <tr t-foreach='branch.reflog_ids' t-as='reflog'> - <td t-esc="reflog.date"/> - <td><a t-attf-href="/runbot/commit/{{reflog.commit_id.id}}" t-esc="reflog.commit_id.name"/></td> - <td t-esc="reflog.commit_id.date"/> - <td t-esc="reflog.commit_id.author"/> - <td t-esc="reflog.commit_id.subject"/> - </tr> - </table> - <h4 t-else="">No Reflogs Found</h4> - </div> - </div> - </div> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/build.xml b/runbot/templates/build.xml deleted file mode 100644 index b24e9547..00000000 --- a/runbot/templates/build.xml +++ /dev/null @@ -1,408 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.build"> - <t t-call='runbot.layout'> - <t t-set="nav_form"> - <form class="form-inline"> - <div class="btn-group"> - <t t-call="runbot.build_button"> - <t t-set="bu" t-value="build"/> - <t t-set="klass" t-value="''"/> - <t t-set="show_commit_button" t-value="True"/> - </t> - </div> - </form> - </t> - <div class="row"> - <div class="col-md-12"> - <t t-set="batches" t-value="build.top_parent.with_context(active_test=False).slot_ids.mapped('batch_id')"/> - <t t-set="bundles" t-value="batches.mapped('bundle_id')"/> - - <t t-if="from_batch" t-set="unique_batch" t-value="from_batch"/> - <t t-if="from_batch" t-set="unique_bundle" t-value="from_batch.bundle_id"/> - <t t-if="not unique_batch and len(batches) == 1" t-set="unique_batch" t-value="batches"/> - <t t-if="not unique_bundle and len(bundles) == 1" t-set="unique_bundle" t-value="bundles"/> - - <!-- Breadcrumbs & Previous/Next--> - <nav aria-label="breadcrumb" class="d-flex justify-content-between align-items-center"> - <ol class="breadcrumb mb-0"> - <li t-attf-class="breadcrumb-item"> - <a t-attf-href="/runbot/{{build.params_id.project_id.id}}"> - <t t-esc="build.params_id.project_id.name"/> - </a> - </li> - <li t-if="unique_bundle" t-attf-class="breadcrumb-item"> - <a t-att-href="unique_bundle._url()"> - <t t-esc="unique_bundle.name"/> - </a> - </li> - <li t-if="unique_batch" t-attf-class="breadcrumb-item"> - <a t-att-href="unique_batch._url()"> - batch-<t 
t-esc="unique_batch.id"/> (<t t-esc="build.params_id.trigger_id.name"/>) - </a> - </li> - <li t-foreach="build.ancestors" t-as="ancestor" t-attf-class="breadcrumb-item{{' active' if ancestor == build else ''}}"> - <a t-att-href="ancestor.build_url"> - <t t-esc="ancestor.description or ancestor.config_id.name"/> - </a> - </li> - </ol> - <span class="btn-group pr-3"> - <a t-att-href="prev_ko.build_url" role="button" t-attf-title="Previous ko {{prev_ko.display_name}}" - t-attf-class="{{'' if prev_ko else 'disabled '}}btn btn-default fa fa-angle-double-left"></a> - <a t-att-href="prev_bu.build_url" role="button" t-attf-title="Previous {{prev_bu.display_name}}" - t-attf-class="{{'' if prev_bu else 'disabled '}}btn btn-default fa fa-chevron-left"></a> - <a t-att-href="next_bu.build_url" role="button" t-attf-title="Next {{next_bu.display_name}}" - t-attf-class="{{'' if next_bu else 'disabled '}}btn btn-default fa fa-chevron-right"></a> - <a t-att-href="next_ko.build_url" role="button" t-attf-title="Next ko {{next_ko.display_name}}" - t-attf-class="{{'' if next_ko else 'disabled '}}btn btn-default fa fa-angle-double-right"></a> - </span> - </nav> - </div> - <!-- Build details--> - <t t-set="rowclass"> - <t t-call="runbot.build_class"> - <t t-set="build" t-value="build"/> - </t> - </t> - <div t-attf-class="bg-{{rowclass.strip()}}-light {{'col-md-6' if build.children_ids else 'col-md-12'}}"> - <div class="build_details"> - <!-- Batch/bundles links--> - <t t-if="len(bundles) > 1"> - This build is referenced in <t t-esc="len(bundles)"/> bundles - <ul> - <li t-foreach="bundles" t-as="bundle" ><a t-esc="bundle.name" t-attf-href="/runbot/bundle/{{bundle.id}}"/></li> - </ul> - </t> - <t t-if="len(batches) > 1"> - <b>First apparition:</b> <a t-esc="batches[0].bundle_id.name" t-attf-href="/runbot/batch/{{batches[0].id}}"/><br/> - <b>Last apparition:</b> <a t-esc="batches[-1].bundle_id.name" t-attf-href="/runbot/batch/{{batches[-1].id}}"/><br/> - </t> - <!-- Parent --> - <div t-if="build.parent_id and build.orphan_result"> - <i class="fa fa-chain-broken" title="Build result ignored for parent" /> - &nbsp;Orphaned build, the result does not affect parent build result - </div> - - <t t-if="build.description"> - <b>Description:</b> - <t t-out="build.md_description"/> - <br/> - </t> - - <!-- Commits --> - <t t-foreach="build.params_id.sudo().commit_link_ids" t-as="build_commit"> - <b>Commit:</b> - <a t-attf-href="/runbot/commit/{{build_commit.commit_id.id}}"> - <t t-esc="build_commit.commit_id.dname"/> - </a> - &nbsp; - <a t-att-href="'https://%s/commit/%s' % (build_commit.branch_id.remote_id.base_url, build_commit.commit_id.name)" title="View Commit on Github"><i class="fa fa-github"/></a> - <t t-if="build_commit.match_type in ('default', 'pr_target', 'prefix') "> - from base branch - <br/> - </t> - <div t-else="" class="ml-3"> - <b>Subject:</b> - <t t-esc="build_commit.commit_id.subject"/> - <br/> - <b>Author:</b> - <t t-esc="build_commit.commit_id.author"/> - <br/> - <b>Committer:</b> - <t t-esc="build_commit.commit_id.committer"/> - <br/> - </div> - </t> - <b>Version:</b> - <t t-esc="build.params_id.version_id.name"/> - <br/> - <b>Config:</b> - <t t-esc="build.params_id.config_id.name"/> - <br/> - <t t-if='more'> - <b>Trigger:</b> - <t t-esc="build.params_id.trigger_id.name"/> - <br/> - <b>Config data:</b> - <t t-esc="build.params_id.config_data.dict"/> - <br/> - <b>Modules:</b> - <t t-esc="build.params_id.modules"/> - <br/> - <b>Extra params:</b> - <t t-esc="build.params_id.extra_params"/> - <br/> - - <t 
t-if="len(build.params_id.builds_reference_ids) > 1"> - <b>Reference batch:</b> - <t t-foreach="build.params_id.builds_reference_ids" t-as="reference"> - <span t-esc="reference.id"/> - </t> - <br/> - </t> - - <t t-if="len(build.params_id.build_ids) > 1"> - <b>Similar builds:</b> - <t t-foreach="build.params_id.build_ids" t-as="simbuild"> - <a t-if="simbuild.id != build.id" t-attf-href="/runbot/build/#{simbuild.id}"> - <span - t-attf-class="badge badge-{{simbuild.get_color_class()}}" - t-esc="simbuild.id"/> - </a> - </t> - <br/> - </t> - <b>Host:</b> - <t t-esc="build.host"/> - <br/> - </t> - <b>Total time:</b> - <t t-esc="build.get_formated_build_time()"/> - <br/> - <t t-if="build.stat_ids"> - <b>Stats:</b> - <a t-attf-href="/runbot/build/stats/{{build.id}}">Build <t t-esc="build.id"/></a> - <br/> - </t> - </div> - </div> - <div class="col-md-6" t-if="build.children_ids"> - Children: - <table class="table table-condensed"> - <t t-foreach="build.children_ids.sorted('id')" t-as="child"> - <t t-set="rowclass"> - <t t-call="runbot.build_class"> - <t t-set="build" t-value="child"/> - </t> - </t> - <tr t-attf-class="bg-{{rowclass.strip()}}-light{{' line-through' if child.orphan_result else ''}}"> - <td> - <a t-attf-href="/runbot/{{'batch/%s/' % from_batch.id if from_batch else ''}}build/{{child.id}}"> - Build - <t t-esc="child.id"/> - </a> - <t t-if="child.description"> - <t t-out="child.md_description" /> - </t> - <t t-else=""> - with config - <t t-esc="child.params_id.config_id.name"/> - </t> - <a groups="runbot.group_build_config_user" t-attf-href="/web#id={{child.params_id.config_id.id}}&view_type=form&model=runbot.build.config">...</a> - <t t-if="child.orphan_result"> - <i class="fa fa-chain-broken" title="Build result ignored for parent" /> - </t> - <t t-if="child.job"> - Running step: - <t t-esc="child.job"/> - </t> - <t t-if="child.global_state in ['testing', 'waiting']"> - <i class="fa fa-spinner fa-spin"/> - <t t-esc="child.global_state"/> - </t> - </td> - <td> - <span t-attf-class="badge badge-info" t-esc="child.get_formated_build_time()"/> - </td> - <td> - <t t-call="runbot.build_button"> - <t t-set="bu" t-value="child"/> - <t t-set="klass" t-value="'btn-group-ssm'"/> - </t> - - </td> - </tr> - </t> - </table> - </div> - <div class="col-md-12"> - <table class="table table-condensed"> - <tr> - <th>Date</th> - <th>Level</th> - <th>Type</th> - <th>Message</th> - </tr> - - <t t-set="commit_link_per_name" t-value="{commit_link.commit_id.repo_id.name:commit_link for commit_link in build.params_id.commit_link_ids}"/> - <t t-foreach="build.sudo().log_ids" t-as="l"> - <t t-set="subbuild" t-value="(([child for child in build.children_ids if child.id == int(l.path)] if l.type == 'subbuild' else False) or [build.browse()])[0]"/> - <t t-set="logclass" t-value="dict(CRITICAL='danger', ERROR='danger', WARNING='warning', OK='success', SEPARATOR='separator').get(l.level)"/> - <tr t-att-class="'separator' if logclass == 'separator' else ''"> - <td style="white-space: nowrap; width:1%;"> - <t t-esc="l.create_date.strftime('%Y-%m-%d %H:%M:%S')"/> - </td> - <td style="white-space: nowrap; width:1%;"> - <b t-if="l.level != 'SEPARATOR' and l.type not in ['link', 'markdown']" t-esc="l.level"/> - </td> - <td style="white-space: nowrap; width:1%;"> - <t t-if="l.level != 'SEPARATOR' and l.type not in ['link', 'markdown']" t-esc="l.type"/> - </td> - <t t-set="message_class" t-value="''"/> - <t t-if="subbuild" t-set="message_class"> - <t t-call="runbot.build_class"> - <t t-set="build" t-value="subbuild"/> - </t> 
- </t> - <td t-attf-class="bg-{{message_class.strip() or logclass}}-light"> - <t t-if="l.type not in ('runbot', 'link', 'markdown')"> - <t t-if="l.type == 'subbuild'"> - <a t-attf-href="/runbot/build/{{l.path}}"> - Build # - <t t-esc="l.path"/> - </a> - </t> - <t t-else=""> - <t t-set="repo_name" t-value="l.path.replace('/data/build/', '').split('/')[0] "/> - <t t-set="href" t-value=""/> - <t t-if="repo_name in commit_link_per_name"> - <t t-set="repo_base_url" t-value="commit_link_per_name[repo_name].branch_id.remote_id.base_url if repo_name in commit_link_per_name else ''"/> - <t t-set="commit_hash" t-value="commit_link_per_name[repo_name].commit_id.name if repo_name in commit_link_per_name else ''"/> - <t t-set="path" t-value="l.path.replace('/data/build/%s/' % repo_name, '')"/> - <t t-set="href" t-value="'https://%s/blob/%s/%s#L%s' % (repo_base_url, commit_hash, path, l.line)"/> - </t> - <a t-att-href="href" t-att-title="l.func"><t t-esc="l.name"/>:<t t-esc="l.line"/></a> - </t> - </t> - <t t-if="l.type == 'link' and len(l.message.split('$$')) == 3"> - <t t-set="message" t-value="l.message.split('$$')"/> - <t t-if="message[1].startswith('fa-')"> - <t t-esc="message[0]"/> - <a t-attf-href="{{l.path}}"> - <i t-attf-class="fa {{message[1]}}"/> - </a> - <t t-esc="message[2]"/> - </t> - <t t-else=""> - <t t-esc="message[0]"/> - <a t-attf-href="{{l.path}}"> - <t t-esc="message[1]"/> - </a> - <t t-esc="message[2]"/> - </t> - </t> - <t t-elif="l.type == 'markdown'" t-out="l._markdown()"/> - <t t-else=""> - <t t-if="'\n' not in l.message" t-esc="l.message"/> - <pre t-if="'\n' in l.message" style="margin:0;padding:0; border: none;"><t t-esc="l.message"/></pre> - <t t-if="l.type == 'subbuild' and subbuild.sudo().error_log_ids"> - <a class="show" data-toggle="collapse" t-attf-data-target="#subbuild-{{subbuild.id}}"> - <i class="fa"/> - </a> - <div t-attf-id="subbuild-{{subbuild.id}}" class="collapse in"> - <table class="table table-condensed" style="margin-bottom:0;"> - <t t-foreach="subbuild.sudo().error_log_ids" t-as="sl"> - <tr> - <td t-att-class="dict(CRITICAL='danger', ERROR='danger', WARNING='warning', OK='success', SEPARATOR='separator').get(sl.level)"> - <t t-if="sl.type == 'server'"> - <!--t-attf-href="https://{{repo.base_url}}/blob/{{build['name']}}/{{sl.path}}#L{{sl.line}}"--> - <a t-att-title="sl.func"><t t-esc="sl.name"/>:<t t-esc="sl.line"/></a> - </t> - <t t-if="'\n' not in sl.message" t-esc="sl.message"/> - <pre t-if="'\n' in sl.message" style="margin:0;padding:0; border: none;"> - <t t-esc="sl.message"/> - </pre> - </td> - </tr> - </t> - </table> - </div> - </t> - </t> - </td> - </tr> - <t t-if="l.error_id"> - <t t-set="icon" t-value="'list'"/> - <t t-set="error" t-value="l.error_id"/> - <t t-set="size" t-value=""/> - <t t-if="l.error_id.parent_id"> - <t t-set="icon" t-value="'link'"/> - <t t-set="error" t-value="l.error_id.parent_id"/> - <t t-set="size" t-value="'small'"/> - </t> - <tr> - <td/><td/><td/> - <td t-attf-class="bg-info-light {{size}}"> - This error is already known. 
- <a groups="runbot.group_user" t-attf-href="/web#id={{l.error_id.id}}&view_type=form&model=runbot.build.error&menu_id={{env.ref('runbot.runbot_menu_root').id}}" title="View in Backend" target="new"> - <i t-attf-class="fa fa-{{icon}}"/> - </a> - <span groups="runbot.group_runbot_admin" t-if="error.responsible or error.responsible.id == uid">(<i t-esc="error.responsible.name"/>)</span> - </td> - </tr> - </t> - </t> - </table> - </div> - </div> - </t> - </template> - <template id="runbot.build_search"> - <t t-call='runbot.layout'> - <div class="row"> - <div class="col-md-12"> - <table class="table table-condensed"> - <t t-foreach="builds" t-as="build"> - <t t-set="rowclass"> - <t t-call="runbot.build_class"> - <t t-set="build" t-value="build"/> - </t> - </t> - <tr t-attf-class="bg-{{rowclass.strip()}}-light{{' line-through' if build.orphan_result else ''}}"> - <td> - <t t-esc="build.create_date"/> - </td> - <td> - <a t-attf-href="/runbot/{{'batch/%s/' % from_batch.id if from_batch else ''}}build/{{build.id}}"> - <t t-esc="build.id"/> - </a> - </td> - <td> - <t t-if="build.description"> - <t t-out="build.md_description" /> - </t> - </td> - <td> - <t t-if="build.global_state in ['testing', 'waiting']"> - <i class="fa fa-spinner fa-spin"/> - <t t-esc="build.global_state"/> - </t> - </td> - <td> - <span t-esc="build.params_id.config_id.name"/> - </td> - <td> - <span t-esc="build.params_id.version_id.name"/> - </td> - <td> - <span t-esc="build.get_formated_build_time()"/> - </td> - <td> - <t t-call="runbot.build_button"> - <t t-set="bu" t-value="build"/> - <t t-set="klass" t-value="'btn-group-ssm'"/> - </t> - </td> - <td> - <t t-set="commits" t-value="build.params_id.commit_link_ids.commit_id.sorted(key=lambda c: c.repo_id.id)"/> - <t t-if="build_index+1 < len(builds)" t-set="previous_commits" t-value="list(builds[build_index+1].params_id.commit_link_ids.commit_id.sorted(key=lambda c: c.repo_id.id))"/> - <t t-else="" t-set="previous_commits" t-value="[]"/> - <t t-foreach="zip(previous_commits, commits)" t-as="compare"> - <t t-set="previous_commit" t-value="compare[0]"/> - <t t-set="commit" t-value="compare[1]"/> - <a t-attf-href="https://{{commit.repo_id.main_remote_id.base_url}}/compare/{{previous_commit.name}}..{{commit.name}}" t-att-title="commit.repo_id.name"> - <i class="fa fa-plus"/> - </a> - </t> - </td> - </tr> - </t> - </table> - </div> - </div> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/build_error.xml b/runbot/templates/build_error.xml deleted file mode 100644 index a7550261..00000000 --- a/runbot/templates/build_error.xml +++ /dev/null @@ -1,144 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.build_error_cards"> - <div t-if="build_errors" class="accordion" t-attf-id="accordion_{{accordion_id}}"> - <div class="card"> - <div class="card-header"> - <div class="row"> - <div class="col">Last seen date</div> - <div class="col col-md-3">Module</div> - <div class="col col-md-3">Summary</div> - <div class="col">Nb Seen</div> - <div class="col">Random</div> - <div class="col">Assigned to</div> - <div class="col">&nbsp;</div> - </div> - </div> - </div> - <t t-foreach="build_errors" t-as="build_error"> - <div class="card"> - <div class="card-header"> - <div class="row"> - <div class="col"><t t-esc="build_error.last_seen_date" t-options='{"widget": "datetime"}'/></div> - <div class="col col-md-3"><t t-esc="build_error.module_name"/></div> - <div class="col col-md-3"> - <button class="btn btn-link" type="button" 
data-toggle="collapse" t-attf-data-target="#collapse{{build_error.id}}" aria-expanded="true" aria-controls="collapseOne"> - <i class="fa fa-minus"/> - </button> - <code><t t-esc="build_error.summary"/></code> - </div> - <div class="col"> - <t t-esc="build_error.build_count"/> - </div> - <div class="col"> - <i t-if="build_error.random" class="fa fa-random"/> - </div> - <div class="col"><t t-esc="build_error.responsible.name"/></div> - <div class="col"> - <a groups="base.group_user" t-attf-href="/web/#id={{build_error.id}}&view_type=form&model=runbot.build.error&menu_id={{env.ref('runbot.runbot_menu_root').id}}" target="new" title="View in Backend"> - <i class="fa fa-list"/> - </a> - <a t-att-href="build_error.last_seen_build_id.build_url" t-attf-title="View last affected build ({{build_error.last_seen_build_id.id}})"><i class="fa fa-external-link"/></a> - </div> - </div> - </div> - - <div t-attf-id="collapse{{build_error.id}}" class="collapse" aria-labelledby="headingOne" t-attf-data-parent="#accordion_{{accordion_id}}"> - <div class="card-body"> - <pre class="pre-scrollable bg-danger-light"><t t-esc="build_error.content.strip()" /></pre> - </div> - </div> - </div> - </t> - </div> - </template> - - <template id="runbot.build_error"> - <t t-call='runbot.layout'> - <div class="container-fluid"> - <div class="row"> - <div class='col-md-12'> - <h3>Your assigned bug on Runbot Builds</h3> - <t t-call="runbot.build_error_cards"> - <t t-set="build_errors" t-value="current_user_errors"/> - <t t-set="accordion_id">user_errors</t> - </t> - <h3>Current Bugs on Runbot Builds</h3> - <t t-if="build_errors"> - <div class="container"> - <nav class="navbar navbar-expand-lg navbar-light bg-light"> - <div class="dropdown mr-auto"> - <a role="button" href="#" class="dropdown-toggle btn btn-secondary" data-toggle="dropdown"> - Sort By: <t t-esc="request.params.get('sort', '')"/> - </a> - <div class="dropdown-menu" aria-labelledby="sortMenuButton" role="menu"> - <t t-foreach="sort_order_choices" t-as="sort_choice"> - <a role="menuitem" class="dropdown-item" t-attf-href="/runbot/errors?sort={{sort_choice}}"><t t-esc="sort_order_choices[sort_choice]"/></a> - </t> - </div> - </div> - <span class="ml-auto"> - <t t-call="website.pager" /> - </span> - </nav> - </div> - - <t t-call="runbot.build_error_cards"> - <t t-set="build_errors" t-value="build_errors"/> - <t t-set="accordion_id">all_errors</t> - </t> - </t> - </div> - </div> - </div> - </t> - </template> - - <template id="runbot.team"> - <t t-call='runbot.layout'> - <div class="container-fluid bg-light"> - <div class="row"> - <div t-if="team" class='col-md-12'> - <div class="col-lg-12 text-center mb16"> - <h2>Team <t t-esc="team.name.capitalize()"/> - <a groups="base.group_user" t-attf-href="/web/#id={{team.id}}&view_type=form&model=runbot.team&menu_id={{env.ref('runbot.runbot_menu_root').id}}" target="new" title="View in Backend"> - <i class="fa fa-list"/> - </a> - </h2> - </div> - <div t-if="team.dashboard_id"> - <h3 t-if="team.dashboard_id.dashboard_tile_ids">Dashboards</h3> - <t t-call="runbot.dashboard"> - <t t-set="dashboard" t-value="team.dashboard_id"/> - </t> - </div> - <div class="d-flex"> - <h3 t-if="build_error_ids">Team assigned Errors</h3> - <t t-call="portal.portal_searchbar"> - <t t-set="classes" t-valuef="o_runbot_team_searchbar border-0"/> - <t t-set="title">&nbsp;</t> - </t> - </div> - <t t-call="runbot.build_error_cards"> - <t t-set="build_errors" t-value="build_error_ids"/> - <t t-set="accordion_id">team_errors</t> - </t> - </div> - <!-- 
Display list of teams of no team is supplied --> - <div t-if="not team" class='col-md-12'> - <h3> Teams</h3> - <div class="row"> - <div class="list-group list-group-horizontal"> - <t t-foreach="teams" t-as="team"> - <a t-attf-href="/runbot/teams/{{ team.id }}" class="list-group-item list-group-item-action"><t t-esc="team.name"/></a> - </t> - </div> - </div> - </div> - </div> - </div> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/build_stats.xml b/runbot/templates/build_stats.xml deleted file mode 100644 index 2e257921..00000000 --- a/runbot/templates/build_stats.xml +++ /dev/null @@ -1,136 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.build_stats"> - <t t-call='runbot.layout'> - <t t-set="bundles" t-value="build.slot_ids.mapped('batch_id.bundle_id')"/> - <div> - <div class="row"> - <div class="col-md-4"> - <div class="bg-success-light"> - <b>Build: </b><a t-attf-href="/runbot/build/{{build.id}}"><t t-esc="build.id"/></a><br/> - <t t-if="build.description"> - <b>Description:</b> - <t t-out="build.md_description"/> - <br/> - </t> - <b>Date: </b><t t-esc="build.create_date" /><br/> - <b>Config: </b><t t-esc="build.params_id.config_id.name" /><br/> - <b>Bundle(s): </b> - <t t-foreach="bundles" t-as="bundle"> - <a t-attf-href="/runbot/bundle/{{bundle.id}}"><t t-esc="bundle.name" /></a> - </t><br/> - <t t-foreach="build.params_id.sudo().commit_link_ids" t-as="build_commit"> - <b>Commit:</b> - <a t-attf-href="/runbot/commit/{{build_commit.commit_id.id}}"> - <t t-esc="build_commit.commit_id.dname"/> - </a> - <a t-att-href="'https://%s/commit/%s' % (build_commit.branch_id.remote_id.base_url, build_commit.commit_id.name)" class="btn btn-sm text-left" title="View Commit on Github"><i class="fa fa-github"/></a> - <t t-if="build_commit.match_type in ('default', 'pr_target', 'prefix') "> - from base branch - <br/> - </t> - <div t-else="" class="ml-3"> - <b>Subject:</b> - <t t-esc="build_commit.commit_id.subject"/> - <br/> - <b>Author:</b> - <t t-esc="build_commit.commit_id.author"/> - <br/> - <b>Committer:</b> - <t t-esc="build_commit.commit_id.committer"/> - <br/> - </div> - </t> - <b>Version:</b> - <t t-esc="build.params_id.version_id.name"/> - <br/> - </div> - </div> - <div t-foreach="sorted(build_stats.keys())" t-as="category" class="col-md-4"> - <h3><t t-esc="category.title().replace('_', ' ')"/></h3> - <table class="table table-condensed table-responsive table-stripped"> - <tr t-foreach="build_stats[category].keys()" t-as="module"> - <td><t t-esc="module"/></td> - <td><t t-esc="build_stats[category][module]"/></td> - </tr> - </table> - </div> - <div t-if="not build_stats" class="col-md-12 alert alert-warning">No stats records found for this build</div> - </div> - </div> - </t> - </template> - - <template id="runbot.modules_stats"> - <t t-call='runbot.layout'> - <input type="hidden" id="bundle_id" t-att-value="bundle.id"/> - <input type="hidden" id="trigger_id" t-att-value="trigger.id"/> - <div class="container-fluid"> - <nav class="navbar navbar-light"> - <div class="container"> - <b>Bundle:</b><t t-esc="bundle.name"/><br/> - <b>Trigger:</b><t t-esc="trigger.name"/> - <b>Stat Category:</b> - <select id="key_category_selector" class="form-select" aria-label="Stat Category"> - <option t-foreach="stats_categories" t-as="category" t-attf-value="{{category}}"><t t-esc="category.replace('_',' ').title()"/></option> - </select> - <b>Nb of builds:</b> - <select id="limit_selector" class="form-select" aria-label="Number Of Builds"> - <option 
value="10">10</option> - <option value="25">25</option> - <option value="50">50</option> - <option value="100">100</option> - <option value="250">250</option> - </select> - <button id="backward_button" class="btn btn-default" title="Previous Builds" aria-label="Previous Builds"> - <i t-attf-class="fa fa-backward"/> - </button> - <button id="forward_button" class="btn btn-default" title="Previous Builds" aria-label="Previous Builds"> - <i t-attf-class="fa fa-forward"/> - </button> - <button id="fast_forward_button" class="btn btn-default" title="Previous Builds" aria-label="Previous Builds"> - <i t-attf-class="fa fa-fast-forward"/> - </button> - <i id="chart_spinner" class="fa fa-2x fa-circle-o-notch fa-spin"/> - </div> - </nav> - <div class="row"> - <div class="col-xs-9 col-md-10"><canvas id="canvas"></canvas></div> - <div class="col-xs-3 col-md-2"> - <b>Mode:</b> - <select id="mode_selector" class="form-select" aria-label="Display mode"> - <option title="Real Values ordered by value" selected="selected" value="normal">Value</option> - <option title="Real Values ordered by name" selected="selected" value="alpha">Alphabetical</option> - <option title="Delta With Reference Build Values" value="difference">Difference</option> - <option title="Bigger # of datapoint varying from previous one" value="change_count">Noisy</option> - </select><br/> - - <b>Display:</b> - <select id="nb_dataset_selector" class="form-select" aria-label="Number Of Builds"> - <option value="-1">Custom</option> - <option value="0">0</option> - <option value="10">Top 10</option> - <option value="20">Top 20</option> - <option value="50">Top 50</option> - </select><br/> - - <b>Display aggregate:</b> - <select id="display_aggregate_selector" class="form-select" aria-label="Display sum"> - <option selected="selected" value="none">No</option> - <option value="sum">Sum</option> - <option value="average">Average</option> - </select><br/> - <div id="js-legend" class="chart-legend"> - - </div> - - </div> - </div> - </div> - </t> - <script type="text/javascript" src="/web/static/lib/Chart/Chart.js"></script> - <script type="text/javascript" src="/runbot/static/src/js/stats.js"></script> - </template> - </data> -</odoo> diff --git a/runbot/templates/bundle.xml b/runbot/templates/bundle.xml deleted file mode 100644 index cfe11b65..00000000 --- a/runbot/templates/bundle.xml +++ /dev/null @@ -1,88 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.bundle"> - <t t-call='runbot.layout'> - <div class="container-fluid"> - <div class="row"> - <div class='col-md-12'> - <div class="navbar navbar-default"> - <span class="text-center" style="font-size: 18px;"> - <t t-esc="bundle.name"/> - <i t-if="bundle.sticky" class="fa fa-star" style="color: #f0ad4e" /> - <div class="btn-group" role="group"> - <a groups="runbot.group_runbot_admin" t-attf-href="/web/#id={{bundle.id}}&view_type=form&model=runbot.bundle&menu_id={{env.ref('runbot.runbot_menu_root').id}}" class="btn btn-default btn-sm" target="new" title="View in Backend"> - <i class="fa fa-list"/> - </a> - <a class="btn btn-default" groups="base.group_user" t-attf-href="/runbot/bundle/{{bundle.id}}/force" title="Force A New Batch"> - <i class="fa fa-refresh"/> - </a> - <a class="btn btn-default" groups="base.group_user" t-attf-href="/runbot/bundle/{{bundle.id}}/force/1" title="Force A New Batch with automatic rebase"> - <i class="fa fa-fast-forward"/> - </a> - <t t-call="runbot.branch_copy_button"/> - <t t-call="runbot.bundle_stats_dropdown"/> - </div> - 
</span> - <span class="pull-right"> - <t t-call="website.pager" /> - </span> - </div> - <div> - <table class="table table-condensed table-responsive table-stripped"> - <tr> - <td>Version</td> - <td> - <t t-esc="bundle.version_id.name"/> - </td> - </tr> - <tr> - <td>Branches</td> - <td> - <t t-foreach="bundle.branch_groups().items()" t-as="group"> - <t t-foreach="group[1]" t-as="branch"> - <small> - <div class="btn-toolbar mb-1" role="toolbar"> - <div class="btn-group btn-group-ssm" role="group"> - <a t-att-href="branch.branch_url" class="btn btn-default text-left" title="View Branch on Github"><i class="fa fa-github"/></a> - <a groups="runbot.group_runbot_admin" class="btn btn-default fa fa-list text-left" t-attf-href="/web/#id={{branch.id}}&view_type=form&model=runbot.branch" target="new" title="View Branch in Backend"/> - <a href="#" t-esc="branch.remote_id.short_name" class="btn btn-default disabled text-left"/> - <a t-attf-href="/runbot/branch/{{branch.id}}" class="btn btn-default text-left" title="View Branch Details"><span t-att-class="'' if branch.alive else 'line-through'" t-esc="branch.name"/> <i t-if="not branch.alive" title="deleted/closed" class="fa fa-ban text-danger"/></a> - <t t-if="len(group[1]) == 1 and not branch.is_pr"> - <a t-attf-href="https://{{group[0].main_remote_id.base_url}}/compare/{{bundle.version_id.name}}...{{branch.remote_id.owner}}:{{branch.name}}?expand=1" class="btn btn-default text-left" title="Create pr"><i class="fa fa-code-fork"/> Create pr</a> - </t> - </div> - </div> - </small> - </t> - </t> - </td> - </tr> - - <tr t-if="more"> - <td>Project</td> - <td t-esc="bundle.project_id.name"/> - </tr> - <tr t-if="more"> - <td>New build enabled</td> - <td> - <i t-attf-class="fa fa-{{'times' if bundle.no_build else 'check'}}"/> - </td> - </tr> - <tr t-if="more"> - <td>Modules</td> - <td t-esc="bundle.modules or '/'"/> - </tr> - </table> - </div> - <div t-foreach="bundle.consistency_warning()" t-as="warning" t-esc="warning[1]" t-attf-class="alert alert-{{warning[0]}}"/> - <div class="batch_row" t-foreach="batchs" t-as="batch"> - <t t-call="runbot.batch_tile"/> - </div> - </div> - </div> - </div> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/commit.xml b/runbot/templates/commit.xml deleted file mode 100644 index c3422ed1..00000000 --- a/runbot/templates/commit.xml +++ /dev/null @@ -1,126 +0,0 @@ - -<odoo> - <data> - <template id="runbot.commit_status_state_td"> - <!-- Must be called with a `state` variable !--> - <td t-if="state=='pending'"> - <i class="fa fa-circle text-warning"/> - &nbsp; - <t t-esc="state"/> - </td> - <td t-if="state=='success'"> - <i class="fa fa-check text-success"/> - &nbsp; - <t t-esc="state"/> - </td> - <td t-if="state in ('failure', 'error')"> - <i class="fa fa-times text-danger"/> - &nbsp; - <t t-esc="state"/> - </td> - </template> - - <template id="runbot.commit"> - <t t-call='runbot.layout'> - <div class="row"> - <!-- Commit base informations --> - <div class="col-md-6"> - <table class="table table-stripped"> - <tr> - <td>Name</td> - <td> - <t t-esc="commit.name"/> - <div class="btn-group" role="group"> - <a t-att-href="'' if not reflogs else 'https://%s/commit/%s' % (reflogs[0].branch_id.remote_id.base_url, commit.name)" class="btn btn-sm text-left" title="View Commit on Github"><i class="fa fa-github"/></a> - <a groups="runbot.group_runbot_admin" class="btn btn-sm fa fa-list text-left" t-attf-href="/web/#id={{commit.id}}&view_type=form&model=runbot.commit" target="new" title="View Commit in Backend"/> - 
</div> - </td> - </tr> - <tr> - <td>Repo</td> - <td t-esc="commit.repo_id.name"/> - </tr> - <tr> - <td>Subject</td> - <td t-esc="commit.subject"/> - </tr> - <tr> - <td>Date</td> - <td t-esc="commit.date"/> - </tr> - <tr> - <td>Author</td> - <td> - <t t-esc="commit.author"/> - <small t-esc="commit.author_email"/> - </td> - </tr> - <tr t-if="commit.author != commit.committer"> - <td>Commiter</td> - <td> - <t t-esc="commit.committer"/> - <small t-esc="commit.committer_email"/> - </td> - </tr> - </table> - </div> - <!-- Status --> - <div class="col-md-4"> - <h3>Last Status</h3> - <table class="table table-sm table-borderless"> - <tr t-foreach='last_status_by_context' t-as='context'> - <t t-set="status" t-value="last_status_by_context[context]"/> - <td t-esc="status.sent_date and status.sent_date.strftime('%Y-%m-%d %H:%M:%S') or '—'"/> - <td t-esc="context"/> - <t t-call="runbot.commit_status_state_td"> - <t t-set="state" t-value="status.state"/> - </t> - <td> - <a t-att-href="status.target_url"> - build - <t t-if="status.target_url" t-esc="status.target_url.split('/')[-1]" /> - </a> - </td> - <td groups="base.group_user"> - <a t-attf-href="/runbot/commit/resend/{{status.id}}" title="Resend github status"> - <i class="fa fa-repeat"/> - </a> - </td> - </tr> - </table> - </div> - </div> - <div class="row"> - <div class="col-md-6"> - <h3>Branch presence history</h3> - <table class="table table-stripped"> - <tr t-foreach='reflogs' t-as='reflog'> - <td t-esc="reflog.date"/> - <td t-esc="reflog.branch_id.remote_id.short_name"/> - <td><a t-attf-href="/runbot/branch/{{reflog.branch_id.id}}" t-esc="reflog.branch_id.name" title="View Branch Details"/></td> - </tr> - </table> - </div> - <div class="col-md-6"> - <h3>Status history</h3> - <table class="table table-stripped"> - <tr t-foreach='status_list' t-as='status'> - <td t-esc="status.sent_date and status.sent_date.strftime('%Y-%m-%d %H:%M:%S') or '—'"/> - <td t-esc="status.context"/> - <t t-call="runbot.commit_status_state_td"> - <t t-set="state" t-value="status.state"/> - </t> - <td> - <a t-attf-href="/runbot/build/{{status.build_id.id}}"> - build - <t t-esc="status.build_id.id" /> - </a> - </td> - </tr> - </table> - </div> - </div> - </t> - </template> - </data> -</odoo> \ No newline at end of file diff --git a/runbot/templates/dashboard.xml b/runbot/templates/dashboard.xml deleted file mode 100644 index d96cdea9..00000000 --- a/runbot/templates/dashboard.xml +++ /dev/null @@ -1,283 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.glances"> - <t t-call="runbot.layout"> - <t t-set="head"> - <t t-if="refresh"> - <meta http-equiv="refresh" t-att-content="refresh"/> - </t> - </t> - <div class="container-fluid"> - <div class="row"> - <div class='col-md-12'> - <div> - <span t-attf-class="badge badge-{{pending_level}}"> - Pending: - <t t-esc="pending_total"/> - </span> - </div> - <t t-set="project_id"/> - <t t-set="nb_project" t-value="len(bundles.mapped('project_id'))"/> - <t t-foreach="bundles.sorted(lambda b: (-b.project_id.id, b.version_id.number), reverse=True)" t-as="bundle"> - <h3 t-if="nb_project > 1 and project_id != bundle.project_id.id" t-esc="bundle.project_id.name"/> - <t t-set="project_id" t-value="bundle.project_id.id"/> - <h4> - <t t-esc="bundle.name"/> - </h4> - <t t-foreach="bundle.last_done_batch.slot_ids" t-as="slot"> - <span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}"> - <t t-esc="slot.trigger_id.name"/> - </span> - </t> - </t> - </div> - </div> - </div> - </t> - </template> - - 
<template id="frontend_no_nav" inherit_id="runbot.layout" primary="True"> - <xpath expr="//header" position="replace"> - </xpath> - </template> - - <template id="runbot.config_monitoring"> - <t t-call="runbot.frontend_no_nav"> - <t t-set="head"> - <t t-if="refresh"> - <meta http-equiv="refresh" t-att-content="refresh"/> - </t> - </t> - </t> - </template> - - <template id="runbot.monitoring"> - <t t-call="runbot.frontend_no_nav"> - <t t-set="head"> - <t t-if="refresh"> - <meta http-equiv="refresh" t-att-content="refresh"/> - </t> - <style> - h4 { - padding: 3px 0; - border-bottom: 1px solid grey; - } - .r-mb02 { margin-bottom: 0.2em; } - </style> - </t> - <div class="container-fluid"> - <div class="row"> - <div class="col-md-12"> - <div> - <t t-call="runbot.slots_infos"/> - </div> - <t t-foreach="hosts_data.sorted(key=lambda h:h.name)" t-as="host"> - <div> - <span t-esc="host.name.split('.')[0]"/> - <t t-if="host.nb_testing == 0"> - <t t-set="klass">success</t> - </t> - <t t-if="host.nb_testing > 0"> - <t t-set="klass">info</t> - </t> - <t t-if="host.nb_testing == host.nb_worker"> - <t t-set="klass">warning</t> - </t> - <t t-if="host.nb_testing > host.nb_worker"> - <t t-set="klass">danger</t> - </t> - <span t-attf-class="badge badge-{{klass}}"> - <span t-esc="host.nb_testing"/> - / - <span t-esc="host.nb_worker"/> - </span> - <t t-esc="host.nb_running"/> - <t t-set="succes_time" t-value="int(datetime.datetime.now().timestamp() - host.last_success.timestamp())"/> - <t t-set="start_time" t-value="int(datetime.datetime.now().timestamp() - host.last_start_loop.timestamp())"/> - <t t-set="end_time" t-value="int(datetime.datetime.now().timestamp() - host.last_end_loop.timestamp())"/> - - <t t-set="klass">success</t> - <t t-if="succes_time > 30"> - <t t-set="klass">info</t> - </t> - <t t-if="succes_time > 180"> - <t t-set="klass">danger</t> - </t> - - <span t-attf-class="badge badge-{{klass}}"> - <span t-esc="succes_time"/> - </span> - - <t t-set="klass">success</t> - <t t-if="start_time > 60*10"> - <t t-set="klass">info</t> - </t> - <t t-if="start_time > 60*15"> - <t t-set="klass">danger</t> - </t> - - <span t-attf-class="badge badge-{{klass}}"> - <span t-esc="start_time"/> - </span> - - <t t-set="klass">success</t> - <t t-if="end_time > 60*10"> - <t t-set="klass">info</t> - </t> - <t t-if="end_time > 60*15"> - <t t-set="klass">danger</t> - </t> - - <span t-attf-class="badge badge-{{klass}}"> - <span t-esc="end_time"/> - </span> - - <t t-set="cron_time" t-value="end_time-start_time"/> - <t t-set="klass">success</t> - <t t-if="abs(cron_time) > 10"> - <t t-set="klass">info</t> - </t> - <t t-if="abs(cron_time) > 60"> - <t t-set="klass">danger</t> - </t> - <span t-attf-class="badge badge-{{klass}}"> - <span t-esc="cron_time"/> - </span> - - </div> - </t> - - <table> - <tr t-foreach="bundles.sorted(lambda b: b.version_id.number, reverse=True)" t-as="bundle"> - <td> - <t t-esc="bundle.version_id.number"/> - </td> - <td> - <t t-set='batch' t-value="bundle.with_context({'category_id': category.id}).last_done_batch"/> - <table> - <t t-foreach="batch.slot_ids" t-as='slot'> - <tr> - <td> - <t t-esc="slot.trigger_id.name[:4]"/> - </td> - <t t-set="build" t-value="slot.build_id"/> - <td> - <span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}"> - <i t-attf-class="fa fa-{{category.icon}}"/> - </span> - </td> - <td t-foreach="build.children_ids" t-as="child"> - <span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}"> - <t t-esc="child.params_id.config_id.name[:4]"/> - </span> - </td> 
- </tr> - </t> - </table> - </td> - </tr> - </table> - </div> - </div> - </div> - </t> - </template> - - <template id="runbot.default_dashboard_tile_view"> - <div class="col-md-3 col-lg-2 p-2"> - <div class="card"> - <div class="card-header limited-height-toggle" t-attf-onclick="$('#tile_{{tile.id}}').toggleClass('limited-height')"> - <t t-esc="tile.display_name"/> <t t-if="tile.build_ids"> (<t t-esc="len(tile.build_ids)"/>)</t> - </div> - <div class="card-body limited-height" t-attf-id="tile_{{tile.id}}"> - <p t-if="not tile.build_ids" class="text-success my-0">No build found 👍</p> - <t t-foreach="tile.sticky_bundle_ids.sorted(lambda b: b.version_id.number, reverse=True)" t-as="bundle"> - <t t-set="failed_builds" t-value="tile.build_ids.filtered(lambda b: b.top_parent.slot_ids.batch_id.bundle_id == bundle)"/> - <h4 class="card-title" t-if="failed_builds" t-esc="bundle.name"/> - <p t-foreach="failed_builds" t-as="build" class="my-0"> - <a class="text-danger" t-attf-href="/runbot/build/{{build.id}}" target="new"> - <t t-esc="build.description or build.id"/> - </a> - </p> - </t> - <hr/> - </div> - </div> - </div> - </template> - - <template id="runbot.dashboard"> - <div class="row"> - <t t-foreach="dashboard.dashboard_tile_ids" t-as="tile"> - <t t-if="tile.build_ids or not hide_empty" t-call="{{ tile.custom_template_id.id }}"/> - </t> - </div> - </template> - - <template id="runbot.dashboard_page"> - <t t-call="runbot.frontend_no_nav"> - <t t-set="head"> - <t t-if="refresh"> - <meta http-equiv="refresh" t-att-content="refresh"/> - </t> - </t> - <t t-call="runbot.dashboard"/> - </t> - </template> - - <template id="runbot.load_info"> - <t t-call='runbot.layout'> - <div class="container-fluid frontend"> - <div class="row"> - <div class="col-md-12"> - <t t-call="runbot.slots_infos"/> - </div> - </div> - - <div t-foreach="build_by_bundle" t-as="bundle_builds" class="row bundle_row"> - <t t-set="bundle" t-value="bundle_builds[0]"/> - <t t-set="builds" t-value="bundle_builds[1]"/> - - <div class="col-md-3 col-lg-2 cell"> - <div class="one_line"> - <i t-if="bundle.sticky" class="fa fa-star" style="color: #f0ad4e" /> - <a t-attf-href="/runbot/bundle/#{bundle.id}" title="View Bundle"> - <b t-esc="bundle.name"/> - </a> - </div> - <div class="badge badge-info" t-out="len(builds)"/> - </div> - <div class="col-md-9 col-lg-10"> - <div class="table-responsive"> - <table class="table table-condensed"> - <t t-foreach="builds" t-as="build"> - <t t-set="rowclass"> - <t t-call="runbot.build_class"> - <t t-set="build" t-value="build"/> - </t> - </t> - <tr t-attf-class="bg-{{rowclass.strip()}}-light{{'line-through' if build.orphan_result else ''}}"> - <td> Batch: <a t-attf-href="/runbot/batch/{{build.params_id.create_batch_id.id}}"><t t-out="build.params_id.create_batch_id.id"/></a></td> - <td><a t-attf-href="/runbot/build/{{build.id}}"><t t-out="build.parent_path"/></a></td> - <td t-out="build.local_state"/> - <td t-out="build.host"/> - <td t-out="build.config_id.name"/> - <td> - <t t-call="runbot.build_button"> - <t t-set="bu" t-value="build"/> - <t t-set="klass" t-value="'btn-group-ssm'"/> - </t> - </td> - </tr> - </t> - </table> - </div> - </div> - - </div> - </div> - </t> - </template> - </data> -</odoo> diff --git a/runbot/templates/dockerfile.xml b/runbot/templates/dockerfile.xml deleted file mode 100644 index 8bbdd407..00000000 --- a/runbot/templates/dockerfile.xml +++ /dev/null @@ -1,151 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - - <template id="runbot.docker_from"> -FROM <t 
t-esc="values['from']"/> -ENV LANG C.UTF-8 -USER root - </template> - - <template id="runbot.docker_install_debs"> -# Install debian packages -RUN set -x ; \ - apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends <t t-esc="deb_packages or values['deb_packages']"/> \ - && rm -rf /var/lib/apt/lists/* - </template> - - <template id="runbot.docker_install_chrome"> - <t t-set="chrome_distrib" t-value="values.get('chrome_distrib')"/> - <t t-set="chrome_version" t-value="values['chrome_version']"/> - <t t-set="chrome_source" t-value="values.get('chrome_source')"/> -# Install Google Chrome - <t t-if="chrome_source == 'google'"> -RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_<t t-esc="chrome_version"/>_amd64.deb -o /tmp/chrome.deb \ - && apt-get update \ - && apt-get -y install --no-install-recommends /tmp/chrome.deb \ - && rm /tmp/chrome.deb - </t> - <t t-else=""> -RUN curl -sSL http://nightly.odoo.com/odoo.key | apt-key add - \ - && echo "deb http://nightly.odoo.com/deb/<t t-esc="chrome_distrib"/> ./" > /etc/apt/sources.list.d/google-chrome.list \ - && apt-get update \ - && apt-get install -y -qq google-chrome-stable=<t t-esc="chrome_version"/> \ - && rm -rf /var/lib/apt/lists/* - </t> - </template> - - <template id="runbot.docker_install_phantomjs"> -# Install phantomjs -RUN curl -sSL https://nightly.odoo.com/resources/phantomjs.tar.bz2 -o /tmp/phantomjs.tar.bz2 \ - && tar xvfO /tmp/phantomjs.tar.bz2 phantomjs-2.1.1-linux-x86_64/bin/phantomjs > /usr/local/bin/phantomjs \ - && chmod +x /usr/local/bin/phantomjs \ - && rm -f /tmp/phantomjs.tar.bz2 - </template> - - <template id="runbot.docker_install_wkhtml"> -# Install wkhtml -RUN curl -sSL <t t-esc="values['wkhtml_url']"/> -o /tmp/wkhtml.deb \ - && apt-get update \ - && dpkg --force-depends -i /tmp/wkhtml.deb \ - && apt-get install -y -f --no-install-recommends \ - && rm /tmp/wkhtml.deb - </template> - - <template id="runbot.docker_install_nodejs"> - <t t-set="node_version" t-value="node_version or '15'"/> -# Install nodejs -RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \ - && echo "deb https://deb.nodesource.com/node_<t t-esc="values['node_version']"/>.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \ - && apt-get update \ - && apt-get install -y nodejs - </template> - - <template id="runbot.docker_install_node_packages"> -RUN npm install -g <t t-esc="values['node_packages']"/> - </template> - - <template id="runbot.docker_install_flamegraph"> -ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl -RUN chmod +rx /usr/local/bin/flamegraph.pl - </template> - - <template id="runbot.docker_install_psql"> - <t t-set="psql_version" t-value="psql_version or False"/> -RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \ - && echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgclient.list \ - && apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client<t t-if="values['psql_version']">-</t><t t-esc="values['psql_version']"/> \ - && rm -rf /var/lib/apt/lists/* - </template> - - <template id="runbot.docker_install_odoo_debs"> - <t t-set="odoo_branch" t-value="odoo_branch or 'master'"/> -ADD https://raw.githubusercontent.com/odoo/odoo/<t t-esc="values['odoo_branch']"/>/debian/control /tmp/control.txt -RUN apt-get update \ - 
&& sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \ - | awk '/^ [a-z]/ { gsub(/,/,"") ; print $1 }' | sort -u \ - | egrep -v 'postgresql-client' \ - | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \ - | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - </template> - - <template id="runbot.docker_install_odoo_python_requirements"> -ADD https://raw.githubusercontent.com/odoo/odoo/<t t-esc="values['odoo_branch']"/>/requirements.txt /root/requirements.txt -RUN <t t-esc="values['python_version']"/> -m pip install --no-cache-dir setuptools wheel && \ - <t t-esc="values['python_version']"/> -m pip install --no-cache-dir -r /root/requirements.txt && \ - <t t-esc="values['python_version']"/> -m pip install --no-cache-dir <t t-esc="values['additional_pip']"/> - </template> - - <template id="runbot.docker_install_runbot_python_requirements"> -RUN <t t-esc="values['python_version']"/> -m pip install --no-cache-dir setuptools wheel && \ - <t t-esc="values['python_version']"/> -m pip install <t t-esc="values['runbot_pip']"/> - </template> - - - <template id="runbot.docker_base"> - <t t-set="default" t-value="{ -'from': 'ubuntu:focal', -'odoo_branch': 'master', -'chrome_source': 'google', -'chrome_version': '90.0.4430.93-1', -'node_packages': 'rtlcss es-check eslint', -'node_version': '15', -'psql_version': '12', -'wkhtml_url': 'https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb', -'chrome': True, -'phantom': False, -'do_requirements': True, -'python_version': 'python3', -'deb_packages_python': 'python3 python3-dbfread python3-dev python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-vatnumber python3-websocket libpq-dev', -'deb_package_default': 'apt-transport-https build-essential ca-certificates curl ffmpeg file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release node-less ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev', -'additional_pip': 'ebaysdk==2.1.5 pdf417gen==0.7.1', -'runbot_pip': 'coverage==4.5.4 astroid==2.4.2 pylint==2.5.0 flamegraph' -}"/> - <t t-set="values" t-value="default"/> - <t t-set="dummy" t-value="values.update(custom_values)" t-if="custom_values" /> - - <t t-call="runbot.docker_from"/> - <t t-call="runbot.docker_install_debs"> - <t t-set="deb_packages" t-value="values['deb_package_default']"/> - </t> - <t t-call="runbot.docker_install_debs"> - <t t-set="deb_packages" t-value="values['deb_packages_python']"/> - </t> - <t t-out="0"/><!-- custom content from caller t-call--> - <t t-call="runbot.docker_install_wkhtml"/> - <t t-call="runbot.docker_install_nodejs"/> - <t t-call="runbot.docker_install_node_packages"/> - <t t-call="runbot.docker_install_flamegraph"/> - <t t-call="runbot.docker_install_odoo_debs"/> - <t t-call="runbot.docker_install_runbot_python_requirements"/> - <t t-call="runbot.docker_install_psql"/> - <t t-if="values['chrome']" t-call="runbot.docker_install_chrome"/> - <t t-if="values['phantom']" t-call="runbot.docker_install_phantomjs"/> - <t t-if="values['do_requirements']" t-call="runbot.docker_install_odoo_python_requirements"/> - </template> - </data> -</odoo> diff --git a/runbot/templates/frontend.xml b/runbot/templates/frontend.xml deleted file mode 100644 index 08bff742..00000000 --- a/runbot/templates/frontend.xml +++ /dev/null @@ -1,127 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - 
<data> - <template id="runbot.bundles"> - <t t-call='runbot.layout'> - <t t-set="nav_form"> - <form class="form-inline my-2 my-lg-0" role="search" t-att-action="qu(search='')" method="get"> - <div class="input-group md-form form-sm form-2 pl-0"> - <input class="form-control my-0 py-1" type="text" placeholder="Search" aria-label="Search" name="search" t-att-value="search"/> - <div class="input-group-append"> - <button type='submit' class="input-group-text red lighten-3" id="basic-text1"> - <i class="fa fa-search text-grey"/> - </button> - </div> - </div> - </form> - </t> - <div class="container-fluid frontend"> - <div class="row"> - <div class='col-md-12'> - <span class="pull-right" t-call="runbot.slots_infos"/> - </div> - <div class='col-md-12'> - <div t-if="message" class="alert alert-warning" role="alert"> - <t t-esc="message" /> - </div> - <div t-if="not project" class="mb32"> - <h3>No project</h3> - </div> - <div t-else=""> - <div t-foreach="bundles" t-as="bundle" class="row bundle_row"> - <div class="col-md-3 col-lg-2 cell"> - <div class="one_line"> - <i t-if="bundle.sticky" class="fa fa-star" style="color: #f0ad4e" /> - <a t-attf-href="/runbot/bundle/#{bundle.id}" t-attf-title="View Bundle #{bundle.name}"> - <b t-esc="bundle.name"/> - </a> - </div> - <div class="btn-toolbar" role="toolbar" aria-label="Toolbar with button groups"> - <div class="btn-group" role="group"> - <t t-foreach="categories" t-as="category"> - <t t-if="active_category_id != category.id"> - <t t-set="last_category_batch" t-value="bundle.with_context(category_id=category.id).last_done_batch"/> - <t t-if="last_category_batch"> - <t t-if="category.view_id" t-call="{{category.view_id.sudo().key}}"/> - <a t-else="" - t-attf-title="View last {{category.name}} batch" - t-attf-href="/runbot/batch/{{last_category_batch.id}}" - t-attf-class="fa fa-{{category.icon}}" - /> - </t> - </t> - </t> - </div> - <div class="btn-group" role="group"> - <t t-if="not bundle.sticky" t-call="runbot.branch_copy_button"/> - <t t-call="runbot.branch_github_menu"/> - </div> - </div> - <div t-if="bundle.host_id"> - <span class="badge badge-info" t-esc="bundle.host_id.name"></span> - </div> - </div> - <div class="col-md-9 col-lg-10"> - <div class="row no-gutters"> - <div t-foreach="bundle.last_batchs" t-as="batch" t-attf-class="col-md-6 col-xl-3 {{'d-none d-xl-block' if batch_index > 1 else ''}}"> - <t t-call="runbot.batch_tile"/> - </div> - </div> - </div> - </div> - </div> - </div> - </div> - </div> - </t> - </template> - - <template id="runbot.batch_tile"> - <t t-set="klass">info</t> - <t t-if="batch.state=='skipped'" t-set="klass">killed</t> - <t t-if="batch.state=='done' and all(slot.build_id.global_result == 'ok' for slot in batch.slot_ids if slot.build_id)" t-set="klass">success</t> - <t t-if="batch.state=='done' and any(slot.build_id.global_result in ('ko', 'warn') for slot in batch.slot_ids)" t-set="klass">danger</t> - - <div t-attf-class="batch_tile if more"> - <div t-attf-class="card bg-{{klass}}-light"> - <a t-attf-href="/runbot/batch/#{batch.id}" title="View Batch"> - <div class="batch_header"> - <span t-attf-class="badge badge-{{'warning' if batch.has_warning else 'light'}}"> - <t t-esc="batch.get_formated_age()"/> - <i class="fa fa-exclamation-triangle" t-if="batch.has_warning"/> - </span> - <span class="float-right header_hover">View batch...</span> - </div> - </a> - <t t-if="batch.state=='preparing'"> - <span><i class="fa fa-cog fa-spin fa-fw"/> preparing</span> - </t> - <div class="batch_slots"> - <t t-foreach="batch.slot_ids" 
t-as="slot"> - <t t-if="slot.build_id"> - <div t-if="((not slot.trigger_id.hide and trigger_display is None) or (trigger_display and slot.trigger_id.id in trigger_display)) or slot.build_id.global_result == 'ko'" - t-call="runbot.slot_button" class="slot_container"/> - </t> - </t> - <div class="slot_filler" t-foreach="range(10)" t-as="x"/> - </div> - <div t-if='more' class="batch_commits"> - <div t-foreach="batch.commit_link_ids.sorted(lambda cl: (cl.commit_id.repo_id.sequence, cl.commit_id.repo_id.id))" t-as="commit_link" class="one_line"> - - <a t-attf-href="/runbot/commit/#{commit_link.commit_id.id}" t-attf-class="badge badge-light batch_commit match_type_{{commit_link.match_type}}"> - <i class="fa fa-fw fa-hashtag" t-if="commit_link.match_type == 'new'" title="This commit is a new head"/> - <i class="fa fa-fw fa-link" t-if="commit_link.match_type == 'head'" title="This commit is an existing head from bundle branches"/> - <i class="fa fa-fw fa-code-fork" t-if="commit_link.match_type == 'base_match'" title="This commit is matched from a base batch with matching merge_base"/> - <i class="fa fa-fw fa-clock-o" t-if="commit_link.match_type == 'base_head'" title="This commit is the head of a base branch"/> - <t t-esc="commit_link.commit_id.dname"/> - </a> - <a t-att-href="'https://%s/commit/%s' % (commit_link.branch_id.remote_id.base_url, commit_link.commit_id.name)" class="badge badge-light" title="View Commit on Github"><i class="fa fa-github"/></a> - <span t-esc="commit_link.commit_id.subject"/> - </div> - </div> - </div> - </div> - </template> - - </data> -</odoo> diff --git a/runbot/templates/git.xml b/runbot/templates/git.xml deleted file mode 100644 index 54984370..00000000 --- a/runbot/templates/git.xml +++ /dev/null @@ -1,15 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.git_config">[core] - repositoryformatversion = 0 - filemode = true - bare = true -<t t-foreach="repo.remote_ids" t-as="remote_id"> -[remote "<t t-esc="remote_id.remote_name"/>"] - url = <t t-esc="remote_id.name"/> -<t t-if = "remote_id.fetch_heads"> fetch = +refs/heads/*:refs/<t t-esc='remote_id.remote_name'/>/heads/*</t> -<t t-if = "remote_id.fetch_pull"> fetch = +refs/pull/*/head:refs/<t t-esc='remote_id.remote_name'/>/pull/*</t> -</t></template> - </data> -</odoo> diff --git a/runbot/templates/nginx.xml b/runbot/templates/nginx.xml deleted file mode 100644 index 1c843991..00000000 --- a/runbot/templates/nginx.xml +++ /dev/null @@ -1,76 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <template id="runbot.nginx_config"> -pid <t t-esc="nginx_dir"/>/nginx.pid; -error_log <t t-esc="nginx_dir"/>/error.log; -worker_processes 1; -events { worker_connections 1024; } -http { -include /etc/nginx/mime.types; -server_names_hash_max_size 512; -server_names_hash_bucket_size 256; -client_max_body_size 64M; -index index.html; -log_format full '$remote_addr - $remote_user [$time_local] ' - '"$request" $status $body_bytes_sent ' - '"$http_referer" "$http_user_agent" $request_time'; -access_log <t t-esc="nginx_dir"/>/access.log full; -error_log <t t-esc="nginx_dir"/>/error.log; -client_body_temp_path <t t-esc="nginx_dir"/>; -fastcgi_temp_path <t t-esc="nginx_dir"/>; - -autoindex on; - -gzip on; -gzip_types text/css text/plain application/xml application/json application/javascript; - -map $http_x_forwarded_proto $real_scheme { - default $http_x_forwarded_proto; - '' $scheme; -} - -proxy_temp_path <t t-esc="nginx_dir"/>; -proxy_read_timeout 600; -proxy_connect_timeout 600; 
-proxy_set_header X-Forwarded-Host $host; -proxy_set_header X-Forwarded-Proto $real_scheme; -proxy_set_header Host $host; - -server { - listen 8080 default; - location /runbot/static/ { - alias <t t-esc="runbot_static"/>; - autoindex off; - location ~ /runbot/static/build/[^/]+/(logs|tests)/ { - autoindex on; - add_header 'Access-Control-Allow-Origin' '<t t-esc="base_url"/>'; - } - } -} - -<t t-foreach="builds" t-as="build"> -server { - listen 8080; - server_name ~^<t t-out="re_escape(build.dest)"/>(-[a-z0-9_]+)?\.<t t-esc="re_escape(build.host)"/>$; - location / { proxy_pass http://127.0.0.1:<t t-esc="build.port"/>; } - location /longpolling { proxy_pass http://127.0.0.1:<t t-esc="build.port + 1"/>; } - location /websocket { - proxy_pass http://127.0.0.1:<t t-esc="build.port + 1"/>; - proxy_set_header X-Forwarded-Host $host; - proxy_set_header X-Forwarded-Proto $real_scheme; - proxy_set_header Host $host; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection "Upgrade"; - } -} -</t> -server { - listen 8080; - server_name ~.+\.<t t-out="re_escape(host_name)"/>$; - location / { return 404; } -} -} - </template> - </data> -</odoo> diff --git a/runbot/templates/utils.xml b/runbot/templates/utils.xml deleted file mode 100644 index 2c29ae13..00000000 --- a/runbot/templates/utils.xml +++ /dev/null @@ -1,359 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <!-- base layout --> - <template id="runbot.base_page"> - <html> - <head> - <title t-esc="title or 'Runbot'"/> - <link rel="stylesheet" type="text/css" href="/web/static/lib/bootstrap/css/bootstrap.css"/> - <link rel="stylesheet" type="text/css" href="/web/static/lib/fontawesome/css/font-awesome.css"/> - <link rel="stylesheet" type="text/css" href="/runbot/static/src/css/runbot.css"/> - - <script src="/web/static/lib/jquery/jquery.js" type="text/javascript"/> - <script type="text/javascript" src="/web/static/lib/popper/popper.js"/> - <script type="text/javascript" src="/web/static/lib/bootstrap/js/util.js"/> - <script type="text/javascript" src="/web/static/lib/bootstrap/js/dropdown.js"/> - <script type="text/javascript" src="/web/static/lib/bootstrap/js/collapse.js"/> - <script type="text/javascript" src="/runbot/static/src/js/runbot.js"/> - - <t t-if="refresh"> - <meta http-equiv="refresh" t-att-content="refresh"/> - </t> - - </head> - <body> - <t t-out="0"/> - </body> - </html> - </template> - - <template id="runbot.layout" inherit_id="runbot.base_page" primary="True"> - <xpath expr="//body" position="replace"> - <body> - <header> - <nav class="navbar navbar-expand-md navbar-light bg-light"> - <a t-if="project" t-att-href="qu(search=search)"> - <b style="color:#777;"> - <t t-esc="project.name"/> - </b> - </a> - <button type="button" class="navbar-toggler" data-toggle="collapse" data-target="#top_menu_collapse"> - <span class="navbar-toggler-icon"/> - </button> - <div class="collapse navbar-collapse" id="top_menu_collapse"> - <ul class="nav navbar-nav ml-auto text-right" id="top_menu"> - <t t-if="projects"> - <t t-foreach="projects" t-as="l_project"> - <li class="nav-item"> - <a class="nav-link" t-att-href="qu('/runbot/%s' % slug(l_project), search=search)"> - <t t-esc="l_project.name"/> - </a> - </li> - </t> - </t> - <li class="nav-item divider"/> - <li class="nav-item dropdown"> - <a href="#" class="nav-link dropdown-toggle" data-toggle="dropdown"> - <i class="fa fa-gear"/> - </a> - <div class="dropdown-menu" role="menu"> - <form class="px-4 py-3" method="post" action="/runbot/submit"> - <input 
type="hidden" name="save" value="1"/> - <input type="hidden" name="redirect" t-att-value="current_path"/> - <div class="text-nowrap"> - <input type="checkbox" name="more" id="more" t-att-checked="more"/> - <label for="more">More info</label> - </div> - <div class="text-nowrap"> - <input type="checkbox" name="keep_search" id="keep_search" t-att-checked="keep_search"/> - <label for="keep_search">Persistent search</label> - </div> - <hr class="separator"/> - <div class="text-nowrap"> - <label for="filter_mode">Filter</label> - <select class="form-control" name="filter_mode" id="filter_mode"> - <option value="all" t-att-selected="filter_mode=='all'">All</option> - <option value="sticky" t-att-selected="filter_mode=='sticky'">Sticky only</option> - <option value="nosticky" t-att-selected="filter_mode=='nosticky'">Dev only</option> - </select> - </div> - <div t-if="categories" class="text-nowrap"> - <label for="category">Category</label> - <select class="form-control" name="category" id="category"> - <option t-foreach="categories" t-as="category" t-att-value="category.id" t-esc="category.name" t-att-selected="category.id==active_category_id"/> - </select> - </div> - <hr class="separator"/> - <t t-if="triggers"> - <input type="hidden" name="update_triggers" t-att-value="project.id"/> - <t t-foreach="triggers" t-as="trigger"> - <div class="text-nowrap"> - <input type="checkbox" t-attf-name="trigger_{{trigger.id}}" t-attf-id="trigger_{{trigger.id}}" t-att-checked="trigger_display is None or trigger.id in trigger_display"/> - <label t-attf-for="trigger_{{trigger.id}}" t-esc="trigger.name"/> - </div> - </t> - </t> - <button type="submit" class="btn btn-primary">Save</button> - </form> - </div> - </li> - <li class="nav-item divider" t-ignore="true"/> - <t t-if="not user_id._is_public()"> - <t t-call="runbot.build_errors_link"/> - <li class="nav-item dropdown" t-ignore="true"> - <a href="#" class="nav-link dropdown-toggle" data-toggle="dropdown"> - <b> - <span t-esc="user_id.name[:23] + '...' 
if user_id.name and len(user_id.name) > 25 else user_id.name"/> - </b> - </a> - <div class="dropdown-menu js_usermenu" role="menu"> - <a class="dropdown-item" id="o_logout" role="menuitem" t-attf-href="/web/session/logout?redirect=/">Logout</a> - <a class="dropdown-item" role="menuitem" t-attf-href="/web">Web</a> - <div t-if="user_id.runbot_team_ids" class="dropdown-divider"/> - <div t-if="user_id.runbot_team_ids" class="dropdown-header">Teams</div> - <a t-foreach="user_id.runbot_team_ids" t-as="team" class="dropdown-item" role="menuitem" t-attf-href="/runbot/teams/{{team.id}}"> - <t t-esc="team.name.capitalize()"/> - </a> - </div> - </li> - </t> - <t t-else=""> - <li class="nav-item dropdown" t-ignore="true"> - <b> - <a class="nav-link" t-attf-href="/web/login?redirect={{request.httprequest.path}}">Login</a> - </b> - </li> - </t> - </ul> - <t t-out="nav_form or ''"> - </t> - </div> - </nav> - </header> - <t t-out="0"/> - </body> - </xpath> - </template> - - <template id="runbot.build_errors_link"> - <t t-if="nb_assigned_errors and nb_assigned_errors > 0"> - <li class="nav-item divider"/> - <li class="nav-item"> - <a href="/runbot/errors" class="nav-link text-danger" t-attf-title="You have {{nb_assigned_errors}} random bug assigned"><i class="fa fa-bug"/><t t-esc="nb_assigned_errors"/></a> - </li> - </t> - <t t-elif="nb_build_errors and nb_build_errors > 0"> - <li class="nav-item divider"/> - <li class="nav-item"> - <a href="/runbot/errors" class="nav-link" title="Random Bugs"><i class="fa fa-bug"/></a> - </li> - </t> - </template> - - <template id="runbot.slots_infos" name="Hosts slot nb pending/testing/slots"> - <a href="/runbot/load_info" class="slots_infos"> - <span t-attf-class="badge badge-{{pending_level}}"> - Pending: - <t t-esc="pending_total"/> - </span> - <t t-set="testing" t-value="hosts_data._total_testing()"/> - <t t-set="workers" t-value="hosts_data._total_workers()"/> - <t t-set="klass">success</t> - <t t-if="not workers" t-set="klass">danger</t> - <t t-else=""> - <t t-if="int(testing)/workers > 0" t-set="klass">info</t> - <t t-if="int(testing)/workers > 0.75" t-set="klass">warning</t> - <t t-if="int(testing)/workers >= 1" t-set="klass">danger</t> - </t> - <span t-attf-class="badge badge-{{klass}}"> - Testing: - <t t-esc="testing"/> - / - <t t-esc="workers"/> - </span> - </a> - </template> - - <template id="runbot.slot_button"> - <t t-set="bu" t-value="slot.build_id"/> - <t t-set="color" t-value="bu.get_color_class()"/> - <div t-attf-class="btn-group btn-group-ssm slot_button_group"> - <span t-attf-class="btn btn-{{color}} disabled" t-att-title="slot.link_type"> - <i t-attf-class="fa fa-{{slot.fa_link_type()}}"/> - </span> - <a t-if="bu" t-attf-href="/runbot/batch/{{slot.batch_id.id}}/build/#{bu.id}" t-attf-class="btn btn-default slot_name"> - <span t-esc="slot.trigger_id.name"/> - </a> - <span t-else="" t-attf-class="btn btn-default disabled slot_name"> - <span t-esc="slot.trigger_id.name"/> - </span> - <a t-if="bu.local_state == 'running' and bu.database_ids" t-attf-href="http://{{sorted(bu.mapped('database_ids.name'))[0]}}.{{bu.host}}" class="fa fa-sign-in btn btn-info"/> - <a t-if="bu.static_run" t-att-href="bu.static_run" class="fa fa-sign-in btn btn-info"/> - <t t-if="bu" t-call="runbot.build_menu"/> - <a t-if="not bu" groups="base.group_user" class="btn btn-default" title="Create build" t-attf-href="/runbot/batch/slot/{{slot.id}}/build"> - <i class="fa fa-play fa-fw"/> - </a> - </div> - </template> - - <template id="runbot.build_button"> - <div 
t-attf-class="pull-right"> - <div t-attf-class="btn-group {{klass}}"> - <a t-if="bu.local_state == 'running' and bu.database_ids" t-attf-href="http://{{sorted(bu.mapped('database_ids.name'))[0]}}.{{bu.host}}" class="btn btn-info" title="Sign in on this build" aria-label="Sign in on this build"> - <i class="fa fa-sign-in"/> - </a> - <a t-if="bu.static_run" t-att-href="bu.static_run" class="btn btn-info" title="View result" aria-label="View result"> - <i class="fa fa-sign-in"/> - </a> - <a t-if="bu.local_state=='done' and bu.requested_action != 'wake_up' and bu.database_ids" href="#" data-runbot="wakeup" t-att-data-runbot-build="bu.id" class="btn btn-default" title="Wake up this build" aria-label="Wake up this build"> - <i class="fa fa-coffee"/> - </a> - <a t-attf-href="/runbot/build/{{bu['id']}}" class="btn btn-default" title="Build details" aria-label="Build details"> - <i class="fa fa-file-text-o"/> - </a> - <!--<a t-if="show_commit_button" t-attf-href="https://#{repo.base_url}/commit/#{bu['name']}" class="btn btn-default" title="Open commit on GitHub" aria-label="Open commit on GitHub"><i class="fa fa-github"/></a>--> - <t t-call="runbot.build_menu"/> - </div> - </div> - </template> - <!-- Event / Logs page --> - <template id="runbot.build_class"> - <t t-set="rowclass">info</t> - <t t-if="build.global_state in ['running','done']"> - <t t-if="build.global_result == 'ok'"> - <t t-set="rowclass">success</t> - </t> - <t t-if="build.global_result == 'skipped'"> - <t t-set="rowclass">default</t> - </t> - <t t-if="build.global_result in ['killed', 'manually_killed']"> - <t t-set="rowclass">killed</t> - </t> - </t> - <t t-if="build.global_result == 'ko'"> - <t t-set="rowclass">danger</t> - </t> - <t t-if="build.global_result == 'warn'"> - <t t-set="rowclass">warning</t> - </t> - <t t-esc="rowclass"/> - </template> - - <template id="runbot.build_menu"> - <button t-attf-class="btn btn-default dropdown-toggle" data-toggle="dropdown" title="Build options" aria-label="Build options" aria-expanded="false"> - <i t-attf-class="fa {{'fa-spinner' if bu.global_state == 'pending' else 'fa-cog'}} {{'' if bu.global_state in ('done', 'running') else 'fa-spin'}} fa-fw"/> - <span class="caret"/> - </button> - <div class="dropdown-menu dropdown-menu-right" role="menu"> - <a t-if="bu.global_result=='skipped'" groups="runbot.group_runbot_admin" class="dropdown-item" href="#" data-runbot="rebuild" t-att-data-runbot-build="bu['id']"> - <i class="fa fa-level-up"/> - Force Build - </a> - <t t-if="bu.local_state=='running'"> - <t t-foreach="bu.database_ids.sorted('name')[1:]" t-as="db"> - <a class="dropdown-item" t-attf-href="http://{{db.name}}.{{bu.host}}/"> - <i class="fa fa-sign-in"/> - Connect <t t-esc="db.db_suffix"></t> - </a> - </t> - <a class="dropdown-item" t-attf-href="http://{{bu.domain}}/web/database/selector"> - <i class="fa fa-sign-in"/> - Database selector - </a> - </t> - <a class="dropdown-item" t-if="bu.global_state in ['done','running'] or requested_action == 'deathrow'" groups="base.group_user" href="#" data-runbot="rebuild" t-att-data-runbot-build="bu['id']" title="Retry this build, usefull for false positive"> - <i class="fa fa-refresh"/> - Rebuild - </a> - <t t-if="bu.global_state != 'done'"> - <t t-if="bu.requested_action != 'deathrow'"> - <a groups="base.group_user" href="#" data-runbot="kill" class="dropdown-item" t-att-data-runbot-build="bu['id']"> - <i class="fa fa-crosshairs"/> - Kill - </a> - </t> - <t t-else=""> - <a groups="base.group_user" class="dropdown-item disabled"> - <i class="fa 
fa-spinner fa-spin"/> - Killing - <i class="fa fa-crosshairs"/> - </a> - </t> - </t> - <t t-if="bu.global_state == 'done'"> - <t t-if="bu.requested_action != 'wake_up'"> - <a groups="base.group_user" class="dropdown-item" href="#" data-runbot="wakeup" t-att-data-runbot-build="bu['id']"> - <i class="fa fa-coffee"/> - Wake up - </a> - </t> - <t t-else=""> - <a groups="base.group_user" class="dropdown-item disabled"> - <i class="fa fa-spinner fa-spin"/> - Waking up - <i class="fa fa-crosshairs"/> - </a> - </t> - </t> - <div t-if="bu.global_state not in ('testing', 'waiting', 'pending')" groups="base.group_user" class="dropdown-divider"/> - <t t-set="log_url" t-value="'http://%s' % bu.host if bu.host != fqdn else ''"/> - <t t-if="bu.host" t-foreach="bu.log_list.split(',') if bu.log_list else []" t-as="log_name"> - <a class="dropdown-item" t-attf-href="{{log_url}}/runbot/static/build/#{bu.dest}/logs/#{log_name}.txt"> - <i class="fa fa-file-text-o"/> - Full - <t t-esc="log_name"/> - logs - </a> - </t> - <t groups="runbot.group_runbot_admin"> - <div class="dropdown-divider"/> - <a class="dropdown-item" t-attf-href="/runbot/build/search?config_id={{bu.params_id.config_id.id}}&trigger_id={{bu.params_id.trigger_id.id}}&version_id={{bu.params_id.version_id.id}}&create_batch_id.bundle_id={{bu.params_id.create_batch_id.bundle_id.id}}&description={{bu.description or ''}}"> - <i class="fa fa-search"/> - Find similar builds - </a> - <a class="dropdown-item" t-attf-href="/web/#id={{bu['id']}}&view_type=form&model=runbot.build&menu_id={{env.ref('runbot.runbot_menu_root').id}}" target="new"> - <i class="fa fa-list"/> - View in backend - </a> - </t> - </div> - </template> - - <template id="runbot.branch_github_menu"> - <button t-attf-class="btn btn-default btn-ssm" data-toggle="dropdown" title="Github links" aria-label="Github links" aria-expanded="false"> - <i t-attf-class="fa fa-github {{'text-primary' if any(branch_id.is_pr and branch_id.alive for branch_id in bundle.branch_ids) else ''}}"/> - <span class="caret"/> - </button> - <div class="dropdown-menu" role="menu"> - <t t-foreach="bundle.branch_ids.sorted(key=lambda b: (not b.alive, b.remote_id.repo_id.sequence, b.remote_id.repo_id.id, b.is_pr, b.id))" t-as="branch"> - <t t-set="link_title" t-value="'View %s %s on Github' % ('PR' if branch.is_pr else 'Branch', branch.name)"/> - <a t-att-href="branch.branch_url" class="dropdown-item" t-att-title="link_title"> - <span class="font-italic text-muted" t-esc="branch.remote_id.short_name"/> <span t-att-class="'' if branch.alive else 'line-through'" t-esc="branch.name"/> <i t-if="not branch.alive" title="deleted/closed" class="fa fa-ban text-danger"/> - </a> - </t> - </div> - </template> - - <template id="runbot.branch_copy_button"> - <button t-attf-class="btn btn-default btn-ssm" title="Copy Bundle name" aria-label="Copy Bundle name" t-attf-onclick="copyToClipboard('{{ bundle.name.split(':')[-1] }}')"> - <i t-attf-class="fa fa-clipboard"/> - </button> - </template> - - - <template id="runbot.bundle_stats_dropdown"> - <button t-attf-class="btn btn-default dropdown-toggle" data-toggle="dropdown" title="Bundle Stats" aria-label="Bundle Stats" aria-expanded="false"> - <i t-attf-class="fa fa-bar-chart"/> - <span class="caret"/> - </button> - <div class="dropdown-menu dropdown-menu-right" role="menu"> - <t t-foreach="project.trigger_ids" t-as="trigger"> - <a class="dropdown-item" t-if="trigger.has_stats" t-attf-href="/runbot/stats/{{bundle.id}}/{{trigger.id}}"> - <t t-esc="trigger.name" /> - </a> - </t> - </div> - 
</template> - </data> -</odoo> diff --git a/runbot/tests/__init__.py b/runbot/tests/__init__.py deleted file mode 100644 index 810e0b47..00000000 --- a/runbot/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from . import common -from . import test_repo -from . import test_build_error -from . import test_branch -from . import test_build -from . import test_schedule -from . import test_cron -from . import test_build_config_step -from . import test_event -from . import test_command -from . import test_build_stat -from . import test_version -from . import test_runbot -from . import test_commit -from . import test_upgrade -from . import test_dockerfile diff --git a/runbot/tests/common.py b/runbot/tests/common.py deleted file mode 100644 index 6d71d985..00000000 --- a/runbot/tests/common.py +++ /dev/null @@ -1,232 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import time -from odoo.tests.common import TransactionCase -from unittest.mock import patch, DEFAULT - -import logging - -_logger = logging.getLogger(__name__) - - -class RunbotCase(TransactionCase): - - def mock_git_helper(self): - """Helper that returns a mock for repo._git()""" - def mock_git(repo, cmd): - if cmd[:2] == ['show', '-s'] or cmd[:3] == ['show', '--pretty="%H -- %s"', '-s']: - return 'commit message for %s' % cmd[-1] - if cmd[:2] == ['cat-file', '-e']: - return True - if cmd[0] == 'for-each-ref': - if self.commit_list.get(repo.id): - return '\n'.join(['\0'.join(commit_fields) for commit_fields in self.commit_list[repo.id]]) - else: - return '' - else: - _logger.warning('Unsupported mock command %s' % cmd) - return mock_git - - def push_commit(self, remote, branch_name, subject, sha=None, tstamp=None, committer=None, author=None): - """Helper to simulate a commit pushed""" - - committer = committer or "Marc Bidule" - commiter_email = '%s@somewhere.com' % committer.lower().replace(' ', '_') - author = author or committer - author_email = '%s@somewhere.com' % author.lower().replace(' ', '_') - self.commit_list[self.repo_server.id] = [( - 'refs/%s/heads/%s' % (remote.remote_name, branch_name), - sha or 'd0d0caca', - str(tstamp or int(time.time())), - committer, - commiter_email, - subject, - author, - author_email)] - - def setUp(self): - super().setUp() - self.Project = self.env['runbot.project'] - self.Build = self.env['runbot.build'] - self.BuildParameters = self.env['runbot.build.params'] - self.Repo = self.env['runbot.repo'].with_context(mail_create_nolog=True, mail_notrack=True) - self.Remote = self.env['runbot.remote'].with_context(mail_create_nolog=True, mail_notrack=True) - self.Trigger = self.env['runbot.trigger'].with_context(mail_create_nolog=True, mail_notrack=True) - self.Branch = self.env['runbot.branch'] - self.Bundle = self.env['runbot.bundle'] - self.Version = self.env['runbot.version'] - self.Config = self.env['runbot.build.config'].with_context(mail_create_nolog=True, mail_notrack=True) - self.Step = self.env['runbot.build.config.step'].with_context(mail_create_nolog=True, mail_notrack=True) - self.Commit = self.env['runbot.commit'] - self.Runbot = self.env['runbot.runbot'] - self.project = self.env['runbot.project'].create({'name': 'Tests'}) - self.repo_server = self.Repo.create({ - 'name': 'server', - 'project_id': self.project.id, - 'server_files': 'server.py', - 'addons_paths': 'addons,core/addons' - }) - self.repo_addons = self.Repo.create({ - 'name': 'addons', - 'project_id': self.project.id, - }) - - self.remote_server = self.Remote.create({ - 'name': 'bla@example.com:base/server', - 'repo_id': 
self.repo_server.id, - 'token': '123', - }) - self.remote_server_dev = self.Remote.create({ - 'name': 'bla@example.com:dev/server', - 'repo_id': self.repo_server.id, - 'token': '123', - }) - self.remote_addons = self.Remote.create({ - 'name': 'bla@example.com:base/addons', - 'repo_id': self.repo_addons.id, - 'token': '123', - }) - self.remote_addons_dev = self.Remote.create({ - 'name': 'bla@example.com:dev/addons', - 'repo_id': self.repo_addons.id, - 'token': '123', - }) - - self.version_13 = self.Version.create({'name': '13.0'}) - self.default_config = self.env.ref('runbot.runbot_build_config_default') - - self.base_params = self.BuildParameters.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - }) - - self.trigger_server = self.Trigger.create({ - 'name': 'Server trigger', - 'repo_ids': [(4, self.repo_server.id)], - 'config_id': self.default_config.id, - 'project_id': self.project.id, - }) - - self.trigger_addons = self.Trigger.create({ - 'name': 'Addons trigger', - 'repo_ids': [(4, self.repo_addons.id)], - 'dependency_ids': [(4, self.repo_server.id)], - 'config_id': self.default_config.id, - 'project_id': self.project.id, - }) - - self.patchers = {} - self.patcher_objects = {} - self.commit_list = {} - - self.start_patcher('git_patcher', 'odoo.addons.runbot.models.repo.Repo._git', new=self.mock_git_helper()) - self.start_patcher('fqdn_patcher', 'odoo.addons.runbot.common.socket.getfqdn', 'host.runbot.com') - self.start_patcher('github_patcher', 'odoo.addons.runbot.models.repo.Remote._github', {}) - self.start_patcher('repo_root_patcher', 'odoo.addons.runbot.models.runbot.Runbot._root', '/tmp/runbot_test/static') - self.start_patcher('makedirs', 'odoo.addons.runbot.common.os.makedirs', True) - self.start_patcher('mkdir', 'odoo.addons.runbot.common.os.mkdir', True) - self.start_patcher('local_pgadmin_cursor', 'odoo.addons.runbot.common.local_pgadmin_cursor', False) # avoid to create databases - self.start_patcher('isdir', 'odoo.addons.runbot.common.os.path.isdir', True) - self.start_patcher('isfile', 'odoo.addons.runbot.common.os.path.isfile', True) - self.start_patcher('docker_run', 'odoo.addons.runbot.container._docker_run') - self.start_patcher('docker_build', 'odoo.addons.runbot.container._docker_build') - self.start_patcher('docker_ps', 'odoo.addons.runbot.container._docker_ps', []) - self.start_patcher('docker_stop', 'odoo.addons.runbot.container._docker_stop') - self.start_patcher('docker_get_gateway_ip', 'odoo.addons.runbot.models.build_config.docker_get_gateway_ip', None) - - self.start_patcher('cr_commit', 'odoo.sql_db.Cursor.commit', None) - self.start_patcher('repo_commit', 'odoo.addons.runbot.models.runbot.Runbot._commit', None) - self.start_patcher('_local_cleanup_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_cleanup') - self.start_patcher('_local_pg_dropdb_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_pg_dropdb') - - self.start_patcher('set_psql_conn_count', 'odoo.addons.runbot.models.host.Host.set_psql_conn_count', None) - self.start_patcher('reload_nginx', 'odoo.addons.runbot.models.runbot.Runbot._reload_nginx', None) - self.start_patcher('update_commits_infos', 'odoo.addons.runbot.models.batch.Batch._update_commits_infos', None) - self.start_patcher('_local_pg_createdb', 'odoo.addons.runbot.models.build.BuildResult._local_pg_createdb', True) - self.start_patcher('getmtime', 'odoo.addons.runbot.common.os.path.getmtime', datetime.datetime.now().timestamp()) - - 
self.start_patcher('_get_py_version', 'odoo.addons.runbot.models.build.BuildResult._get_py_version', 3) - - def start_patcher(self, patcher_name, patcher_path, return_value=DEFAULT, side_effect=DEFAULT, new=DEFAULT): - - def stop_patcher_wrapper(): - self.stop_patcher(patcher_name) - - patcher = patch(patcher_path, new=new) - if not hasattr(patcher, 'is_local'): - res = patcher.start() - self.addCleanup(stop_patcher_wrapper) - self.patchers[patcher_name] = res - self.patcher_objects[patcher_name] = patcher - if side_effect != DEFAULT: - res.side_effect = side_effect - elif return_value != DEFAULT: - res.return_value = return_value - - def stop_patcher(self, patcher_name): - if patcher_name in self.patcher_objects: - self.patcher_objects[patcher_name].stop() - del self.patcher_objects[patcher_name] - - def additionnal_setup(self): - """Helper that setup a the repos with base branches and heads""" - - self.env['ir.config_parameter'].sudo().set_param('runbot.runbot_is_base_regex', r'^((master)|(saas-)?\d+\.\d+)$') - - self.initial_server_commit = self.Commit.create({ - 'name': 'aaaaaaa', - 'repo_id': self.repo_server.id, - 'date': '2006-12-07', - 'subject': 'New trunk', - 'author': 'purply', - 'author_email': 'puprly@somewhere.com' - }) - - self.branch_server = self.Branch.create({ - 'name': 'master', - 'remote_id': self.remote_server.id, - 'is_pr': False, - 'head': self.initial_server_commit.id, - }) - self.assertEqual(self.branch_server.bundle_id.name, 'master') - self.branch_server.bundle_id.is_base = True - initial_addons_commit = self.Commit.create({ - 'name': 'cccccc', - 'repo_id': self.repo_addons.id, - 'date': '2015-03-12', - 'subject': 'Initial commit', - 'author': 'someone', - 'author_email': 'someone@somewhere.com' - }) - - self.branch_addons = self.Branch.create({ - 'name': 'master', - 'remote_id': self.remote_addons.id, - 'is_pr': False, - 'head': initial_addons_commit.id, - }) - self.assertEqual(self.branch_addons.bundle_id, self.branch_server.bundle_id) - triggers = self.env['runbot.trigger'].search([]) - - self.assertEqual(triggers.repo_ids + triggers.dependency_ids, self.remote_addons.repo_id + self.remote_server.repo_id) - - batch = self.branch_addons.bundle_id._force() - batch._prepare() - - -class RunbotCaseMinimalSetup(RunbotCase): - - def start_patchers(self): - """Start necessary patchers for tests that use repo__update_batch() and batch._prepare()""" - def counter(): - i = 100000 - while True: - i += 1 - yield i - - # start patchers - self.start_patcher('repo_get_fetch_head_time_patcher', 'odoo.addons.runbot.models.repo.Repo._get_fetch_head_time') - self.patchers['repo_get_fetch_head_time_patcher'].side_effect = counter() - self.start_patcher('repo_update_patcher', 'odoo.addons.runbot.models.repo.Repo._update') - self.start_patcher('batch_update_commits_infos', 'odoo.addons.runbot.models.batch.Batch._update_commits_infos') diff --git a/runbot/tests/test_branch.py b/runbot/tests/test_branch.py deleted file mode 100644 index 2ce2855e..00000000 --- a/runbot/tests/test_branch.py +++ /dev/null @@ -1,252 +0,0 @@ -# -*- coding: utf-8 -*- -from odoo.tools import mute_logger -from .common import RunbotCase, RunbotCaseMinimalSetup - - -class TestBranch(RunbotCase): - - def test_base_fields(self): - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': 'master', - 'is_pr': False, - }) - - self.assertEqual(branch.branch_url, 'https://example.com/base/server/tree/master') - - def test_pull_request(self): - mock_github = self.patchers['github_patcher'] - 
mock_github.return_value = { - 'base': {'ref': 'master'}, - 'head': {'label': 'foo-dev:bar_branch', 'repo': {'full_name': 'foo-dev/bar'}}, - } - pr = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': '12345', - 'is_pr': True, - }) - self.assertEqual(pr.name, '12345') - self.assertEqual(pr.branch_url, 'https://example.com/base/server/pull/12345') - self.assertEqual(pr.target_branch_name, 'master') - self.assertEqual(pr.pull_head_name, 'foo-dev:bar_branch') - -class TestBranchRelations(RunbotCase): - - def setUp(self): - super(TestBranchRelations, self).setUp() - - def create_base(name): - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': name, - 'is_pr': False, - }) - branch.bundle_id.is_base = True - return branch - self.master = create_base('master') - create_base('11.0') - create_base('saas-11.1') - create_base('12.0') - create_base('saas-12.3') - create_base('13.0') - create_base('saas-13.1') - self.last = create_base('saas-13.2') - self.env['runbot.bundle'].flush() - self.env['runbot.version'].flush() - - def test_relations_master_dev(self): - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 'master-test-tri', - 'is_pr': False, - }) - self.assertEqual(b.bundle_id.base_id.name, 'master') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(b.bundle_id.intermediate_version_base_ids.mapped('name'), ['saas-13.1', 'saas-13.2']) - - def test_relations_master(self): - b = self.master - self.assertEqual(b.bundle_id.base_id.name, 'master') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(b.bundle_id.intermediate_version_base_ids.mapped('name'), ['saas-13.1', 'saas-13.2']) - - def test_relations_no_intermediate(self): - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 'saas-13.1-test-tri', - 'is_pr': False, - }) - self.assertEqual(b.bundle_id.base_id.name, 'saas-13.1') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(b.bundle_id.intermediate_version_base_ids.mapped('name'), []) - - def test_relations_old_branch(self): - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': '11.0-test-tri', - 'is_pr': False, - }) - self.assertEqual(b.bundle_id.base_id.name, '11.0') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, False) - self.assertEqual(sorted(b.bundle_id.intermediate_version_base_ids.mapped('name')), []) - - def test_relations_closest_forced(self): - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 'master-test-tri', - 'is_pr': False, - }) - self.assertEqual(b.bundle_id.base_id.name, 'master') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(sorted(b.bundle_id.intermediate_version_base_ids.mapped('name')), ['saas-13.1', 'saas-13.2']) - - b.bundle_id.defined_base_id = self.last.bundle_id - - self.assertEqual(b.bundle_id.base_id.name, 'saas-13.2') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(sorted(b.bundle_id.intermediate_version_base_ids.mapped('name')), ['saas-13.1']) - - def test_relations_no_match(self): - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 'icantnamemybranches', - 'is_pr': False, - }) - - self.assertEqual(b.bundle_id.base_id.name, 'master') - - def test_relations_pr(self): - self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 
'master-test-tri', - 'is_pr': False, - }) - - self.patchers['github_patcher'].return_value = { - 'base': {'ref': 'master-test-tri'}, - 'head': {'label': 'dev:master-test-tri-imp', 'repo': {'full_name': 'dev/server'}}, - } - b = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': '100', - 'is_pr': True, - }) - - self.assertEqual(b.bundle_id.name, 'master-test-tri-imp') - self.assertEqual(b.bundle_id.base_id.name, 'master') - self.assertEqual(b.bundle_id.previous_major_version_base_id.name, '13.0') - self.assertEqual(sorted(b.bundle_id.intermediate_version_base_ids.mapped('name')), ['saas-13.1', 'saas-13.2']) - - -class TestBranchForbidden(RunbotCase): - """Test that a branch matching the repo forbidden regex, goes to dummy bundle""" - - def test_forbidden(self): - dummy_bundle = self.env.ref('runbot.bundle_dummy') - self.remote_server_dev.repo_id.forbidden_regex = '^bad_name.+' - with mute_logger("odoo.addons.runbot.models.branch"): - branch = self.Branch.create({ - 'remote_id': self.remote_server_dev.id, - 'name': 'bad_name-evil', - 'is_pr': False, - }) - self.assertEqual(branch.bundle_id.id, dummy_bundle.id, "A forbidden branch should goes in dummy bundle") - - -class TestBranchIsBase(RunbotCaseMinimalSetup): - """Test that a branch matching the is_base_regex goes in the right bundle""" - - def setUp(self): - super(TestBranchIsBase, self).setUp() - self.additionnal_setup() - - def test_is_base_regex_on_main_remote(self): - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': 'saas-13.4', - 'is_pr': False, - }) - self.assertTrue(branch.bundle_id.is_base, "A branch matching the is_base_regex parameter should create is_base bundle") - self.assertTrue(branch.bundle_id.sticky, "A branch matching the is_base_regex parameter should create sticky bundle") - - def test_host(self): - r10 = self.env['runbot.host'].create({'name': 'runbot10.odoo.com'}) - r12 = self.env['runbot.host'].create({'name': 'runbot12.odoo.com', 'assigned_only': True}) - - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': 'saas-13.4-runbotinexist-test', - 'is_pr': False, - }) - self.assertFalse(branch.bundle_id.host_id) - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': 'saas-13.4-runbot10-test', - 'is_pr': False, - }) - self.assertEqual(branch.bundle_id.host_id, r10) - branch = self.Branch.create({ - 'remote_id': self.remote_server.id, - 'name': 'saas-13.4-runbot_x-test', - 'is_pr': False, - }) - self.assertEqual(branch.bundle_id.host_id, r12) - - @mute_logger("odoo.addons.runbot.models.branch") - def test_is_base_regex_on_dev_remote(self): - """Test that a branch matching the is_base regex on a secondary remote goes to the dummy bundles.""" - dummy_bundle = self.env.ref('runbot.bundle_dummy') - - # master branch on dev remote - initial_addons_dev_commit = self.Commit.create({ - 'name': 'dddddd', - 'repo_id': self.repo_addons.id, - 'date': '2015-09-30', - 'subject': 'Please use the right repo', - 'author': 'oxo', - 'author_email': 'oxo@somewhere.com' - }) - - branch_addons_dev = self.Branch.create({ - 'name': 'master', - 'remote_id': self.remote_addons_dev.id, - 'is_pr': False, - 'head': initial_addons_dev_commit.id - }) - self.assertEqual(branch_addons_dev.bundle_id, dummy_bundle, "A branch matching the is_base_regex should on a secondary repo should goes in dummy bundle") - - # saas-12.3 branch on dev remote - initial_server_dev_commit = self.Commit.create({ - 'name': 'bbbbbb', - 'repo_id': self.repo_server.id, - 'date': 
'2014-05-26', - 'subject': 'Please use the right repo', - 'author': 'oxo', - 'author_email': 'oxo@somewhere.com' - }) - - branch_server_dev = self.Branch.create({ - 'name': 'saas-12.3', - 'remote_id': self.remote_server_dev.id, - 'is_pr': False, - 'head': initial_server_dev_commit.id - }) - self.assertEqual(branch_server_dev.bundle_id, dummy_bundle, "A branch matching the is_base_regex on a secondary repo should go in the dummy bundle") - - # 12.0 branch on dev remote - mistaken_commit = self.Commit.create({ - 'name': 'eeeeee', - 'repo_id': self.repo_server.id, - 'date': '2015-06-27', - 'subject': 'dummy commit', - 'author': 'brol', - 'author_email': 'brol@somewhere.com' - }) - - branch_mistake_dev = self.Branch.create({ - 'name': '12.0', - 'remote_id': self.remote_server_dev.id, - 'is_pr': False, - 'head': mistaken_commit.id - }) - self.assertEqual(branch_mistake_dev.bundle_id, dummy_bundle, "A branch matching the is_base_regex on a secondary repo should go in the dummy bundle") diff --git a/runbot/tests/test_build.py b/runbot/tests/test_build.py deleted file mode 100644 index 851edb8f..00000000 --- a/runbot/tests/test_build.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime - -from unittest.mock import patch - -from odoo import fields -from odoo.exceptions import UserError, ValidationError -from .common import RunbotCase, RunbotCaseMinimalSetup - - -def rev_parse(repo, branch_name): - """ - simulate a rev parse by returning a fake hash of form - 'rp_odoo-dev/enterprise_saas-12.2__head' - should be overwritten if a pr head should match a branch head - """ - head_hash = 'rp_%s_%s_head' % (repo.name.split(':')[1], branch_name.split('/')[-1]) - return head_hash - - -class TestBuildParams(RunbotCaseMinimalSetup): - - def setUp(self): - super(TestBuildParams, self).setUp() - - def test_params(self): - - server_commit = self.Commit.create({ - 'name': 'dfdfcfcf0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_server.id - }) - - params = self.BuildParameters.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'commit_link_ids': [ - (0, 0, {'commit_id': server_commit.id}) - ], - 'config_data': {'foo': 'bar'} - }) - - # test that creating the same params again does not create a new record - same_params = self.BuildParameters.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'commit_link_ids': [ - (0, 0, {'commit_id': server_commit.id}) - ], - 'config_data': {'foo': 'bar'} - }) - - self.assertEqual(params.fingerprint, same_params.fingerprint) - self.assertEqual(params.id, same_params.id) - - # test that params cannot be overwritten - with self.assertRaises(UserError): - params.write({'modules': 'bar'}) - - # Test that a copied param without changes does not create a new record - copied_params = params.copy() - self.assertEqual(copied_params.id, params.id) - - # Test copy with a parameter change - other_commit = self.Commit.create({ - 'name': 'deadbeef0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_server.id - }) - - copied_params = params.copy({ - 'commit_link_ids': [ - (0, 0, {'commit_id': other_commit.id}) - ] - }) - self.assertNotEqual(copied_params.id, params.id) - - def test_trigger_build_config(self): - """Test that a build gets the build config from the trigger""" - self.additionnal_setup() - self.start_patchers() - - self.trigger_server.description = expected_description = "A nice trigger description" - - # A commit 
is found on the dev remote - branch_a_name = 'master-test-something' - self.push_commit(self.remote_server_dev, branch_a_name, 'nice subject', sha='d0d0caca') - - # batch preparation - self.repo_server._update_batches() - - # prepare last_batch - bundle = self.env['runbot.bundle'].search([('name', '=', branch_a_name), ('project_id', '=', self.project.id)]) - bundle.last_batch._prepare() - build_slot = bundle.last_batch.slot_ids.filtered(lambda rec: rec.trigger_id == self.trigger_server) - self.assertEqual(build_slot.build_id.params_id.config_id, self.trigger_server.config_id) - self.assertEqual(build_slot.build_id.description, expected_description, "A build description should reflect the trigger description") - - def test_custom_trigger_config(self): - """Test that a bundle with a custom trigger creates a build with appropriate config""" - self.additionnal_setup() - self.start_patchers() - - # A commit is found on the dev remote - branch_a_name = 'master-test-something' - self.push_commit(self.remote_server_dev, branch_a_name, 'nice subject', sha='d0d0caca') - # batch preparation - self.repo_server._update_batches() - - # create a custom config and a new trigger - custom_config = self.env['runbot.build.config'].create({'name': 'A Custom Config'}) - - # create a custom trigger for the bundle - bundle = self.Bundle.search([('name', '=', branch_a_name), ('project_id', '=', self.project.id)]) - - # create a custom trigger with the custom config linked to the bundle - self.env['runbot.bundle.trigger.custom'].create({ - 'trigger_id': self.trigger_server.id, - 'bundle_id': bundle.id, - 'config_id': custom_config.id - }) - - bundle.last_batch._prepare() - build_slot = bundle.last_batch.slot_ids.filtered(lambda rec: rec.trigger_id == self.trigger_server) - self.assertEqual(build_slot.build_id.params_id.config_id, custom_config) - - -class TestBuildResult(RunbotCase): - - def setUp(self): - super(TestBuildResult, self).setUp() - - self.server_commit = self.Commit.create({ - 'name': 'dfdfcfcf0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_server.id - }) - - self.addons_commit = self.Commit.create({ - 'name': 'd0d0caca0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_addons.id, - }) - - self.server_params = self.base_params.copy({'commit_link_ids': [ - (0, 0, {'commit_id': self.server_commit.id}) - ]}) - - self.addons_params = self.base_params.copy({'commit_link_ids': [ - (0, 0, {'commit_id': self.server_commit.id}), - (0, 0, {'commit_id': self.addons_commit.id}) - ]}) - - self.start_patcher('find_patcher', 'odoo.addons.runbot.common.find', 0) - - def test_base_fields(self): - - build = self.Build.create({ - 'params_id': self.server_params.id, - 'port': '1234' - }) - - self.assertEqual(build.dest, '%05d-13-0' % build.id) - - other = self.Build.create({ - 'params_id': self.server_params.id, - 'local_result': 'ko' - }) - - other.write({'local_result': 'ok'}) - self.assertEqual(other.local_result, 'ko') - - # test a bulk write, that one cannot change from 'ko' to 'ok' - builds = self.Build.browse([build.id, other.id]) - with self.assertRaises(ValidationError): - builds.write({'local_result': 'ok'}) - - def test_markdown_description(self): - build = self.Build.create({ - 'params_id': self.server_params.id, - 'description': 'A nice **description**' - }) - self.assertEqual(build.md_description, 'A nice <strong>description</strong>') - - build.description = "<script>console.log(foo)</script>" - self.assertEqual(build.md_description, "<script>console.log(foo)</script>") - - 
@patch('odoo.addons.runbot.models.build.BuildResult._get_available_modules') - def test_filter_modules(self, mock_get_available_modules): - """ test module filtering """ - - build = self.Build.create({ - 'params_id': self.addons_params.id, - }) - - mock_get_available_modules.return_value = { - self.repo_server: ['good_module', 'bad_module', 'other_good', 'l10n_be', 'hw_foo', 'hwgood', 'hw_explicit'], - self.repo_addons: ['other_mod_1', 'other_mod_2'], - } - - self.repo_server.modules = '-bad_module,-hw_*,hw_explicit,-l10n_*' - self.repo_addons.modules = '-*' - - modules_to_test = build._get_modules_to_test(modules_patterns='') - self.assertEqual(modules_to_test, sorted(['good_module', 'hwgood', 'other_good', 'hw_explicit'])) - - modules_to_test = build._get_modules_to_test(modules_patterns='-*, l10n_be') - self.assertEqual(modules_to_test, sorted(['l10n_be'])) - modules_to_test = build._get_modules_to_test(modules_patterns='l10n_be') - self.assertEqual(modules_to_test, sorted(['good_module', 'hwgood', 'other_good', 'hw_explicit', 'l10n_be'])) - # star to get all available mods - modules_to_test = build._get_modules_to_test(modules_patterns='*, -hw_*, hw_explicit') - self.assertEqual(modules_to_test, sorted(['good_module', 'bad_module', 'other_good', 'l10n_be', 'hwgood', 'hw_explicit', 'other_mod_1', 'other_mod_2'])) - - def test_build_cmd_log_db(self, ): - """ test that the logdb connection URI is taken from the .odoorc file """ - uri = 'postgres://someone:pass@somewhere.com/db' - self.env['ir.config_parameter'].sudo().set_param("runbot.runbot_logdb_uri", uri) - - build = self.Build.create({ - 'params_id': self.server_params.id, - }) - cmd = build._cmd(py_version=3) - self.assertIn('log_db = %s' % uri, cmd.get_config()) - - def test_build_cmd_server_path_no_dep(self): - """ test that the server path and addons path """ - build = self.Build.create({ - 'params_id': self.server_params.id, - }) - cmd = build._cmd(py_version=3) - self.assertEqual('python3', cmd[0]) - self.assertEqual('server/server.py', cmd[1]) - self.assertIn('--addons-path', cmd) - # TODO fix the _get_addons_path and/or _docker_source_folder - # addons_path_pos = cmd.index('--addons-path') + 1 - # self.assertEqual(cmd[addons_path_pos], 'bar/addons,bar/core/addons') - - def test_build_cmd_server_path_with_dep(self): - """ test that the server path and addons path are correct""" - - def is_file(file): - self.assertIn(file, [ - '/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt', - '/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/requirements.txt', - '/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py', - '/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/tools/config.py' - ]) - if file == '/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt': - return False - return True - - def is_dir(file): - paths = [ - 'sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/addons', - 'sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/core/addons', - 'sources/addons/d0d0caca0000ffffffffffffffffffffffffffff' - ] - self.assertTrue(any([path in file for path in paths])) # checking that addons path existence check looks ok - return True - - self.patchers['isfile'].side_effect = is_file - self.patchers['isdir'].side_effect = is_dir - - build = self.Build.create({ - 'params_id': self.addons_params.id, - }) - - cmd = build._cmd(py_version=3) - 
self.assertIn('--addons-path', cmd) - addons_path_pos = cmd.index('--addons-path') + 1 - self.assertEqual(cmd[addons_path_pos], 'server/addons,server/core/addons,addons') - self.assertEqual('server/server.py', cmd[1]) - self.assertEqual('python3', cmd[0]) - - def test_build_gc_date(self): - """ test build gc date and gc_delay""" - build = self.Build.create({ - 'params_id': self.server_params.id, - 'local_state': 'done' - }) - - child_build = self.Build.create({ - 'params_id': self.server_params.id, - 'parent_id': build.id, - 'local_state': 'done' - }) - - # verify that the gc_day is set 30 days later (29 days since we should be a few microseconds later) - delta = fields.Datetime.from_string(build.gc_date) - datetime.datetime.now() - self.assertEqual(delta.days, 29) - child_delta = fields.Datetime.from_string(child_build.gc_date) - datetime.datetime.now() - self.assertEqual(child_delta.days, 14) - - # Keep child build ten days more - child_build.gc_delay = 10 - child_delta = fields.Datetime.from_string(child_build.gc_date) - datetime.datetime.now() - self.assertEqual(child_delta.days, 24) - - # test the real _local_cleanup method - self.stop_patcher('_local_cleanup_patcher') - self.start_patcher('build_local_pgadmin_cursor_patcher', 'odoo.addons.runbot.models.build.local_pgadmin_cursor') - self.start_patcher('build_path_patcher', 'odoo.addons.runbot.models.build.Path') - dbname = '%s-foobar' % build.dest - self.start_patcher('list_local_dbs_patcher', 'odoo.addons.runbot.models.build.list_local_dbs', return_value=[dbname]) - - build._local_cleanup() - self.assertFalse(self.patchers['_local_pg_dropdb_patcher'].called) - build.job_end = datetime.datetime.now() - datetime.timedelta(days=31) - build._local_cleanup() - self.patchers['_local_pg_dropdb_patcher'].assert_called_with(dbname) - - @patch('odoo.addons.runbot.models.build._logger') - def test_build_skip(self, mock_logger): - """test build is skipped""" - build = self.Build.create({ - 'params_id': self.server_params.id, - 'port': '1234', - }) - build._skip() - self.assertEqual(build.local_state, 'done') - self.assertEqual(build.local_result, 'skipped') - - other_build = self.Build.create({ - 'params_id': self.server_params.id, - 'port': '1234', - }) - other_build._skip(reason='A good reason') - self.assertEqual(other_build.local_state, 'done') - self.assertEqual(other_build.local_result, 'skipped') - log_first_part = '%s skip %%s' % (other_build.dest) - mock_logger.info.assert_called_with(log_first_part, 'A good reason') - - def test_children(self): - build1 = self.Build.create({ - 'params_id': self.server_params.id, - }) - build1_1 = self.Build.create({ - 'params_id': self.server_params.id, - 'parent_id': build1.id, - }) - build1_2 = self.Build.create({ - 'params_id': self.server_params.id, - 'parent_id': build1.id, - }) - build1_1_1 = self.Build.create({ - 'params_id': self.server_params.id, - 'parent_id': build1_1.id, - }) - build1_1_2 = self.Build.create({ - 'params_id': self.server_params.id, - 'parent_id': build1_1.id, - }) - - def assert_state(global_state, build): - self.assertEqual(build.global_state, global_state) - - assert_state('pending', build1) - assert_state('pending', build1_1) - assert_state('pending', build1_2) - assert_state('pending', build1_1_1) - assert_state('pending', build1_1_2) - - build1.local_state = 'testing' - build1_1.local_state = 'testing' - build1.local_state = 'done' - build1_1.local_state = 'done' - - assert_state('waiting', build1) - assert_state('waiting', build1_1) - assert_state('pending', build1_2) 
- assert_state('pending', build1_1_1) - assert_state('pending', build1_1_2) - - build1_1_1.local_state = 'testing' - - assert_state('waiting', build1) - assert_state('waiting', build1_1) - assert_state('pending', build1_2) - assert_state('testing', build1_1_1) - assert_state('pending', build1_1_2) - - build1_2.local_state = 'testing' - - assert_state('waiting', build1) - assert_state('waiting', build1_1) - assert_state('testing', build1_2) - assert_state('testing', build1_1_1) - assert_state('pending', build1_1_2) - - build1_2.local_state = 'testing' # writing same state a second time - - assert_state('waiting', build1) - assert_state('waiting', build1_1) - assert_state('testing', build1_2) - assert_state('testing', build1_1_1) - assert_state('pending', build1_1_2) - - build1_1_2.local_state = 'done' - build1_1_1.local_state = 'done' - build1_2.local_state = 'done' - - assert_state('done', build1) - assert_state('done', build1_1) - assert_state('done', build1_2) - assert_state('done', build1_1_1) - assert_state('done', build1_1_2) - - -class TestGc(RunbotCaseMinimalSetup): - - def test_repo_gc_testing(self): - """ test that builds are killed when room is needed on a host """ - - self.additionnal_setup() - - self.start_patchers() - - host = self.env['runbot.host'].create({ - 'name': 'runbot_xxx', - 'nb_worker': 2 - }) - - # A commit is found on the dev remote - branch_a_name = 'master-test-something' - self.push_commit(self.remote_server_dev, branch_a_name, 'nice subject', sha='d0d0caca') - - # batch preparation - self.repo_server._update_batches() - - # prepare last_batch - bundle_a = self.env['runbot.bundle'].search([('name', '=', branch_a_name)]) - bundle_a.last_batch._prepare() - - # now we should have a build in pending state in the bundle - self.assertEqual(len(bundle_a.last_batch.slot_ids), 2) - build_a = bundle_a.last_batch.slot_ids[0].build_id - self.assertEqual(build_a.global_state, 'pending') - - # now another commit is found in another branch - branch_b_name = 'master-test-other-thing' - self.push_commit(self.remote_server_dev, branch_b_name, 'other subject', sha='cacad0d0') - self.repo_server._update_batches() - bundle_b = self.env['runbot.bundle'].search([('name', '=', branch_b_name)]) - bundle_b.last_batch._prepare() - - build_b = bundle_b.last_batch.slot_ids[0].build_id - - # the two builds are starting tests on two different hosts - build_a.write({'local_state': 'testing', 'host': host.name}) - build_b.write({'local_state': 'testing', 'host': 'runbot_yyy'}) - - # no room needed, verify that nobody got killed - self.Runbot._gc_testing(host) - self.assertFalse(build_a.requested_action) - self.assertFalse(build_b.requested_action) - - # a new commit is pushed on branch_a - self.push_commit(self.remote_server_dev, branch_a_name, 'new subject', sha='d0cad0ca') - self.repo_server._update_batches() - bundle_a = self.env['runbot.bundle'].search([('name', '=', branch_a_name)]) - bundle_a.last_batch._prepare() - build_a_last = bundle_a.last_batch.slot_ids[0].build_id - self.assertEqual(build_a_last.local_state, 'pending') - self.assertTrue(build_a.killable, 'The previous build in the batch should be killable') - - # the build_b create a child build - children_b = self.Build.create({ - 'params_id': build_b.params_id.copy().id, - 'parent_id': build_b.id, - 'build_type': build_b.build_type, - }) - - # no room needed, verify that nobody got killed - self.Runbot._gc_testing(host) - self.assertFalse(build_a.requested_action) - self.assertFalse(build_b.requested_action) - 
self.assertFalse(build_a_last.requested_action) - self.assertFalse(children_b.requested_action) - - # now children_b starts on runbot_xxx - children_b.write({'local_state': 'testing', 'host': host.name}) - - # we are now in a situation where there is no more room on runbot_xxx - # and there is a pending build: build_a_last - # so we need to make room - self.Runbot._gc_testing(host) - - # the killable build should have been marked to be killed - self.assertEqual(build_a.requested_action, 'deathrow') - self.assertFalse(build_b.requested_action) - self.assertFalse(build_a_last.requested_action) - self.assertFalse(children_b.requested_action) diff --git a/runbot/tests/test_build_config_step.py b/runbot/tests/test_build_config_step.py deleted file mode 100644 index 6911eac1..00000000 --- a/runbot/tests/test_build_config_step.py +++ /dev/null @@ -1,499 +0,0 @@ -# -*- coding: utf-8 -*- -from unittest.mock import patch, mock_open -from odoo.exceptions import UserError -from odoo.addons.runbot.common import RunbotException -from .common import RunbotCase - -class TestBuildConfigStepCommon(RunbotCase): - def setUp(self): - super().setUp() - - self.Build = self.env['runbot.build'] - self.ConfigStep = self.env['runbot.build.config.step'] - self.Config = self.env['runbot.build.config'] - - server_commit = self.Commit.create({ - 'name': 'dfdfcfcf0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_server.id - }) - self.parent_build = self.Build.create({ - 'params_id': self.base_params.copy({'commit_link_ids': [(0, 0, {'commit_id': server_commit.id})]}).id, - }) - self.start_patcher('find_patcher', 'odoo.addons.runbot.common.find', 0) - self.start_patcher('findall_patcher', 'odoo.addons.runbot.models.build.BuildResult.parse_config', {}) - - -class TestBuildConfigStepCreate(TestBuildConfigStepCommon): - - def setUp(self): - super().setUp() - self.config_step = self.ConfigStep.create({ - 'name': 'test_step', - 'job_type': 'create_build', - 'number_builds': 2, - }) - self.child_config = self.Config.create({'name': 'test_config'}) - self.config_step.create_config_ids = [self.child_config.id] - - def test_config_step_create_results(self): - """ Test child builds are taken into account""" - - - self.config_step._run_create_build(self.parent_build, '/tmp/essai') - self.assertEqual(len(self.parent_build.children_ids), 2, 'Two sub-builds should have been generated') - - # check that the result will be ignored by parent build - for child_build in self.parent_build.children_ids: - self.assertFalse(child_build.orphan_result) - child_build.local_result = 'ko' - self.assertEqual(child_build.global_result, 'ko') - - self.assertEqual(self.parent_build.global_result, 'ko') - - def test_config_step_create(self): - """ Test the config step of type create """ - self.config_step.make_orphan = True - self.config_step._run_create_build(self.parent_build, '/tmp/essai') - self.assertEqual(len(self.parent_build.children_ids), 2, 'Two sub-builds should have been generated') - - # check that the result will be ignored by parent build - for child_build in self.parent_build.children_ids: - self.assertTrue(child_build.orphan_result, 'An orphan result config step should mark the build as orphan_result') - child_build.local_result = 'ko' - - self.assertFalse(self.parent_build.global_result) - - def test_config_step_create_child_data(self): - """ Test the config step of type create """ - self.config_step.number_builds = 5 - json_config = {'child_data': [{'extra_params': '-i m1'}, {'extra_params': '-i m2'}]} - self.parent_build = 
self.Build.create({ - 'params_id': self.base_params.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'config_data': json_config, - }).id, - }) - - self.config_step._run_create_build(self.parent_build, '/tmp/essai') - self.assertEqual(len(self.parent_build.children_ids), 10, '10 build should have been generated') - - # check that the result will be ignored by parent build - for child_build in self.parent_build.children_ids: - self.assertTrue(child_build.config_id, self.child_config) - - def test_config_step_create_child_data_unique(self): - """ Test the config step of type create """ - self.config_step.number_builds = 5 - json_config = {'child_data': {'extra_params': '-i m1'}} - self.parent_build = self.Build.create({ - 'params_id': self.base_params.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'config_data': json_config, - }).id, - }) - - self.config_step._run_create_build(self.parent_build, '/tmp/essai') - self.assertEqual(len(self.parent_build.children_ids), 5, '5 build should have been generated') - - # check that the result will be ignored by parent build - for child_build in self.parent_build.children_ids: - self.assertTrue(child_build.config_id, self.child_config) - - def test_config_step_create_child_data_with_config(self): - """ Test the config step of type create """ - - test_config_1 = self.Config.create({'name': 'test_config1'}) - test_config_2 = self.Config.create({'name': 'test_config2'}) - - self.config_step.number_builds = 5 - json_config = {'child_data': [{'extra_params': '-i m1', 'config_id': test_config_1.id}, {'config_id': test_config_2.id}]} - self.parent_build = self.Build.create({ - 'params_id': self.base_params.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'config_data': json_config, - }).id, - }) - - self.config_step._run_create_build(self.parent_build, '/tmp/essai') - self.assertEqual(len(self.parent_build.children_ids), 10, '10 build should have been generated') - self.assertEqual(len(self.parent_build.children_ids.filtered(lambda b: b.config_id == test_config_1)), 5) - self.assertEqual(len(self.parent_build.children_ids.filtered(lambda b: b.config_id == test_config_2)), 5) - - - - -class TestBuildConfigStep(TestBuildConfigStepCommon): - - def test_config_step_raises(self): - """ Test a config raises when run step position is wrong""" - - run_step = self.ConfigStep.create({ - 'name': 'run_step', - 'job_type': 'run_odoo', - }) - - create_step = self.ConfigStep.create({ - 'name': 'test_step', - 'job_type': 'create_build', - }) - - config = self.Config.create({'name': 'test_config'}) - - # test that the run_odoo step has to be the last one - with self.assertRaises(UserError): - config.write({ - 'step_order_ids': [ - (0, 0, {'sequence': 10, 'step_id': run_step.id}), - (0, 0, {'sequence': 15, 'step_id': create_step.id}), - ] - }) - - # test that the run_odoo step should be preceded by an install step - with self.assertRaises(UserError): - config.write({ - 'step_order_ids': [ - (0, 0, {'sequence': 15, 'step_id': run_step.id}), - (0, 0, {'sequence': 10, 'step_id': create_step.id}), - ] - }) - - def test_config_step_copy(self): - """ Test a config copy with step_order_ids """ - - install_step = self.ConfigStep.create({ - 'name': 'install_step', - 'job_type': 'install_odoo' - }) - - run_step = self.ConfigStep.create({ - 'name': 'run_step', - 'job_type': 
'run_odoo', - }) - - create_step = self.ConfigStep.create({ - 'name': 'test_step', - 'job_type': 'create_build', - }) - - config = self.Config.create({'name': 'test_config'}) - StepOrder = self.env['runbot.build.config.step.order'] - # Creation order is impoortant to reproduce the Odoo copy bug/feature :-) - StepOrder.create({'sequence': 15, 'step_id': run_step.id, 'config_id': config.id}) - StepOrder.create({'sequence': 10, 'step_id': create_step.id, 'config_id': config.id}) - StepOrder.create({'sequence': 12, 'step_id': install_step.id, 'config_id': config.id}) - - dup_config = config.copy() - self.assertEqual(dup_config.step_order_ids.mapped('step_id'), config.step_order_ids.mapped('step_id')) - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_coverage(self, mock_checkout): - config_step = self.ConfigStep.create({ - 'name': 'coverage', - 'job_type': 'install_odoo', - 'coverage': True - }) - - def docker_run(cmd, log_path, *args, **kwargs): - self.assertEqual(cmd.pres, [['sudo', 'pip3', 'install', '-r', 'server/requirements.txt']]) - self.assertEqual(cmd.cmd[:10], ['python3', '-m', 'coverage', 'run', '--branch', '--source', '/data/build', '--omit', '*__manifest__.py', 'server/server.py']) - self.assertIn(['python3', '-m', 'coverage', 'html', '-d', '/data/build/coverage', '--ignore-errors'], cmd.posts) - self.assertIn(['python3', '-m', 'coverage', 'xml', '-o', '/data/build/logs/coverage.xml', '--ignore-errors'], cmd.posts) - self.assertEqual(log_path, 'dev/null/logpath') - - self.patchers['docker_run'].side_effect = docker_run - config_step._run_install_odoo(self.parent_build, 'dev/null/logpath') - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_dump(self, mock_checkout): - config_step = self.ConfigStep.create({ - 'name': 'all', - 'job_type': 'install_odoo', - }) - - def docker_run(cmd, log_path, *args, **kwargs): - dest = self.parent_build.dest - self.assertEqual(cmd.cmd[:2], ['python3', 'server/server.py']) - self.assertEqual(cmd.finals[0], ['pg_dump', '%s-all' % dest, '>', '/data/build/logs/%s-all//dump.sql' % dest]) - self.assertEqual(cmd.finals[1], ['cp', '-r', '/data/build/datadir/filestore/%s-all' % dest, '/data/build/logs/%s-all//filestore/' % dest]) - self.assertEqual(cmd.finals[2], ['cd', '/data/build/logs/%s-all/' % dest, '&&', 'zip', '-rmq9', '/data/build/logs/%s-all.zip' % dest, '*']) - self.assertEqual(log_path, 'dev/null/logpath') - - self.patchers['docker_run'].side_effect = docker_run - - config_step._run_install_odoo(self.parent_build, 'dev/null/logpath') - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_install_tags(self, mock_checkout): - config_step = self.ConfigStep.create({ - 'name': 'all', - 'job_type': 'install_odoo', - 'enable_auto_tags': False, - 'test_tags': '/module,:class.method', - }) - self.env['runbot.build.error'].create({ - 'content': 'foo', - 'random': True, - 'test_tags': ':otherclass.othertest' - }) - - def docker_run(cmd, *args, **kwargs): - cmds = cmd.build().split(' && ') - self.assertEqual(cmds[1].split(' server/server.py')[0], 'python3') - tags = cmds[1].split('--test-tags ')[1].split(' ')[0] - self.assertEqual(tags, '/module,:class.method') - - self.patchers['docker_run'].side_effect = docker_run - config_step._run_install_odoo(self.parent_build, 'dev/null/logpath') - - config_step.enable_auto_tags = True - - def docker_run2(cmd, *args, **kwargs): - cmds = cmd.build().split(' && ') - self.assertEqual(cmds[1].split(' server/server.py')[0], 'python3') - tags = 
cmds[1].split('--test-tags ')[1].split(' ')[0] - self.assertEqual(tags, '/module,:class.method,-:otherclass.othertest') - - self.patchers['docker_run'].side_effect = docker_run2 - config_step._run_install_odoo(self.parent_build, 'dev/null/logpath') - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_db_name(self, mock_checkout): - config_step = self.ConfigStep.create({ - 'name': 'default', - 'job_type': 'install_odoo', - 'custom_db_name': 'custom', - }) - call_count = 0 - assert_db_name = 'custom' - - def docker_run(cmd, log_path, *args, **kwargs): - db_sufgfix = cmd.cmd[cmd.index('-d')+1].split('-')[-1] - self.assertEqual(db_sufgfix, assert_db_name) - nonlocal call_count - call_count += 1 - - self.patchers['docker_run'].side_effect = docker_run - - config_step._run_step(self.parent_build, 'dev/null/logpath') - - assert_db_name = 'custom_build' - parent_build_params = self.parent_build.params_id.copy({'config_data': {'db_name': 'custom_build'}}) - parent_build = self.parent_build.copy({'params_id': parent_build_params.id}) - config_step._run_step(parent_build, 'dev/null/logpath') - - config_step = self.ConfigStep.create({ - 'name': 'run_test', - 'job_type': 'run_odoo', - 'custom_db_name': 'custom', - }) - config_step._run_step(parent_build, 'dev/null/logpath') - - self.assertEqual(call_count, 3) - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_run_python(self, mock_checkout): - """minimal test for python steps. Also test that `-d` in cmd creates a database""" - test_code = """cmd = build._cmd() -cmd += ['-d', 'test_database'] -docker_params = dict(cmd=cmd) - """ - config_step = self.ConfigStep.create({ - 'name': 'default', - 'job_type': 'python', - 'python_code': test_code, - }) - - def docker_run(cmd, *args, **kwargs): - run_cmd = cmd.build() - self.assertIn('-d test_database', run_cmd) - - self.patchers['docker_run'].side_effect = docker_run - config_step._run_step(self.parent_build, 'dev/null/logpath') - self.patchers['docker_run'].assert_called_once() - db = self.env['runbot.database'].search([('name', '=', 'test_database')]) - self.assertEqual(db.build_id, self.parent_build) - - @patch('odoo.addons.runbot.models.build.BuildResult._checkout') - def test_sub_command(self, mock_checkout): - config_step = self.ConfigStep.create({ - 'name': 'default', - 'job_type': 'install_odoo', - 'sub_command': 'subcommand', - }) - call_count = 0 - - def docker_run(cmd, log_path, *args, **kwargs): - nonlocal call_count - sub_command = cmd.cmd[cmd.index('server/server.py')+1] - self.assertEqual(sub_command, 'subcommand') - call_count += 1 - - self.patchers['docker_run'].side_effect = docker_run - config_step._run_step(self.parent_build, 'dev/null/logpath') - - self.assertEqual(call_count, 1) - - -class TestMakeResult(RunbotCase): - - def setUp(self): - super(TestMakeResult, self).setUp() - self.ConfigStep = self.env['runbot.build.config.step'] - self.Config = self.env['runbot.build.config'] - - @patch('odoo.addons.runbot.models.build_config.os.path.getmtime') - @patch('odoo.addons.runbot.models.build.BuildResult._log') - def test_make_result(self, mock_log, mock_getmtime): - file_content = """ -Loading stuff -odoo.stuff.modules.loading: Modules loaded. 
-Some post install stuff -Initiating shutdown -""" - logs = [] - - def _log(func, message, level='INFO', log_type='runbot', path='runbot'): - logs.append((level, message)) - - mock_log.side_effect = _log - mock_getmtime.return_value = 7200 - - config_step = self.ConfigStep.create({ - 'name': 'all', - 'job_type': 'install_odoo', - 'test_tags': '/module,:class.method', - }) - build = self.Build.create({ - 'params_id': self.base_params.id, - }) - logs = [] - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ok'}) - self.assertEqual(logs, [('INFO', 'Getting results for build %s' % build.dest)]) - # no shutdown - logs = [] - file_content = """ -Loading stuff -odoo.stuff.modules.loading: Modules loaded. -Some post install stuff - """ - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'}) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest), - ('ERROR', 'No "Initiating shutdown" found in logs, maybe because of cpu limit.') - ]) - # no loaded - logs = [] - file_content = """ -Loading stuff -""" - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'}) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest), - ('ERROR', 'Modules loaded not found in logs') - ]) - - # traceback - logs = [] - file_content = """ -Loading stuff -odoo.stuff.modules.loading: Modules loaded. -Some post install stuff -2019-12-17 17:34:37,692 17 ERROR dbname path.to.test: FAIL: TestClass.test_ -Traceback (most recent call last): -File "x.py", line a, in test_ - .... -Initiating shutdown -""" - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'}) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest), - ('ERROR', 'Error or traceback found in logs') - ]) - - # warning in logs - logs = [] - file_content = """ -Loading stuff -odoo.stuff.modules.loading: Modules loaded. -Some post install stuff -2019-12-17 17:34:37,692 17 WARNING dbname path.to.test: timeout exceded -Initiating shutdown -""" - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'warn'}) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest), - ('WARNING', 'Warning found in logs') - ]) - - # no log file - logs = [] - self.patchers['isfile'].return_value = False - result = config_step._make_results(build) - - self.assertEqual(result, {'local_result': 'ko'}) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest), - ('ERROR', 'Log file not found at the end of test job') - ]) - - # no error but build was already in warn - logs = [] - file_content = """ -Loading stuff -odoo.stuff.modules.loading: Modules loaded. 
-Some post install stuff -Initiating shutdown -""" - self.patchers['isfile'].return_value = True - build.local_result = 'warn' - with patch('builtins.open', mock_open(read_data=file_content)): - result = config_step._make_results(build) - self.assertEqual(logs, [ - ('INFO', 'Getting results for build %s' % build.dest) - ]) - self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'warn'}) - - @patch('odoo.addons.runbot.models.build_config.ConfigStep._make_tests_results') - def test_make_python_result(self, mock_make_tests_results): - config_step = self.ConfigStep.create({ - 'name': 'all', - 'job_type': 'python', - 'test_tags': '/module,:class.method', - 'python_result_code': """a = 2*5\nreturn_value = {'local_result': 'ok'}""" - }) - build = self.Build.create({ - 'params_id': self.base_params.id, - }) - build.local_state = 'testing' - self.patchers['isfile'].return_value = False - result = config_step._make_results(build) - self.assertEqual(result, {'local_result': 'ok'}) - - # invalid result code (no return_value set) - config_step.python_result_code = """a = 2*5\nr = {'a': 'ok'}""" - with self.assertRaises(RunbotException): - result = config_step._make_results(build) - - # no result defined - config_step.python_result_code = "" - mock_make_tests_results.return_value = {'local_result': 'warning'} - result = config_step._make_results(build) - self.assertEqual(result, {'local_result': 'warning'}) - -# TODO add generic test to copy_paste _run_* in a python step diff --git a/runbot/tests/test_build_error.py b/runbot/tests/test_build_error.py deleted file mode 100644 index b898ee94..00000000 --- a/runbot/tests/test_build_error.py +++ /dev/null @@ -1,225 +0,0 @@ -# -*- coding: utf-8 -*- -from odoo.exceptions import ValidationError -from .common import RunbotCase - -RTE_ERROR = """FAIL: TestUiTranslate.test_admin_tour_rte_translator -Traceback (most recent call last): - File "/data/build/odoo/addons/website/tests/test_ui.py", line 89, in test_admin_tour_rte_translator - self.start_tour("/", 'rte_translator', login='admin', timeout=120) - File "/data/build/odoo/odoo/tests/common.py", line 1062, in start_tour - res = self.browser_js(url_path=url_path, code=code, ready=ready, **kwargs) - File "/data/build/odoo/odoo/tests/common.py", line 1046, in browser_js - self.fail('%s\n%s' % (message, error)) -AssertionError: The test code "odoo.startTour('rte_translator')" failed -Tour rte_translator failed at step click language dropdown (trigger: .js_language_selector .dropdown-toggle) -""" - - -class TestBuildError(RunbotCase): - - def create_test_build(self, vals): - create_vals = { - 'params_id': self.base_params.id, - 'port': '1234', - 'local_result': 'ok' - } - create_vals.update(vals) - return self.Build.create(create_vals) - - def setUp(self): - super(TestBuildError, self).setUp() - self.BuildError = self.env['runbot.build.error'] - self.BuildErrorTeam = self.env['runbot.team'] - - def test_build_scan(self): - IrLog = self.env['ir.logging'] - ko_build = self.create_test_build({'local_result': 'ko'}) - ok_build = self.create_test_build({'local_result': 'ok'}) - - error_team = self.BuildErrorTeam.create({ - 'name': 'test-error-team', - 'path_glob': '*build-error-n*' - }) - - log = {'message': RTE_ERROR, - 'build_id': ko_build.id, - 'level': 'ERROR', - 'type': 'server', - 'name': 'test-build-error-name', - 'path': 'test-build-error-path', - 'func': 'test-build-error-func', - 'line': 1, - } - - # Test the build parse and ensure that an 'ok' build is not parsed - IrLog.create(log) - 
log.update({'build_id': ok_build.id}) - IrLog.create(log) - ko_build._parse_logs() - ok_build._parse_logs() - build_error = self.BuildError.search([('build_ids', 'in', [ko_build.id])]) - self.assertIn(ko_build, build_error.build_ids, 'The parsed build should be added to the runbot.build.error') - self.assertFalse(self.BuildError.search([('build_ids', 'in', [ok_build.id])]), 'A successful build should not be associated with a runbot.build.error') - self.assertEqual(error_team, build_error.team_id) - - # Test that build with same error is added to the errors - ko_build_same_error = self.create_test_build({'local_result': 'ko'}) - log.update({'build_id': ko_build_same_error.id}) - IrLog.create(log) - ko_build_same_error._parse_logs() - self.assertIn(ko_build_same_error, build_error.build_ids, 'The parsed build should be added to the existing runbot.build.error') - - # Test that line numbers do not interfere with error recognition - ko_build_diff_number = self.create_test_build({'local_result': 'ko'}) - rte_diff_numbers = RTE_ERROR.replace('89', '100').replace('1062', '1000').replace('1046', '4610') - log.update({'build_id': ko_build_diff_number.id, 'message': rte_diff_numbers}) - IrLog.create(log) - ko_build_diff_number._parse_logs() - self.assertIn(ko_build_diff_number, build_error.build_ids, 'The parsed build with different line numbers in error should be added to the runbot.build.error') - - # Test that when an error re-appears after the bug has been fixed, - # a new build error is created, with the old one linked - build_error.active = False - ko_build_new = self.create_test_build({'local_result': 'ko'}) - log.update({'build_id': ko_build_new.id}) - IrLog.create(log) - ko_build_new._parse_logs() - self.assertNotIn(ko_build_new, build_error.build_ids, 'The parsed build should not be added to a fixed runbot.build.error') - new_build_error = self.BuildError.search([('build_ids', 'in', [ko_build_new.id])]) - self.assertIn(ko_build_new, new_build_error.build_ids, 'The parsed build with a re-appearing error should generate a new runbot.build.error') - self.assertIn(build_error, new_build_error.error_history_ids, 'The old error should appear in history') - - def test_build_error_links(self): - build_a = self.create_test_build({'local_result': 'ko'}) - build_b = self.create_test_build({'local_result': 'ko'}) - - error_a = self.env['runbot.build.error'].create({ - 'content': 'foo', - 'build_ids': [(6, 0, [build_a.id])], - 'active': False # Even a fixed error could be linked - }) - - error_b = self.env['runbot.build.error'].create({ - 'content': 'bar', - 'build_ids': [(6, 0, [build_b.id])], - 'random': True - }) - - # test that the random bug is the parent when linking errors - all_errors = error_a | error_b - all_errors.link_errors() - self.assertEqual(error_b.child_ids, error_a, 'Random error should be the parent') - - # Test that changing bug resolution is propagated to children - error_b.active = True - self.assertTrue(error_a.active) - error_b.active = False - self.assertFalse(error_a.active) - - # Test build_ids - self.assertIn(build_b, error_b.build_ids) - self.assertNotIn(build_a, error_b.build_ids) - - # Test that children builds contain all builds - self.assertIn(build_b, error_b.children_build_ids) - self.assertIn(build_a, error_b.children_build_ids) - self.assertEqual(error_a.build_count, 1) - self.assertEqual(error_b.build_count, 2) - - def test_build_error_test_tags(self): - build_a = self.create_test_build({'local_result': 'ko'}) - build_b = self.create_test_build({'local_result': 'ko'}) - - 
error_a = self.BuildError.create({ - 'content': 'foo', - 'build_ids': [(6, 0, [build_a.id])], - 'random': True, - 'active': True - }) - - error_b = self.BuildError.create({ - 'content': 'bar', - 'build_ids': [(6, 0, [build_b.id])], - 'random': True, - 'active': False - }) - - # test that a test tag with a dash raise an Vamlidation error - with self.assertRaises(ValidationError): - error_a.test_tags = '-foo' - - error_a.test_tags = 'foo,bar' - error_b.test_tags = 'blah' - self.assertIn('foo', self.BuildError.test_tags_list()) - self.assertIn('bar', self.BuildError.test_tags_list()) - self.assertIn('-foo', self.BuildError.disabling_tags()) - self.assertIn('-bar', self.BuildError.disabling_tags()) - - # test that test tags on fixed errors are not taken into account - self.assertNotIn('blah', self.BuildError.test_tags_list()) - self.assertNotIn('-blah', self.BuildError.disabling_tags()) - - def test_build_error_team_wildcards(self): - website_team = self.BuildErrorTeam.create({ - 'name': 'website_test', - 'path_glob': '*website*,-*website_sale*' - }) - - self.assertTrue(website_team.dashboard_id.exists()) - - self.assertFalse(self.BuildErrorTeam._get_team('odoo/addons/web_studio/tests/test_ui.py')) - self.assertFalse(self.BuildErrorTeam._get_team('odoo/addons/website_sale/tests/test_sale_process.py')) - self.assertEqual(website_team.id, self.BuildErrorTeam._get_team('odoo/addons/website_crm/tests/test_website_crm')) - self.assertEqual(website_team.id, self.BuildErrorTeam._get_team('odoo/addons/website/tests/test_ui')) - - def test_dashboard_tile_simple(self): - self.additionnal_setup() - bundle = self.env['runbot.bundle'].search([('project_id', '=', self.project.id)]) - bundle.last_batch.state = 'done' - bundle.flush() - bundle._compute_last_done_batch() # force the recompute - self.assertTrue(bool(bundle.last_done_batch.exists())) - # simulate a failed build that we want to monitor - failed_build = bundle.last_done_batch.slot_ids[0].build_id - failed_build.global_result = 'ko' - failed_build.flush() - - team = self.env['runbot.team'].create({'name': 'Test team'}) - dashboard = self.env['runbot.dashboard.tile'].create({ - 'project_id': self.project.id, - 'category_id': bundle.last_done_batch.category_id.id, - }) - - self.assertEqual(dashboard.build_ids, failed_build) - -class TestCodeOwner(RunbotCase): - - def setUp(self): - super().setUp() - self.cow_deb = self.env['runbot.codeowner'].create({ - 'project_id' : self.project.id, - 'github_teams': 'runbot', - 'regex': '.*debian.*' - }) - - self.cow_web = self.env['runbot.codeowner'].create({ - 'project_id' : self.project.id, - 'github_teams': 'website', - 'regex': '.*website.*' - }) - - self.cow_crm = self.env['runbot.codeowner'].create({ - 'project_id' : self.project.id, - 'github_teams': 'crm', - 'regex': '.*crm.*' - }) - - self.cow_all = self.cow_deb | self.cow_web | self.cow_crm - - def test_codeowner_invalid_regex(self): - with self.assertRaises(ValidationError): - self.env['runbot.codeowner'].create({ - 'project_id': self.project.id, - 'regex': '*debian.*', - 'github_teams': 'rd-test' - }) diff --git a/runbot/tests/test_build_stat.py b/runbot/tests/test_build_stat.py deleted file mode 100644 index 8e2872c3..00000000 --- a/runbot/tests/test_build_stat.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -from psycopg2 import IntegrityError -from unittest.mock import patch, mock_open -from odoo.exceptions import ValidationError -from odoo.tools import mute_logger -from .common import RunbotCase - - -class 
TestBuildStatRegex(RunbotCase): - def setUp(self): - super(TestBuildStatRegex, self).setUp() - self.StatRegex = self.env["runbot.build.stat.regex"] - self.ConfigStep = self.env["runbot.build.config.step"] - self.BuildStat = self.env["runbot.build.stat"] - self.Build = self.env["runbot.build"] - - params = self.BuildParameters.create({ - 'version_id': self.version_13.id, - 'project_id': self.project.id, - 'config_id': self.default_config.id, - 'config_data': {'make_stats': True} - }) - - self.build = self.Build.create( - { - "params_id": params.id, - "port": "1234", - } - ) - - self.config_step = self.env["runbot.build.config.step"].create( - { - "name": "a_nice_step", - "job_type": "install_odoo", - "make_stats": True, - "build_stat_regex_ids": [(0, 0, {"name": "query_count", "regex": r"odoo.addons.(?P<key>.+) tested in .+, (?P<value>\d+) queries", "generic": False})] - } - ) - - def test_build_stat_regex_validation(self): - - # test that a regex without a named key 'value' raises a ValidationError - with self.assertRaises(ValidationError): - self.StatRegex.create( - {"name": "query_count", "regex": "All post-tested in .+s, .+ queries"} - ) - - def test_build_stat_regex_find_in_file(self): - - max_id = self.BuildStat.search([], order="id desc", limit=1).id or 0 - file_content = """foo bar -2020-03-02 22:06:58,391 17 INFO xxx odoo.modules.module: odoo.addons.website_blog.tests.test_ui tested in 10.35s, 2501 queries -some garbage -2020-03-02 22:07:14,340 17 INFO xxx odoo.modules.module: odoo.addons.website_event.tests.test_ui tested in 9.26s, 2435 queries -nothing to see here -""" - self.start_patcher( - "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True - ) - with patch("builtins.open", mock_open(read_data=file_content)): - self.config_step._make_stats(self.build) - self.assertEqual( - dict(self.BuildStat.search([('category', '=', 'query_count'), ('id', '>', max_id)]).values), - { - 'website_event.tests.test_ui': 2435.0, - 'website_blog.tests.test_ui': 2501.0 - } - ) - - # Check unicity - with self.assertRaises(IntegrityError): - with mute_logger("odoo.sql_db"): - with self.cr.savepoint(): # needed to continue tests - self.env["runbot.build.stat"].create({ - 'build_id': self.build.id, - 'config_step_id': self.config_step.id, - 'category': 'query_count', - 'values': {'website_event.tests.test_ui': 2435}, - } - - ) - - def test_build_stat_regex_generic(self): - """ test that regex are not used when generic is False and that _make_stats use all genreic regex if there are no regex on step """ - max_id = self.BuildStat.search([], order="id desc", limit=1).id or 0 - file_content = """foo bar -odoo.addons.foobar tested in 2s, 25 queries -useless 10 -chocolate 15 -""" - - self.config_step.build_stat_regex_ids = False - - # this one is not generic and thus should not be used - self.StatRegex.create({"name": "useless_count", "regex": r"(?P<key>useless) (?P<value>\d+)", "generic": False}) - - # this is one is the only one that should be used - self.StatRegex.create({"name": "chocolate_count", "regex": r"(?P<key>chocolate) (?P<value>\d+)"}) - - self.start_patcher( - "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True - ) - with patch("builtins.open", mock_open(read_data=file_content)): - self.config_step._make_stats(self.build) - - self.assertEqual(self.BuildStat.search_count([('category', '=', 'query_count'), ('id', '>', max_id)]), 0) - self.assertEqual(self.BuildStat.search_count([('category', '=', 'useless_count'), ('id', '>', max_id)]), 0) - 
self.assertEqual(dict(self.BuildStat.search([('category', '=', 'chocolate_count'), ('id', '>', max_id)]).values), {'chocolate': 15.0}) - - def test_build_stat_regex_find_in_file_perf(self): - max_id = self.BuildStat.search([], order="id desc", limit=1).id or 0 - noise_lines = """2020-03-17 13:26:15,472 2376 INFO runbottest odoo.modules.loading: loading runbot/views/build_views.xml -2020-03-10 22:58:34,472 17 INFO 1709329-master-9938b2-all_no_autotag werkzeug: 127.0.0.1 - - [10/Mar/2020 22:58:34] "POST /mail/read_followers HTTP/1.1" 200 - 13 0.004 0.009 -2020-03-10 22:58:30,137 17 INFO ? werkzeug: 127.0.0.1 - - [10/Mar/2020 22:58:30] "GET /website/static/src/xml/website.editor.xml HTTP/1.1" 200 - - - - -""" - - match_lines = [ - "2020-03-02 22:06:58,391 17 INFO xxx odoo.modules.module: odoo.addons.website_blog.tests.test_ui tested in 10.35s, 2501 queries", - "2020-03-02 22:07:14,340 17 INFO xxx odoo.modules.module: odoo.addons.website_event.tests.test_ui tested in 9.26s, 2435 queries" - ] - - # generate a 13 MiB log file with two potential matches - log_data = "" - for l in match_lines: - log_data += noise_lines * 10000 - log_data += l - log_data += noise_lines * 10000 - - self.start_patcher( - "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True - ) - with patch("builtins.open", mock_open(read_data=log_data)): - self.config_step._make_stats(self.build) - - self.assertEqual( - dict(self.BuildStat.search([('category', '=', 'query_count'), ('id', '>', max_id)]).values), - { - 'website_event.tests.test_ui': 2435.0, - 'website_blog.tests.test_ui': 2501.0 - } - ) diff --git a/runbot/tests/test_command.py b/runbot/tests/test_command.py deleted file mode 100644 index 6583a4ab..00000000 --- a/runbot/tests/test_command.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -from odoo.tests import common -from ..container import Command -from ..container import sanitize_container_name - - -CONFIG = """[options] -foo = bar -""" - - -class Test_Command(common.TransactionCase): - - def test_command(self): - pres = ['pip3', 'install', 'foo'] - posts = ['python3', '-m', 'coverage', 'html'] - finals = ['pgdump bar'] - cmd = Command([pres], ['python3', 'odoo-bin'], [posts], finals=[finals]) - self.assertEqual(str(cmd), 'python3 odoo-bin') - - expected = 'pip3 install foo && python3 odoo-bin && python3 -m coverage html ; pgdump bar' - self.assertEqual(cmd.build(), expected) - - cmd = Command([pres], ['python3', 'odoo-bin'], [posts]) - cmd.add_config_tuple('a', 'b') - cmd += ['bar'] - self.assertIn('bar', cmd.cmd) - cmd.add_config_tuple('x', 'y') - - content = cmd.get_config(starting_config=CONFIG) - - self.assertIn('[options]', content) - self.assertIn('foo = bar', content) - self.assertIn('a = b', content) - self.assertIn('x = y', content) - - with self.assertRaises(AssertionError): - cmd.add_config_tuple('http-interface', '127.0.0.1') - - -class TestSanitizeContainerName(common.TransactionCase): - - def test_sanitize_container_name(self): - - # 1. test that a valid name remains unchanged - valid_name = '3155889-saas-13.4-container-all_at_install' - self.assertEqual(sanitize_container_name(valid_name), valid_name) - - # 2. test a name starting with an invalid character - invalid_name = '#3155889-saas-13.4-container-all_at_install' - self.assertEqual(sanitize_container_name(invalid_name), valid_name) - - # 3. 
test a name with an invalid character somewhere - invalid_name = '3155889-saas-13.4-container#-all_at_install' - self.assertEqual(sanitize_container_name(invalid_name), valid_name) - - # 4. test a name starting with multiple invalid characters - invalid_name = '#/.3155889-saas-13.4-container-all_at_install' - self.assertEqual(sanitize_container_name(invalid_name), valid_name) - - # 5. test both - invalid_name = '_.3155889-saas-13.4-##container/-all_at_install' - self.assertEqual(sanitize_container_name(invalid_name), valid_name) diff --git a/runbot/tests/test_commit.py b/runbot/tests/test_commit.py deleted file mode 100644 index c29d684c..00000000 --- a/runbot/tests/test_commit.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from unittest.mock import patch -from werkzeug.urls import url_parse - -from odoo.tests.common import HttpCase, new_test_user, tagged -from odoo.tools import mute_logger - - -@tagged('post_install', '-at_install') -class TestCommitStatus(HttpCase): - - def setUp(self): - super(TestCommitStatus, self).setUp() - self.project = self.env['runbot.project'].create({'name': 'Tests'}) - self.repo_server = self.env['runbot.repo'].create({ - 'name': 'server', - 'project_id': self.project.id, - 'server_files': 'server.py', - 'addons_paths': 'addons,core/addons' - }) - - self.server_commit = self.env['runbot.commit'].create({ - 'name': 'dfdfcfcf0000ffffffffffffffffffffffffffff', - 'repo_id': self.repo_server.id - }) - - create_context = {'no_reset_password': True, 'mail_create_nolog': True, 'mail_create_nosubscribe': True, 'mail_notrack': True} - with mute_logger('odoo.addons.base.models.ir_attachment'): - self.simple_user = new_test_user(self.env, login='simple', name='simple', password='simple', context=create_context) - self.runbot_admin = new_test_user(self.env, groups='runbot.group_runbot_admin,base.group_user', login='runbot_admin', name='runbot_admin', password='admin', context=create_context) - - def test_commit_status_resend(self): - """test commit status resend""" - - with mute_logger('odoo.addons.http_routing.models.ir_http'), mute_logger('odoo.addons.base.models.ir_attachment'): - commit_status = self.env['runbot.commit.status'].create({ - 'commit_id': self.server_commit.id, - 'context': 'ci/test', - 'state': 'failure', - 'target_url': 'https://www.somewhere.com', - 'description': 'test status' - }) - - # 1. test that unauthenticated users are redirected to the login page - response = self.url_open('/runbot/commit/resend/%s' % commit_status.id) - parsed_response = url_parse(response.url) - self.assertIn('redirect=', parsed_response.query) - self.assertEqual(parsed_response.path, '/web/login') - - # 2. test that a simple Odoo user cannot resend a status - # removed since the 'runbot.group_user' has been given to the 'base.group_user'. - # self.assertEqual(response.status_code, 403) - - # 3. 
test that a non-existsing commit_status returns a 404 - # 3.1 find a non existing commit status id - non_existing_id = self.env['runbot.commit.status'].browse(50000).exists() or 50000 - while self.env['runbot.commit.status'].browse(non_existing_id).exists(): - non_existing_id += 1 - - self.authenticate('runbot_admin', 'admin') - response = self.url_open('/runbot/commit/resend/%s' % non_existing_id) - self.assertEqual(response.status_code, 404) - - #4.1 Test that a status not sent (with not sent_date) can be manually resend - with patch('odoo.addons.runbot.models.commit.CommitStatus._send') as send_patcher: - response = self.url_open('/runbot/commit/resend/%s' % commit_status.id) - self.assertEqual(response.status_code, 200) - send_patcher.assert_called() - - commit_status = self.env['runbot.commit.status'].search([], order='id desc', limit=1) - self.assertEqual(commit_status.description, 'Status resent by runbot_admin') - - # 4.2 Finally test that a new status is created on resend and that the _send method is called - with patch('odoo.addons.runbot.models.commit.CommitStatus._send') as send_patcher: - a_minute_ago = datetime.datetime.now() - datetime.timedelta(seconds=65) - commit_status.sent_date = a_minute_ago - response = self.url_open('/runbot/commit/resend/%s' % commit_status.id) - self.assertEqual(response.status_code, 200) - send_patcher.assert_called() - - last_commit_status = self.env['runbot.commit.status'].search([], order='id desc', limit=1) - self.assertEqual(last_commit_status.description, 'Status resent by runbot_admin') - - # 5. Now that the a new status was created, status is not the last one and thus, cannot be resent - response = self.url_open('/runbot/commit/resend/%s' % commit_status.id) - self.assertEqual(response.status_code, 403) - - # 6. try to immediately resend the commit should fail to avoid spamming github - last_commit_status.sent_date = datetime.datetime.now() # as _send is mocked, the sent_date is not set - with patch('odoo.addons.runbot.models.commit.CommitStatus._send') as send_patcher: - response = self.url_open('/runbot/commit/resend/%s' % last_commit_status.id) - self.assertEqual(response.status_code, 200) - send_patcher.assert_not_called() diff --git a/runbot/tests/test_cron.py b/runbot/tests/test_cron.py deleted file mode 100644 index d9c8a403..00000000 --- a/runbot/tests/test_cron.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -from unittest.mock import patch -from .common import RunbotCase - - -class SleepException(Exception): - ... 
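# [Editor's note] Illustrative sketch added by the editor, not part of the original
# module: the cron tests below break out of Runbot._cron()'s endless polling loop by
# patching time.sleep with a side effect that raises. The same pattern in isolation
# (the busy_loop helper is hypothetical, introduced only for this example):
def _editor_demo_escape_loop():
    import time
    from unittest.mock import patch

    class StopLoop(Exception):
        """Raised by the patched sleep to exit an otherwise endless loop."""

    def busy_loop():
        while True:          # would never return on its own
            time.sleep(1)    # patched below: raises StopLoop instead of sleeping

    with patch('time.sleep', side_effect=StopLoop):
        try:
            busy_loop()
        except StopLoop:
            pass             # exactly one iteration ran, which is what the tests assert on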
- - -def sleep(time): - raise SleepException() - - -class TestCron(RunbotCase): - - def setUp(self): - super(TestCron, self).setUp() - self.start_patcher('_get_cron_period', 'odoo.addons.runbot.models.runbot.Runbot._get_cron_period', 2) - - @patch('time.sleep', side_effect=sleep) - @patch('odoo.addons.runbot.models.repo.Repo._update_batches') - def test_cron_schedule(self, mock_update_batches, *args): - """ test that cron_fetch_and_schedule do its work """ - self.env['ir.config_parameter'].sudo().set_param('runbot.runbot_update_frequency', 1) - self.env['ir.config_parameter'].sudo().set_param('runbot.runbot_do_fetch', True) - self.env['runbot.repo'].search([('id', '!=', self.repo_server.id)]).write({'mode': 'disabled'}) # disable all other existing repo than repo_server - try: - self.Runbot._cron() - except SleepException: - pass # sleep raises an exception to avoid to stay stuck in loop - mock_update_batches.assert_called() - - @patch('time.sleep', side_effect=sleep) - @patch('odoo.addons.runbot.models.host.Host._docker_build') - @patch('odoo.addons.runbot.models.host.Host._bootstrap') - @patch('odoo.addons.runbot.models.runbot.Runbot._scheduler') - def test_cron_build(self, mock_scheduler, mock_host_bootstrap, mock_host_docker_build, *args): - """ test that cron_fetch_and_build do its work """ - hostname = 'cronhost.runbot.com' - self.patchers['fqdn_patcher'].return_value = hostname - self.env['ir.config_parameter'].sudo().set_param('runbot.runbot_update_frequency', 1) - self.env['ir.config_parameter'].sudo().set_param('runbot.runbot_do_schedule', True) - self.env['runbot.repo'].search([('id', '!=', self.repo_server.id)]).write({'mode': 'disabled'}) # disable all other existing repo than repo_server - - try: - self.Runbot._cron() - except SleepException: - pass # sleep raises an exception to avoid to stay stuck in loop - mock_scheduler.assert_called() - mock_host_bootstrap.assert_called() - mock_host_docker_build.assert_called() - host = self.env['runbot.host'].search([('name', '=', hostname)]) - self.assertTrue(host, 'A new host should have been created') - # self.assertGreater(host.psql_conn_count, 0, 'A least one connection should exist on the current psql batch') diff --git a/runbot/tests/test_dockerfile.py b/runbot/tests/test_dockerfile.py deleted file mode 100644 index 01bea380..00000000 --- a/runbot/tests/test_dockerfile.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -from unittest.mock import patch, mock_open - -from odoo.tests.common import Form, tagged, HttpCase -from .common import RunbotCase - -_logger = logging.getLogger(__name__) - - -@tagged('-at_install', 'post_install') -class TestDockerfile(RunbotCase, HttpCase): - - def test_dockerfile_base_fields(self): - xml_content = """<t t-call="runbot.docker_base"> - <t t-set="custom_values" t-value="{ - 'from': 'ubuntu:focal', - 'phantom': True, - 'additional_pip': 'babel==2.8.0', - 'chrome_source': 'odoo', - 'chrome_version': '86.0.4240.183-1', - }"/> -</t> -""" - - focal_template = self.env['ir.ui.view'].create({ - 'name': 'docker_focal_test', - 'type': 'qweb', - 'key': 'docker.docker_focal_test', - 'arch_db': xml_content - }) - - dockerfile = self.env['runbot.dockerfile'].create({ - 'name': 'Tests Ubuntu Focal (20.0)[Chrome 86]', - 'template_id': focal_template.id, - 'to_build': True - }) - - self.assertEqual(dockerfile.image_tag, 'odoo:TestsUbuntuFocal20.0Chrome86') - self.assertTrue(dockerfile.dockerfile.startswith('FROM ubuntu:focal')) - self.assertIn(' apt-get install -y -qq 
google-chrome-stable=86.0.4240.183-1', dockerfile.dockerfile) - self.assertIn('# Install phantomjs', dockerfile.dockerfile) - self.assertIn('pip install --no-cache-dir babel==2.8.0', dockerfile.dockerfile) - - # test view update - xml_content = xml_content.replace('86.0.4240.183-1', '87.0-1') - dockerfile_form = Form(dockerfile) - dockerfile_form.arch_base = xml_content - dockerfile_form.save() - - self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', dockerfile.dockerfile) - - # Ensure that only the test dockerfile will be found by docker_run - self.env['runbot.dockerfile'].search([('id', '!=', dockerfile.id)]).update({'to_build': False}) - - def write_side_effect(content): - self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', content) - - docker_build_mock = self.patchers['docker_build'] - docker_build_mock.return_value = (True, None) - mopen = mock_open() - rb_host = self.env['runbot.host'].create({'name': 'runbotxxx.odoo.com'}) - with patch('builtins.open', mopen) as file_mock: - file_handle_mock = file_mock.return_value.__enter__.return_value - file_handle_mock.write.side_effect = write_side_effect - rb_host._docker_build() diff --git a/runbot/tests/test_event.py b/runbot/tests/test_event.py deleted file mode 100644 index fb945196..00000000 --- a/runbot/tests/test_event.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- -from .common import RunbotCase - - -class TestIrLogging(RunbotCase): - - def simulate_log(self, build, func, message, level='INFO'): - """ simulate ir_logging from an external build """ - dest = '%s-fake-dest' % build.id - val = ('server', dest, 'test', level, message, 'test', '0', func) - self.cr.execute(""" - INSERT INTO ir_logging(create_date, type, dbname, name, level, message, path, line, func) - VALUES (NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s) - """, val) - - def test_ir_logging(self): - build = self.Build.create({ - 'active_step': self.env.ref('runbot.runbot_build_config_step_test_all').id, - 'params_id': self.base_params.id, - }) - - build.log_counter = 10 - - # Test that an ir_logging is created and a the trigger set the build_id - self.simulate_log(build, 'test function', 'test message') - log_line = self.env['ir.logging'].search([('func', '=', 'test function'), ('message', '=', 'test message'), ('level', '=', 'INFO')]) - self.assertEqual(len(log_line), 1, "A build log event should have been created") - self.assertEqual(log_line.build_id, build) - self.assertEqual(log_line.active_step_id, self.env.ref('runbot.runbot_build_config_step_test_all'), 'The active step should be set on the log line') - - # Test that a warn log line sets the build in warn - self.simulate_log(build, 'test function', 'test message', level='WARNING') - build.invalidate_cache() - self.assertEqual(build.triggered_result, 'warn', 'A warning log should sets the build in warn') - - # Test that a error log line sets the build in ko - self.simulate_log(build, 'test function', 'test message', level='ERROR') - build.invalidate_cache() - self.assertEqual(build.triggered_result, 'ko', 'An error log should sets the build in ko') - self.assertEqual(7, build.log_counter, 'server lines should decrement the build log_counter') - - build.log_counter = 10 - - # Test the log limit - for i in range(11): - self.simulate_log(build, 'limit function', 'limit message') - log_lines = self.env['ir.logging'].search([('build_id', '=', build.id), ('type', '=', 'server'), ('func', '=', 'limit function'), ('message', '=', 'limit message'), ('level', '=', 
'INFO')]) - self.assertGreater(len(log_lines), 7, 'Trigger should have created logs with appropriate build id') - self.assertLess(len(log_lines), 10, 'Trigger should prevent insert more lines of logs than log_counter') - last_log_line = self.env['ir.logging'].search([('build_id', '=', build.id)], order='id DESC', limit=1) - self.assertIn('Log limit reached', last_log_line.message, 'Trigger should modify last log message') - - # Test that the _log method is still able to add logs - build._log('runbot function', 'runbot message') - log_lines = self.env['ir.logging'].search([('type', '=', 'runbot'), ('name', '=', 'odoo.runbot'), ('func', '=', 'runbot function'), ('message', '=', 'runbot message'), ('level', '=', 'INFO')]) - self.assertEqual(len(log_lines), 1, '_log should be able to add logs from the runbot') - - def test_markdown(self): - log = self.env['ir.logging'].create({ - 'name': 'odoo.runbot', - 'type': 'runbot', - 'path': 'runbot', - 'level': 'INFO', - 'line': 0, - 'func': 'test_markdown', - 'message': 'some **bold text** and also some __underlined text__ and maybe a bit of ~~strikethrough text~~' - }) - - self.assertEqual( - log._markdown(), - 'some <strong>bold text</strong> and also some <ins>underlined text</ins> and maybe a bit of <del>strikethrough text</del>' - ) - - #log.message = 'a bit of code `import foo\nfoo.bar`' - #self.assertEqual( - # log._markdown(), - # 'a bit of code <code>import foo\nfoo.bar</code>' - #) - - log.message = 'a bit of code :\n`import foo`' - self.assertEqual( - log._markdown(), - 'a bit of code :<br/><code>import foo</code>' - ) - - - # test icon - log.message = 'Hello @icon-file-text-o' - self.assertEqual( - log._markdown(), - 'Hello <i class="fa fa-file-text-o"></i>' - ) - - log.message = 'a bit of code :\n`print(__name__)`' - self.assertEqual( - log._markdown(), - 'a bit of code :<br/><code>print(__name__)</code>' - ) - - log.message = 'a bit of __code__ :\n`print(__name__)` **but also** `print(__name__)`' - self.assertEqual( - log._markdown(), - 'a bit of <ins>code</ins> :<br/><code>print(__name__)</code> <strong>but also</strong> <code>print(__name__)</code>' - ) - - - # test links - log.message = 'This [link](https://wwww.somewhere.com) goes to somewhere and [this one](http://www.nowhere.com) to nowhere.' - self.assertEqual( - log._markdown(), - 'This <a href="https://wwww.somewhere.com">link</a> goes to somewhere and <a href="http://www.nowhere.com">this one</a> to nowhere.' - ) - - # test link with icon - log.message = '[@icon-download](https://wwww.somewhere.com) goes to somewhere.' - self.assertEqual( - log._markdown(), - '<a href="https://wwww.somewhere.com"><i class="fa fa-download"></i></a> goes to somewhere.' - ) - - # test links with icon and text - log.message = 'This [link@icon-download](https://wwww.somewhere.com) goes to somewhere.' - self.assertEqual( - log._markdown(), - 'This <a href="https://wwww.somewhere.com">link<i class="fa fa-download"></i></a> goes to somewhere.' 
- ) - - # test sanitization - log.message = 'foo <script>console.log("hello world")</script>' - self.assertEqual( - log._markdown(), - 'foo <script>console.log("hello world")</script>' - ) diff --git a/runbot/tests/test_repo.py b/runbot/tests/test_repo.py deleted file mode 100644 index 78e3ca70..00000000 --- a/runbot/tests/test_repo.py +++ /dev/null @@ -1,469 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import re -from unittest import skip -from unittest.mock import patch, Mock -from subprocess import CalledProcessError -from odoo.tests import common, TransactionCase -from odoo.tools import mute_logger -import logging -import odoo -import time - -from .common import RunbotCase, RunbotCaseMinimalSetup - -_logger = logging.getLogger(__name__) - - -class TestRepo(RunbotCaseMinimalSetup): - - def setUp(self): - super(TestRepo, self).setUp() - self.commit_list = {} - self.mock_root = self.patchers['repo_root_patcher'] - - def test_base_fields(self): - self.mock_root.return_value = '/tmp/static' - - repo = self.repo_server - remote = self.remote_server - # name = 'bla@example.com:base/server' - self.assertEqual(repo.path, '/tmp/static/repo/server') - self.assertEqual(remote.base_url, 'example.com/base/server') - self.assertEqual(remote.short_name, 'base/server') - self.assertEqual(remote.owner, 'base') - self.assertEqual(remote.repo_name, 'server') - - # HTTPS - remote.name = 'https://bla@example.com/base/server.git' - self.assertEqual(remote.short_name, 'base/server') - self.assertEqual(remote.owner, 'base') - self.assertEqual(remote.repo_name, 'server') - - # LOCAL - remote.name = '/path/somewhere/bar.git' - self.assertEqual(remote.short_name, 'somewhere/bar') - self.assertEqual(remote.owner, 'somewhere') - self.assertEqual(remote.repo_name, 'bar') - - def test_repo_update_batches(self): - """ Test that when finding new refs in a repo, the missing branches - are created and new builds are created in pending state - """ - self.repo_addons = self.repo_addons # lazy repo_addons fails on union - self.repo_server = self.repo_server # lazy repo_addons fails on union - self.additionnal_setup() - self.start_patchers() - max_bundle_id = self.env['runbot.bundle'].search([], order='id desc', limit=1).id or 0 - - branch_name = 'master-test' - - def github(url, payload=None, ignore_errors=False, nb_tries=2, recursive=False): - self.assertEqual(ignore_errors, False) - self.assertEqual(url, '/repos/:owner/:repo/pulls/123') - return { - 'base': {'ref': 'master'}, - 'head': {'label': 'dev:%s' % branch_name, 'repo': {'full_name': 'dev/server'}}, - } - - repos = self.repo_addons | self.repo_server - - first_commit = [( - 'refs/%s/heads/%s' % (self.remote_server_dev.remote_name, branch_name), - 'd0d0caca', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'Server subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>')] - - self.commit_list[self.repo_server.id] = first_commit - - self.patchers['github_patcher'].side_effect = github - repos._update_batches() - - dev_branch = self.env['runbot.branch'].search([('remote_id', '=', self.remote_server_dev.id)]) - - bundle = dev_branch.bundle_id - self.assertEqual(dev_branch.name, branch_name, 'A new branch should have been created') - - batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - self.assertEqual(len(batch), 1, 'Batch found') - self.assertEqual(batch.commit_link_ids.commit_id.subject, 'Server subject') - self.assertEqual(batch.state, 'preparing') - self.assertEqual(dev_branch.head_name, 'd0d0caca') - 
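# [Editor's note] For readability (inferred from the fixtures above, not from original
# documentation): each entry pushed into self.commit_list mimics one parsed git ref and
# appears to follow the field order
#   (ref, sha, committer_date, author, author_email, subject, committer, committer_email)
# so 'd0d0caca' is the head sha and 'Server subject' the commit subject asserted on here.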
self.assertEqual(bundle.last_batch, batch) - last_batch = batch - - # create a addons branch in the same bundle - self.commit_list[self.repo_addons.id] = [('refs/%s/heads/%s' % (self.remote_addons_dev.remote_name, branch_name), - 'deadbeef', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'Addons subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>')] - - repos._update_batches() - - addons_dev_branch = self.env['runbot.branch'].search([('remote_id', '=', self.remote_addons_dev.id)]) - - self.assertEqual(addons_dev_branch.bundle_id, bundle) - - self.assertEqual(dev_branch.head_name, 'd0d0caca', "Dev branch head name shoudn't have change") - self.assertEqual(addons_dev_branch.head_name, 'deadbeef') - - branch_count = self.env['runbot.branch'].search_count([('remote_id', '=', self.remote_server_dev.id)]) - self.assertEqual(branch_count, 1, 'No new branch should have been created') - - batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - self.assertEqual(last_batch, batch, "No new batch should have been created") - self.assertEqual(bundle.last_batch, batch) - self.assertEqual(batch.commit_link_ids.commit_id.mapped('subject'), ['Server subject', 'Addons subject']) - - # create a server pr in the same bundle with the same hash - self.commit_list[self.repo_server.id] += [ - ('refs/%s/pull/123' % self.remote_server.remote_name, - 'd0d0caca', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'Another subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>')] - - # Create Batches - repos._update_batches() - - pull_request = self.env['runbot.branch'].search([('remote_id', '=', self.remote_server.id), ('id', '!=', self.branch_server.id)]) - self.assertEqual(pull_request.bundle_id, bundle) - - self.assertEqual(dev_branch.head_name, 'd0d0caca') - self.assertEqual(pull_request.head_name, 'd0d0caca') - self.assertEqual(addons_dev_branch.head_name, 'deadbeef') - - self.assertEqual(dev_branch, self.env['runbot.branch'].search([('remote_id', '=', self.remote_server_dev.id)])) - self.assertEqual(addons_dev_branch, self.env['runbot.branch'].search([('remote_id', '=', self.remote_addons_dev.id)])) - - batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - self.assertEqual(last_batch, batch, "No new batch should have been created") - self.assertEqual(bundle.last_batch, batch) - self.assertEqual(batch.commit_link_ids.commit_id.mapped('subject'), ['Server subject', 'Addons subject']) - - # A new commit is found in the server repo - self.commit_list[self.repo_server.id] = [ - ( - 'refs/%s/heads/%s' % (self.remote_server_dev.remote_name, branch_name), - 'b00b', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'A new subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>' - ), - ( - 'refs/%s/pull/123' % self.remote_server.remote_name, - 'b00b', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'A new subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>' - )] - - # Create Batches - repos._update_batches() - - self.assertEqual(dev_branch, self.env['runbot.branch'].search([('remote_id', '=', self.remote_server_dev.id)])) - self.assertEqual(pull_request + self.branch_server, self.env['runbot.branch'].search([('remote_id', '=', self.remote_server.id)])) - self.assertEqual(addons_dev_branch, self.env['runbot.branch'].search([('remote_id', '=', self.remote_addons_dev.id)])) - - batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - 
self.assertEqual(bundle.last_batch, batch) - self.assertEqual(len(batch), 1, 'No new batch created, updated') - self.assertEqual(batch.commit_link_ids.commit_id.mapped('subject'), ['A new subject', 'Addons subject'], 'commits should have been updated') - self.assertEqual(batch.state, 'preparing') - - self.assertEqual(dev_branch.head_name, 'b00b') - self.assertEqual(pull_request.head_name, 'b00b') - self.assertEqual(addons_dev_branch.head_name, 'deadbeef') - - # TODO move this - # previous_build = self.env['runbot.build'].search([('repo_id', '=', repo.id), ('branch_id', '=', branch.id), ('name', '=', 'd0d0caca')]) - # self.assertEqual(previous_build.local_state, 'done', 'Previous pending build should be done') - # self.assertEqual(previous_build.local_result, 'skipped', 'Previous pending build result should be skipped') - - batch.state = 'done' - - repos._update_batches() - - batch = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - self.assertEqual(len(batch), 1, 'No new batch created, no head change') - - self.commit_list[self.repo_server.id] = [ - ('refs/%s/heads/%s' % (self.remote_server_dev.remote_name, branch_name), - 'dead1234', - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'A last subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>')] - - repos._update_batches() - - bundles = self.env['runbot.bundle'].search([('id', '>', max_bundle_id)]) - self.assertEqual(bundles, bundle) - batches = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)]) - self.assertEqual(len(batches), 2, 'No preparing instance and new head -> new batch') - self.assertEqual(bundle.last_batch.state, 'preparing') - self.assertEqual(bundle.last_batch.commit_link_ids.commit_id.subject, 'A last subject') - - self.commit_list[self.repo_server.id] = first_commit # branch reset hard to an old commit (and pr closed) - - repos._update_batches() - - batches = self.env['runbot.batch'].search([('bundle_id', '=', bundle.id)], order='id desc') - last_batch = bundle.last_batch - self.assertEqual(len(batches), 2, 'No new batch created, updated') - self.assertEqual(last_batch.commit_link_ids.commit_id.mapped('subject'), ['Server subject'], 'commits should have been updated') - self.assertEqual(last_batch.state, 'preparing') - self.assertEqual(dev_branch.head_name, 'd0d0caca') - - def github2(url, payload=None, ignore_errors=False, nb_tries=2, recursive=False): - self.assertEqual(ignore_errors, True) - self.assertIn(url, ['/repos/:owner/:repo/statuses/d0d0caca', '/repos/:owner/:repo/statuses/deadbeef']) - return {} - - self.patchers['github_patcher'].side_effect = github2 - last_batch._prepare() - self.assertEqual(last_batch.commit_link_ids.commit_id.mapped('subject'), ['Server subject', 'Addons subject']) - - self.assertEqual(last_batch.state, 'ready') - - self.assertEqual(2, len(last_batch.slot_ids)) - self.assertEqual(2, len(last_batch.slot_ids.mapped('build_id'))) - - @skip('This test is for performances. 
It needs a lot of real branches in DB to mean something') - def test_repo_perf_find_new_commits(self): - self.mock_root.return_value = '/tmp/static' - repo = self.env['runbot.repo'].search([('name', '=', 'blabla')]) - - self.commit_list[self.repo_server.id] = [] - - # create 20000 branches and refs - start_time = time.time() - self.env['runbot.build'].search([], limit=5).write({'name': 'jflsdjflj'}) - - for i in range(20005): - self.commit_list[self.repo_server.id].append(['refs/heads/bidon-%05d' % i, - 'd0d0caca %s' % i, - str(int(time.time())), - 'Marc Bidule', - '<marc.bidule@somewhere.com>', - 'A nice subject', - 'Marc Bidule', - '<marc.bidule@somewhere.com>']) - inserted_time = time.time() - _logger.info('Insert took: %ssec', (inserted_time - start_time)) - repo._update_batches() - - _logger.info('Create pending builds took: %ssec', (time.time() - inserted_time)) - - @common.warmup - def test_times(self): - def _test_times(model, setter, field_name): - repo1 = self.repo_server - repo2 = self.repo_addons - - with self.assertQueryCount(1): - getattr(repo1, setter)(1.1) - getattr(repo2, setter)(1.2) - self.assertEqual(len(self.env[model].search([])), 2) - self.assertEqual(repo1[field_name], 1.1) - self.assertEqual(repo2[field_name], 1.2) - - getattr(repo1, setter)(1.3) - getattr(repo2, setter)(1.4) - - self.assertEqual(len(self.env[model].search([])), 4) - self.assertEqual(repo1[field_name], 1.3) - self.assertEqual(repo2[field_name], 1.4) - - self.Repo.invalidate_cache() - self.assertEqual(repo1[field_name], 1.3) - self.assertEqual(repo2[field_name], 1.4) - - self.Repo._gc_times() - - self.assertEqual(len(self.env[model].search([])), 2) - self.assertEqual(repo1[field_name], 1.3) - self.assertEqual(repo2[field_name], 1.4) - - _test_times('runbot.repo.hooktime', 'set_hook_time', 'hook_time') - _test_times('runbot.repo.reftime', 'set_ref_time', 'get_ref_time') - - -class TestGithub(TransactionCase): - - def test_github(self): - """ Test different github responses or failures""" - - project = self.env['runbot.project'].create({'name': 'Tests'}) - repo_server = self.env['runbot.repo'].create({ - 'name': 'server', - 'project_id': project.id, - }) - remote_server = self.env['runbot.remote'].create({ - 'name': 'bla@example.com:base/server', - 'repo_id': repo_server.id, - }) - - # self.assertEqual(remote_server._github('/repos/:owner/:repo/statuses/abcdef', dict(), ignore_errors=True), None, 'A repo without token should return None') - remote_server.token = 'abc' - - import requests - with patch('odoo.addons.runbot.models.repo.requests.Session') as mock_session, patch('time.sleep') as mock_sleep: - mock_sleep.return_value = None - with self.assertRaises(Exception, msg='should raise an exception with ignore_errors=False'): - mock_session.return_value.post.side_effect = requests.HTTPError('301: Bad gateway') - remote_server._github('/repos/:owner/:repo/statuses/abcdef', {'foo': 'bar'}, ignore_errors=False) - - mock_session.return_value.post.reset_mock() - with self.assertLogs(logger='odoo.addons.runbot.models.repo') as assert_log: - remote_server._github('/repos/:owner/:repo/statuses/abcdef', {'foo': 'bar'}, ignore_errors=True) - self.assertIn('Ignored github error', assert_log.output[0]) - - self.assertEqual(2, mock_session.return_value.post.call_count, "_github method should try two times by default") - - mock_session.return_value.post.reset_mock() - mock_session.return_value.post.side_effect = [requests.HTTPError('301: Bad gateway'), Mock()] - with 
self.assertLogs(logger='odoo.addons.runbot.models.repo') as assert_log: - remote_server._github('/repos/:owner/:repo/statuses/abcdef', {'foo': 'bar'}, ignore_errors=True) - self.assertIn('Success after 2 tries', assert_log.output[0]) - - self.assertEqual(2, mock_session.return_value.post.call_count, "_github method should try two times by default") - - -class TestFetch(RunbotCase): - - def setUp(self): - super(TestFetch, self).setUp() - self.mock_root = self.patchers['repo_root_patcher'] - self.fetch_count = 0 - self.force_failure = False - - def mock_git_helper(self): - """Helper that returns a mock for repo._git()""" - def mock_git(repo, cmd): - self.assertIn('fetch', cmd) - self.fetch_count += 1 - if self.fetch_count < 3 or self.force_failure: - raise CalledProcessError(128, cmd, 'Dummy Error'.encode('utf-8')) - else: - return True - return mock_git - - @patch('time.sleep', return_value=None) - def test_update_fetch_cmd(self, mock_time): - """ Test that git fetch is tried multiple times before disabling host """ - - host = self.env['runbot.host']._get_current() - - self.assertFalse(host.assigned_only) - # Ensure that Host is not disabled if fetch succeeds after 3 tries - with mute_logger("odoo.addons.runbot.models.repo"): - self.repo_server._update_fetch_cmd() - self.assertFalse(host.assigned_only, "Host should not be disabled when fetch succeeds") - self.assertEqual(self.fetch_count, 3) - - # Now ensure that host is disabled after 5 unsuccesful tries - self.force_failure = True - self.fetch_count = 0 - with mute_logger("odoo.addons.runbot.models.repo"): - self.repo_server._update_fetch_cmd() - self.assertTrue(host.assigned_only) - self.assertEqual(self.fetch_count, 5) - - -class TestIdentityFile(RunbotCase): - - def check_output_helper(self): - """Helper that returns a mock for repo._git()""" - def mock_check_output(cmd, *args, **kwargs): - expected_option = '-c core.sshCommand=ssh -i \/.+\/\.ssh\/fake_identity' - git_cmd = ' '.join(cmd) - self.assertTrue(re.search(expected_option, git_cmd), '%s did not match %s' % (git_cmd, expected_option)) - return Mock() - - return mock_check_output - - def test_identity_file(self): - """test that the identity file is used in git command""" - - self.stop_patcher('git_patcher') - self.start_patcher('check_output_patcher', 'odoo.addons.runbot.models.repo.subprocess.check_output', new=self.check_output_helper()) - - self.repo_server.identity_file = 'fake_identity' - - with mute_logger("odoo.addons.runbot.models.repo"): - self.repo_server._update_fetch_cmd() - - -class TestRepoScheduler(RunbotCase): - - def setUp(self): - # as the _scheduler method commits, we need to protect the database - super(TestRepoScheduler, self).setUp() - - self.fqdn_patcher = patch('odoo.addons.runbot.models.host.fqdn') - mock_root = self.patchers['repo_root_patcher'] - mock_root.return_value = '/tmp/static' - - @patch('odoo.addons.runbot.models.build.BuildResult._kill') - @patch('odoo.addons.runbot.models.build.BuildResult._schedule') - @patch('odoo.addons.runbot.models.build.BuildResult._init_pendings') - def test_repo_scheduler(self, mock_init_pendings, mock_schedule, mock_kill): - - self.env['ir.config_parameter'].set_param('runbot.runbot_workers', 6) - builds = [] - # create 6 builds that are testing on the host to verify that - # workers are not overfilled - for _ in range(6): - build = self.Build.create({ - 'params_id': self.base_params.id, - 'build_type': 'normal', - 'local_state': 'testing', - 'host': 'host.runbot.com' - }) - builds.append(build) - # now the pending 
build that should stay unasigned - scheduled_build = self.Build.create({ - 'params_id': self.base_params.id, - 'build_type': 'scheduled', - 'local_state': 'pending', - }) - builds.append(scheduled_build) - # create the build that should be assigned once a slot is available - build = self.Build.create({ - 'params_id': self.base_params.id, - 'build_type': 'normal', - 'local_state': 'pending', - }) - builds.append(build) - host = self.env['runbot.host']._get_current() - self.Runbot._scheduler(host) - - build.invalidate_cache() - scheduled_build.invalidate_cache() - self.assertFalse(build.host) - self.assertFalse(scheduled_build.host) - - # give some room for the pending build - builds[0].write({'local_state': 'done'}) - - self.Runbot._scheduler(host) diff --git a/runbot/tests/test_runbot.py b/runbot/tests/test_runbot.py deleted file mode 100644 index 2ce3cce2..00000000 --- a/runbot/tests/test_runbot.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -from .common import RunbotCase - -_logger = logging.getLogger(__name__) - - -class TestRunbot(RunbotCase): - - def test_warning_from_runbot_abstract(self): - warning = self.env['runbot.runbot'].warning('Test warning message') - - self.assertTrue(self.env['runbot.warning'].browse(warning.id).exists()) diff --git a/runbot/tests/test_schedule.py b/runbot/tests/test_schedule.py deleted file mode 100644 index 467ae509..00000000 --- a/runbot/tests/test_schedule.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from unittest.mock import patch -from .common import RunbotCase - - -class TestSchedule(RunbotCase): - - @patch('odoo.addons.runbot.models.build.os.path.getmtime') - @patch('odoo.addons.runbot.models.build.docker_state') - def test_schedule_mark_done(self, mock_docker_state, mock_getmtime): - """ Test that results are set even when job_30_run is skipped """ - job_end_time = datetime.datetime.now() - mock_getmtime.return_value = job_end_time.timestamp() # looks wrong - - params = self.BuildParameters.create({ - 'version_id': self.version_13, - 'project_id': self.project, - 'config_id': self.env.ref('runbot.runbot_build_config_default').id, - }) - build = self.Build.create({ - 'local_state': 'testing', - 'port': '1234', - 'host': 'runbotxx', - 'job_start': datetime.datetime.now(), - 'active_step': self.env.ref('runbot.runbot_build_config_step_run').id, - 'params_id': params.id - }) - mock_docker_state.return_value = 'UNKNOWN' - self.assertEqual(build.local_state, 'testing') - build._schedule() # too fast, docker not started - self.assertEqual(build.local_state, 'testing') - - build.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started - build._schedule() - self.assertEqual(build.local_state, 'done') - self.assertEqual(build.local_result, 'ok') diff --git a/runbot/tests/test_upgrade.py b/runbot/tests/test_upgrade.py deleted file mode 100644 index c8a1cffb..00000000 --- a/runbot/tests/test_upgrade.py +++ /dev/null @@ -1,544 +0,0 @@ -import getpass -import logging -import getpass -from odoo.exceptions import UserError -from odoo.tools import mute_logger -from .common import RunbotCase - -_logger = logging.getLogger(__name__) - - -class TestUpgradeFlow(RunbotCase): - - def setUp(self): - super().setUp() - self.upgrade_flow_setup() - - def upgrade_flow_setup(self): - self.start_patcher('find_patcher', 'odoo.addons.runbot.common.find', 0) - self.additionnal_setup() - - self.master_bundle = self.branch_server.bundle_id - self.config_test = 
self.env['runbot.build.config'].create({'name': 'Test'}) - ################# - # upgrade branch - ################# - self.repo_upgrade = self.env['runbot.repo'].create({ - 'name': 'upgrade', - 'project_id': self.project.id, - 'manifest_files': False, - }) - self.remote_upgrade = self.env['runbot.remote'].create({ - 'name': 'bla@example.com:base/upgrade', - 'repo_id': self.repo_upgrade.id, - 'token': '123', - }) - self.branch_upgrade = self.Branch.create({ - 'name': 'master', - 'remote_id': self.remote_upgrade.id, - 'is_pr': False, - 'head': self.Commit.create({ - 'name': '123abc789', - 'repo_id': self.repo_upgrade.id, - }).id, - }) - - ####################### - # Basic upgrade config - ####################### - self.step_restore = self.env['runbot.build.config.step'].create({ - 'name': 'restore', - 'job_type': 'restore', - 'restore_rename_db_suffix': False - }) - self.step_test_upgrade = self.env['runbot.build.config.step'].create({ - 'name': 'test_upgrade', - 'job_type': 'test_upgrade', - }) - self.test_upgrade_config = self.env['runbot.build.config'].create({ - 'name': 'Upgrade server', - 'step_order_ids': [ - (0, 0, {'step_id': self.step_restore.id}), - (0, 0, {'step_id': self.step_test_upgrade.id}) - ] - }) - - ########## - # Nightly - ########## - self.nightly_category = self.env.ref('runbot.nightly_category') - self.config_nightly = self.env['runbot.build.config'].create({'name': 'Nightly config'}) - self.config_nightly_db_generate = self.env['runbot.build.config'].create({'name': 'Nightly generate'}) - self.config_all = self.env['runbot.build.config'].create({'name': 'Demo'}) - self.config_all_no_demo = self.env['runbot.build.config'].create({'name': 'No demo'}) - self.trigger_server_nightly = self.env['runbot.trigger'].create({ - 'name': 'Nighly server', - 'dependency_ids': [(4, self.repo_server.id)], - 'config_id': self.config_nightly.id, - 'project_id': self.project.id, - 'category_id': self.nightly_category.id - }) - self.trigger_addons_nightly = self.env['runbot.trigger'].create({ - 'name': 'Nighly addons', - 'dependency_ids': [(4, self.repo_server.id), (4, self.repo_addons.id)], - 'config_id': self.config_nightly.id, - 'project_id': self.project.id, - 'category_id': self.nightly_category.id - }) - - ########## - # Weekly - ########## - self.weekly_category = self.env.ref('runbot.weekly_category') - self.config_weekly = self.env['runbot.build.config'].create({'name': 'Nightly config'}) - self.config_single = self.env['runbot.build.config'].create({'name': 'Single'}) - self.trigger_server_weekly = self.env['runbot.trigger'].create({ - 'name': 'Nighly server', - 'dependency_ids': [(4, self.repo_server.id)], - 'config_id': self.config_weekly.id, - 'project_id': self.project.id, - 'category_id': self.weekly_category.id - }) - self.trigger_addons_weekly = self.env['runbot.trigger'].create({ - 'name': 'Nighly addons', - 'dependency_ids': [(4, self.repo_server.id), (4, self.repo_addons.id)], - 'config_id': self.config_weekly.id, - 'project_id': self.project.id, - 'category_id': self.weekly_category.id - }) - - ######################################## - # Configure upgrades for 'to current' version - ######################################## - master = self.env['runbot.version']._get('master') - self.step_upgrade_server = self.env['runbot.build.config.step'].create({ - 'name': 'upgrade_server', - 'job_type': 'configure_upgrade', - 'upgrade_to_current': True, - 'upgrade_from_previous_major_version': True, - 'upgrade_from_last_intermediate_version': True, - 'upgrade_flat': True, - 
'upgrade_config_id': self.test_upgrade_config.id, - 'upgrade_dbs': [ - (0, 0, {'config_id': self.config_all.id, 'db_pattern': 'all', 'min_target_version_id': master.id}), - (0, 0, {'config_id': self.config_all_no_demo.id, 'db_pattern': 'no-demo-all'}) - ] - }) - self.upgrade_server_config = self.env['runbot.build.config'].create({ - 'name': 'Upgrade server', - 'step_order_ids': [(0, 0, {'step_id': self.step_upgrade_server.id})] - }) - self.trigger_upgrade_server = self.env['runbot.trigger'].create({ - 'name': 'Server upgrade', - 'repo_ids': [(4, self.repo_upgrade.id), (4, self.repo_server.id)], - 'config_id': self.upgrade_server_config.id, - 'project_id': self.project.id, - 'upgrade_dumps_trigger_id': self.trigger_server_nightly.id, - }) - - ######################################## - # Configure upgrades for previouses versions - ######################################## - self.step_upgrade = self.env['runbot.build.config.step'].create({ - 'name': 'upgrade', - 'job_type': 'configure_upgrade', - 'upgrade_to_major_versions': True, - 'upgrade_from_previous_major_version': True, - 'upgrade_flat': True, - 'upgrade_config_id': self.test_upgrade_config.id, - 'upgrade_dbs': [ - (0, 0, {'config_id': self.config_all.id, 'db_pattern': 'all', 'min_target_version_id': master.id}), - (0, 0, {'config_id': self.config_all_no_demo.id, 'db_pattern': 'no-demo-all'}) - ] - }) - self.upgrade_config = self.env['runbot.build.config'].create({ - 'name': 'Upgrade', - 'step_order_ids': [(0, 0, {'step_id': self.step_upgrade.id})] - }) - self.trigger_upgrade = self.env['runbot.trigger'].create({ - 'name': 'Upgrade', - 'repo_ids': [(4, self.repo_upgrade.id)], - 'config_id': self.upgrade_config.id, - 'project_id': self.project.id, - 'upgrade_dumps_trigger_id': self.trigger_addons_nightly.id, - }) - - with mute_logger('odoo.addons.runbot.models.commit'): - self.build_niglty_master, self.build_weekly_master = self.create_version('master') - self.build_niglty_11, self.build_weekly_11 = self.create_version('11.0') - self.build_niglty_113, self.build_weekly_113 = self.create_version('saas-11.3') - self.build_niglty_12, self.build_weekly_12 = self.create_version('12.0') - self.build_niglty_123, self.build_weekly_123 = self.create_version('saas-12.3') - self.build_niglty_13, self.build_weekly_13 = self.create_version('13.0') - self.build_niglty_131, self.build_weekly_131 = self.create_version('saas-13.1') - self.build_niglty_132, self.build_weekly_132 = self.create_version('saas-13.2') - self.build_niglty_133, self.build_weekly_133 = self.create_version('saas-13.3') - - def create_version(self, name): - intname = int(''.join(c for c in name if c.isdigit())) if name != 'master' else 0 - if name != 'master': - branch_server = self.Branch.create({ - 'name': name, - 'remote_id': self.remote_server.id, - 'is_pr': False, - 'head': self.Commit.create({ - 'name': 'server%s' % intname, - 'repo_id': self.repo_server.id, - }).id, - }) - branch_addons = self.Branch.create({ - 'name': name, - 'remote_id': self.remote_addons.id, - 'is_pr': False, - 'head': self.Commit.create({ - 'name': 'addons%s' % intname, - 'repo_id': self.repo_addons.id, - }).id, - }) - else: - branch_server = self.branch_server - branch_addons = self.branch_addons - - self.assertEqual(branch_server.bundle_id, branch_addons.bundle_id) - bundle = branch_server.bundle_id - self.assertEqual(bundle.name, name) - bundle.is_base = True - # create nightly - - batch_nigthly = bundle._force(self.nightly_category.id) - batch_nigthly._prepare() - 
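# [Editor's note] Inferred from usage in these tests, not from original documentation:
# bundle._force(category_id) seems to create a batch for the bundle outside the normal
# push-triggered flow, and batch._prepare() then fills one slot (and one build) per
# trigger matching that category, which is what the assertions below rely on.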
self.assertEqual(batch_nigthly.category_id, self.nightly_category) - builds_nigthly = {} - host = self.env['runbot.host']._get_current() - for build in batch_nigthly.slot_ids.mapped('build_id'): - self.assertEqual(build.params_id.config_id, self.config_nightly) - main_child = build._add_child({'config_id': self.config_nightly_db_generate.id}) - demo = main_child._add_child({'config_id': self.config_all.id}) - demo.database_ids = [ - (0, 0, {'name': '%s-%s' % (demo.dest, 'base')}), - (0, 0, {'name': '%s-%s' % (demo.dest, 'dummy')}), - (0, 0, {'name': '%s-%s' % (demo.dest, 'all')})] - demo.host = host.name - no_demo = main_child._add_child({'config_id': self.config_all_no_demo.id}) - no_demo.database_ids = [ - (0, 0, {'name': '%s-%s' % (no_demo.dest, 'base')}), - (0, 0, {'name': '%s-%s' % (no_demo.dest, 'dummy')}), - (0, 0, {'name': '%s-%s' % (no_demo.dest, 'no-demo-all')})] - no_demo.host = host.name - (build | main_child | demo | no_demo).write({'local_state': 'done'}) - builds_nigthly[('root', build.params_id.trigger_id)] = build - builds_nigthly[('demo', build.params_id.trigger_id)] = demo - builds_nigthly[('no_demo', build.params_id.trigger_id)] = no_demo - batch_nigthly.state = 'done' - - batch_weekly = bundle._force(self.weekly_category.id) - batch_weekly._prepare() - self.assertEqual(batch_weekly.category_id, self.weekly_category) - builds_weekly = {} - build = batch_weekly.slot_ids.filtered(lambda s: s.trigger_id == self.trigger_addons_weekly).build_id - build.database_ids = [(0, 0, {'name': '%s-%s' % (build.dest, 'dummy')})] - self.assertEqual(build.params_id.config_id, self.config_weekly) - builds_weekly[('root', build.params_id.trigger_id)] = build - for db in ['l10n_be', 'l10n_ch', 'mail', 'account', 'stock']: - child = build._add_child({'config_id': self.config_single.id}) - child.database_ids = [(0, 0, {'name': '%s-%s' % (child.dest, db)})] - child.local_state = 'done' - child.host = host.name - builds_weekly[(db, build.params_id.trigger_id)] = child - build.local_state = 'done' - batch_weekly.state = 'done' - - batch_default = bundle._force() - batch_default._prepare() - build = batch_default.slot_ids.filtered(lambda s: s.trigger_id == self.trigger_server).build_id - build.local_state = 'done' - batch_default.state = 'done' - - return builds_nigthly, builds_weekly - - def test_all(self): - # Test setup - self.assertEqual(self.branch_server.bundle_id, self.branch_upgrade.bundle_id) - self.assertTrue(self.branch_upgrade.bundle_id.is_base) - self.assertTrue(self.branch_upgrade.bundle_id.version_id) - self.assertEqual(self.trigger_upgrade_server.upgrade_step_id, self.step_upgrade_server) - - with self.assertRaises(UserError): - self.step_upgrade_server.job_type = 'install_odoo' - self.trigger_upgrade_server.flush(['upgrade_step_id']) - - batch = self.master_bundle._force() - batch._prepare() - upgrade_current_build = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade_server).build_id - host = self.env['runbot.host']._get_current() - upgrade_current_build.host = host.name - upgrade_current_build._init_pendings(host) - upgrade_current_build._schedule() - self.assertEqual(upgrade_current_build.local_state, 'done') - self.assertEqual(len(upgrade_current_build.children_ids), 4) - - [b_13_master_demo, b_13_master_no_demo, b_133_master_demo, b_133_master_no_demo] = upgrade_current_build.children_ids - - def assertOk(build, t, f, b_type, db_suffix, trigger): - self.assertEqual(build.params_id.upgrade_to_build_id, t) - 
self.assertEqual(build.params_id.upgrade_from_build_id, f[('root', trigger)]) - self.assertEqual(build.params_id.dump_db.build_id, f[(b_type, trigger)]) - self.assertEqual(build.params_id.dump_db.db_suffix, db_suffix) - self.assertEqual(build.params_id.config_id, self.test_upgrade_config) - - assertOk(b_13_master_demo, upgrade_current_build, self.build_niglty_13, 'demo', 'all', self.trigger_server_nightly) - assertOk(b_13_master_no_demo, upgrade_current_build, self.build_niglty_13, 'no_demo', 'no-demo-all', self.trigger_server_nightly) - assertOk(b_133_master_demo, upgrade_current_build, self.build_niglty_133, 'demo', 'all', self.trigger_server_nightly) - assertOk(b_133_master_no_demo, upgrade_current_build, self.build_niglty_133, 'no_demo', 'no-demo-all', self.trigger_server_nightly) - - self.assertEqual(b_13_master_demo.params_id.commit_ids.repo_id, self.repo_server | self.repo_upgrade) - - # upgrade repos tests - upgrade_build = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade).build_id - host = self.env['runbot.host']._get_current() - upgrade_build.host = host.name - upgrade_build._init_pendings(host) - upgrade_build._schedule() - self.assertEqual(upgrade_build.local_state, 'done') - self.assertEqual(len(upgrade_build.children_ids), 2) - - [b_11_12, b_12_13] = upgrade_build.children_ids - - assertOk(b_11_12, self.build_niglty_12[('root', self.trigger_addons_nightly)], self.build_niglty_11, 'no_demo', 'no-demo-all', self.trigger_addons_nightly) - assertOk(b_12_13, self.build_niglty_13[('root', self.trigger_addons_nightly)], self.build_niglty_12, 'no_demo', 'no-demo-all', self.trigger_addons_nightly) - - step_upgrade_nightly = self.env['runbot.build.config.step'].create({ - 'name': 'upgrade_nightly', - 'job_type': 'configure_upgrade', - 'upgrade_to_master': True, - 'upgrade_to_major_versions': True, - 'upgrade_from_previous_major_version': True, - 'upgrade_from_all_intermediate_version': True, - 'upgrade_flat': False, - 'upgrade_config_id': self.test_upgrade_config.id, - 'upgrade_dbs': [ - (0, 0, {'config_id': self.config_single.id, 'db_pattern': '*'}) - ] - }) - upgrade_config_nightly = self.env['runbot.build.config'].create({ - 'name': 'Upgrade nightly', - 'step_order_ids': [(0, 0, {'step_id': step_upgrade_nightly.id})] - }) - trigger_upgrade_addons_nightly = self.env['runbot.trigger'].create({ - 'name': 'Nigtly upgrade', - 'config_id': upgrade_config_nightly.id, - 'project_id': self.project.id, - 'dependency_ids': [(4, self.repo_upgrade.id)], - 'upgrade_dumps_trigger_id': self.trigger_addons_weekly.id, - 'category_id': self.nightly_category.id - }) - - batch = self.master_bundle._force(self.nightly_category.id) - batch._prepare() - upgrade_nightly = batch.slot_ids.filtered(lambda slot: slot.trigger_id == trigger_upgrade_addons_nightly).build_id - host = self.env['runbot.host']._get_current() - upgrade_nightly.host = host.name - upgrade_nightly._init_pendings(host) - upgrade_nightly._schedule() - to_version_builds = upgrade_nightly.children_ids - self.assertEqual(upgrade_nightly.local_state, 'done') - self.assertEqual(len(to_version_builds), 4) - self.assertEqual( - to_version_builds.mapped('params_id.upgrade_to_build_id.params_id.version_id.name'), - ['11.0', '12.0', '13.0', 'master'] - ) - self.assertEqual( - to_version_builds.mapped('params_id.upgrade_from_build_id.params_id.version_id.name'), - [] - ) - to_version_builds.host = host.name - to_version_builds._init_pendings(host) - to_version_builds._schedule() - 
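# [Editor's note] Editor's summary of the expected fan-out, inferred from the assertions
# that follow: with upgrade_flat=False the configure_upgrade step appears to produce a
# three-level build tree -- one child per target version (4 here), then one grandchild
# per source version (8 pairs), then one build per database to upgrade (40 in total).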
self.assertEqual(to_version_builds.mapped('local_state'), ['done']*4) - from_version_builds = to_version_builds.children_ids - self.assertEqual( - [ - '%s->%s' % ( - b.params_id.upgrade_from_build_id.params_id.version_id.name, - b.params_id.upgrade_to_build_id.params_id.version_id.name - ) - for b in from_version_builds - ], - ['11.0->12.0', 'saas-11.3->12.0', '12.0->13.0', 'saas-12.3->13.0', '13.0->master', 'saas-13.1->master', 'saas-13.2->master', 'saas-13.3->master'] - ) - from_version_builds.host = host.name - from_version_builds._init_pendings(host) - from_version_builds._schedule() - self.assertEqual(from_version_builds.mapped('local_state'), ['done']*8) - db_builds = from_version_builds.children_ids - self.assertEqual(len(db_builds), 40) - - self.assertEqual( - db_builds.mapped('params_id.config_id'), self.test_upgrade_config - ) - - self.assertEqual( - db_builds.mapped('params_id.commit_ids.repo_id'), - self.repo_upgrade, - "Build should only have the upgrade commit" - ) - b11_12 = db_builds[:5] - self.assertEqual( - b11_12.mapped('params_id.upgrade_to_build_id.params_id.version_id.name'), - ['12.0'] - ) - self.assertEqual( - b11_12.mapped('params_id.upgrade_from_build_id.params_id.version_id.name'), - ['11.0'] - ) - b133_master = db_builds[-5:] - self.assertEqual( - b133_master.mapped('params_id.upgrade_to_build_id.params_id.version_id.name'), - ['master'] - ) - self.assertEqual( - b133_master.mapped('params_id.upgrade_from_build_id.params_id.version_id.name'), - ['saas-13.3'] - ) - self.assertEqual( - [b.params_id.dump_db.db_suffix for b in b133_master], - ['account', 'l10n_be', 'l10n_ch', 'mail', 'stock'] # is this order ok? - ) - - first_build = db_builds[0] - - self.start_patcher('docker_state', 'odoo.addons.runbot.models.build.docker_state', 'END') - - def docker_run_restore(cmd, *args, **kwargs): - source_dest = first_build.params_id.dump_db.build_id.dest - dump_url='http://host.runbot.com/runbot/static/build/%s/logs/%s-account.zip' % (source_dest, source_dest) - zip_name='%s-account.zip' % source_dest - db_name='%s-master-account' % str(first_build.id).zfill(5) - self.assertEqual( - str(cmd).split(' && '), - [ - 'mkdir /data/build/restore', - 'cd /data/build/restore', - f'wget {dump_url}', - f'unzip -q {zip_name}', - 'echo "### restoring filestore"', - f'mkdir -p /data/build/datadir/filestore/{db_name}', - f'mv filestore/* /data/build/datadir/filestore/{db_name}', - 'echo "### restoring db"', - f'psql -q {db_name} < dump.sql', - 'cd /data/build', - 'echo "### cleaning"', - 'rm -r restore', - 'echo "### listing modules"', - f'psql {db_name} -c "select name from ir_module_module where state = \'installed\'" -t -A > /data/build/logs/restore_modules_installed.txt', - 'echo "### restore" "successful"' - ] - ) - self.patchers['docker_run'].side_effect = docker_run_restore - first_build.host = host.name - first_build._init_pendings(host) - self.patchers['docker_run'].assert_called() - - def docker_run_upgrade(cmd, *args, ro_volumes=False, **kwargs): - user = getpass.getuser() - self.assertTrue(ro_volumes.pop(f'/home/{user}/.odoorc').startswith('/tmp/runbot_test/static/build/')) - self.assertEqual( - ro_volumes, { - '/data/build/addons': '/tmp/runbot_test/static/sources/addons/addons120', - '/data/build/server': '/tmp/runbot_test/static/sources/server/server120', - '/data/build/upgrade': '/tmp/runbot_test/static/sources/upgrade/123abc789', - }, - "other commit should have been added automaticaly" - ) - self.assertEqual( - str(cmd), - 'python3 server/server.py {addons_path} 
--no-xmlrpcs --no-netrpc -u all -d {db_name} --stop-after-init --max-cron-threads=0'.format( - addons_path='--addons-path addons,server/addons,server/core/addons', - db_name='%s-master-account' % str(first_build.id).zfill(5)) - ) - self.patchers['docker_run'].side_effect = docker_run_upgrade - first_build._schedule() - self.assertEqual(self.patchers['docker_run'].call_count, 2) - - # test_build_references - batch = self.master_bundle._force() - batch._prepare() - upgrade_slot = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade_server) - self.assertTrue(upgrade_slot) - upgrade_build = upgrade_slot.build_id - self.assertTrue(upgrade_build) - self.assertEqual(upgrade_build.params_id.config_id, self.upgrade_server_config) - # we should have 2 builds, the nightly roots of 13 and 13.3 - self.assertEqual( - upgrade_build.params_id.builds_reference_ids, - ( - self.build_niglty_13[('root', self.trigger_server_nightly)] | - self.build_niglty_133[('root', self.trigger_server_nightly)] - ) - ) - - self.trigger_upgrade_server.upgrade_step_id.upgrade_from_all_intermediate_version = True - batch = self.master_bundle._force() - batch._prepare() - upgrade_build = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade_server).build_id - self.assertEqual( - upgrade_build.params_id.builds_reference_ids, - ( - self.build_niglty_13[('root', self.trigger_server_nightly)] | - self.build_niglty_131[('root', self.trigger_server_nightly)] | - self.build_niglty_132[('root', self.trigger_server_nightly)] | - self.build_niglty_133[('root', self.trigger_server_nightly)] - ) - ) - - # test future upgrades - step_upgrade_complement = self.env['runbot.build.config.step'].create({ - 'name': 'upgrade_complement', - 'job_type': 'configure_upgrade_complement', - 'upgrade_config_id': self.test_upgrade_config.id, - }) - - config_upgrade_complement = self.env['runbot.build.config'].create({ - 'name': 'Stable policy', - 'step_order_ids': [(0, 0, {'step_id': step_upgrade_complement.id})] - }) - trigger_upgrade_complement = self.env['runbot.trigger'].create({ - 'name': 'Stable policy', - 'repo_ids': [(4, self.repo_server.id)], - 'dependency_ids': [(4, self.repo_upgrade.id)], - 'config_id': config_upgrade_complement.id, - 'upgrade_dumps_trigger_id': self.trigger_upgrade_server.id, - 'project_id': self.project.id, - }) - - bundle_13 = self.master_bundle.previous_major_version_base_id - bundle_133 = self.master_bundle.intermediate_version_base_ids[-1] - self.assertEqual(bundle_13.name, '13.0') - self.assertEqual(bundle_133.name, 'saas-13.3') - - batch13 = bundle_13._force() - batch13._prepare() - upgrade_complement_build_13 = batch13.slot_ids.filtered(lambda slot: slot.trigger_id == trigger_upgrade_complement).build_id - upgrade_complement_build_13.host = host.name - self.assertEqual(upgrade_complement_build_13.params_id.config_id, config_upgrade_complement) - for db in ['base', 'all', 'no-demo-all']: - upgrade_complement_build_13.database_ids = [(0, 0, {'name': '%s-%s' % (upgrade_complement_build_13.dest, db)})] - - upgrade_complement_build_13._init_pendings(host) - - self.assertEqual(len(upgrade_complement_build_13.children_ids), 5) - master_child = upgrade_complement_build_13.children_ids[0] - self.assertEqual(master_child.params_id.upgrade_from_build_id, upgrade_complement_build_13) - self.assertEqual(master_child.params_id.dump_db.db_suffix, 'all') - self.assertEqual(master_child.params_id.config_id, self.test_upgrade_config) - 
self.assertEqual(master_child.params_id.upgrade_to_build_id.params_id.version_id.name, 'master') - -class TestUpgrade(RunbotCase): - - def test_exceptions_in_env(self): - env_var = self.env['runbot.upgrade.exception']._generate() - self.assertEqual(env_var, False) - self.env['runbot.upgrade.exception'].create({'elements': 'field:module.some_field \nview:some_view_xmlid'}) - self.env['runbot.upgrade.exception'].create({'elements': 'field:module.some_field2'}) - env_var = self.env['runbot.upgrade.exception']._generate() - self.assertEqual(env_var, 'suppress_upgrade_warnings=field:module.some_field,view:some_view_xmlid,field:module.some_field2') diff --git a/runbot/tests/test_version.py b/runbot/tests/test_version.py deleted file mode 100644 index f95b8d8c..00000000 --- a/runbot/tests/test_version.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -from .common import RunbotCase - - -class TestVersion(RunbotCase): - - def test_basic_version(self): - - major_version = self.Version.create({'name': '12.0'}) - self.assertEqual(major_version.number, '12.00') - self.assertTrue(major_version.is_major) - - saas_version = self.Version.create({'name': 'saas-12.1'}) - self.assertEqual(saas_version.number, '12.01') - self.assertFalse(saas_version.is_major) - - self.assertGreater(saas_version.number, major_version.number) - - master_version = self.Version.create({'name': 'master'}) - self.assertEqual(master_version.number, '~') - self.assertGreater(master_version.number, saas_version.number) - - def test_version_relations(self): - version = self.env['runbot.version'] - v11 = version._get('11.0') - v113 = version._get('saas-11.3') - v12 = version._get('12.0') - v122 = version._get('saas-12.2') - v124 = version._get('saas-12.4') - v13 = version._get('13.0') - v131 = version._get('saas-13.1') - v132 = version._get('saas-13.2') - v133 = version._get('saas-13.3') - master = version._get('master') - - self.assertEqual(v11.previous_major_version_id, version) - self.assertEqual(v11.intermediate_version_ids, version) - - self.assertEqual(v113.previous_major_version_id, v11) - self.assertEqual(v113.intermediate_version_ids, version) - - self.assertEqual(v12.previous_major_version_id, v11) - self.assertEqual(v12.intermediate_version_ids, v113) - - self.assertEqual(v12.previous_major_version_id, v11) - self.assertEqual(v12.intermediate_version_ids, v113) - self.assertEqual(v12.next_major_version_id, v13) - self.assertEqual(v12.next_intermediate_version_ids, v124 | v122) - - self.assertEqual(v13.previous_major_version_id, v12) - self.assertEqual(v13.intermediate_version_ids, v124 | v122) - self.assertEqual(v13.next_major_version_id, master) - self.assertEqual(v13.next_intermediate_version_ids, v133 | v132 | v131) - - self.assertEqual(v132.previous_major_version_id, v13) - self.assertEqual(v132.intermediate_version_ids, v131) - self.assertEqual(v132.next_major_version_id, master) - self.assertEqual(v132.next_intermediate_version_ids, v133) - - self.assertEqual(master.previous_major_version_id, v13) - self.assertEqual(master.intermediate_version_ids, v133 | v132 | v131) diff --git a/runbot/views/branch_views.xml b/runbot/views/branch_views.xml deleted file mode 100644 index 6031d793..00000000 --- a/runbot/views/branch_views.xml +++ /dev/null @@ -1,47 +0,0 @@ -<odoo> - <data> - <record id="branch_form" model="ir.ui.view"> - <field name="name">runbot.branch.form</field> - <field name="model">runbot.branch</field> - <field name="arch" type="xml"> - <form> - <header> - <button name="recompute_infos" 
string="Recompute Infos" type="object" class="oe_highlight"/> - </header> - <sheet> - <group name="branch_group"> - <field name="bundle_id" readonly='0'/> - <field name="remote_id"/> - <field name="name"/> - <field name="branch_url" widget="url"/> - <field name="is_pr"/> - <field name="pull_head_name"/> - <field name="target_branch_name"/> - <field name="head"/> - <field name="alive"/> - </group> - </sheet> - </form> - </field> - </record> - - - <record id="branch_view_tree" model="ir.ui.view"> - <field name="name">runbot.branch.tree</field> - <field name="model">runbot.branch</field> - <field name="arch" type="xml"> - <tree string="Branches"> - <field name="remote_id"/> - <field name="name"/> - </tree> - </field> - </record> - - <record id="open_view_branch_tree" model="ir.actions.act_window"> - <field name="name">Branches</field> - <field name="res_model">runbot.branch</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/build_error_views.xml b/runbot/views/build_error_views.xml deleted file mode 100644 index f26ddcf5..00000000 --- a/runbot/views/build_error_views.xml +++ /dev/null @@ -1,193 +0,0 @@ -<odoo> - <data> - <record id="build_error_form" model="ir.ui.view"> - <field name="name">runbot.build.error.form</field> - <field name="model">runbot.build.error</field> - <field name="arch" type="xml"> - <form> - <header> - </header> - <group name="build_error_group"> - <field name="fingerprint" readonly="1"/> - <field name="content"/> - <field name="module_name"/> - <field name="function"/> - <field name="random"/> - <field name="responsible"/> - <field name="team_id"/> - <field name="fixing_commit"/> - <field name="fixing_pr_id"/> - <field name="active"/> - <field name="parent_id" /> - <field name="bundle_ids" widget="many2many_tags"/> - <field name="version_ids" widget="many2many_tags"/> - <field name="trigger_ids" widget="many2many_tags"/> - <field name="tag_ids" widget="many2many_tags"/> - <field name="first_seen_date"/> - <field name="first_seen_build_id" widget="frontend_url"/> - <field name="last_seen_date"/> - <field name="last_seen_build_id" widget="frontend_url"/> - <field name="test_tags"/> - </group> - <notebook> - <page string="Builds"> - <field name="build_ids" nolabel="1" widget="many2many" options="{'not_delete': True, 'no_create': True}"> - <tree> - <field name="create_date"/> - <field name="host" groups="base.group_no_one"/> - <field name="dest"/> - <field name="trigger_id"/> - <field name="description"/> - <field name="build_url" widget="url" readonly="1" text="View build"/> - </tree> - </field> - </page> - <page string="All Errors Builds"> - <field name="children_build_ids" widget="many2many" options="{'not_delete': True, 'no_create': True}"> - <tree> - <field name="create_date"/> - <field name="id"/> - <field name="host" groups="base.group_no_one"/> - <field name="dest"/> - <field name="build_url" widget="url" readonly="1" text="View build"/> - </tree> - </field> - </page> - <page string="Linked Errors"> - <field name="child_ids" widget="many2many" options="{'not_delete': True, 'no_create': True}"> - <tree> - <field name="create_date"/> - <field name="module_name"/> - <field name="summary"/> - <field name="build_count"/> - </tree> - </field> - </page> - <page string="Error history"> - <field name="error_history_ids" widget="one2many" options="{'not_delete': True, 'no_create': True}"> - <tree> - <field name="create_date"/> - <field name="module_name"/> - <field name="summary"/> - <field name="random"/> - <field 
name="build_count"/> - <field name="responsible"/> - <field name="fixing_commit"/> - </tree> - </field> - </page> - <page string="Cleaned" groups="base.group_no_one"> - <group name="build_error_group"> - <field name="cleaned_content"/> - </group> - </page> - </notebook> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="build_error_view_tree" model="ir.ui.view"> - <field name="name">runbot.build.error.tree</field> - <field name="model">runbot.build.error</field> - <field name="arch" type="xml"> - <tree string="Errors"> - <field name="module_name"/> - <field name="summary"/> - <field name="random" string="Random"/> - <field name="last_seen_date" string="Last Seen"/> - <field name="build_count"/> - <field name="responsible"/> - <field name="test_tags"/> - </tree> - </field> - </record> - - <record id="build_error_search_view" model="ir.ui.view"> - <field name="name">runbot.build.error.log.filter</field> - <field name="model">runbot.build.error</field> - <field name="arch" type="xml"> - <search string="Search errors"> - <field name="content"/> - <field name="module_name"/> - <field name="function"/> - <field name="version_ids"/> - <field name="responsible"/> - <field name="team_id"/> - <field name="fixing_commit"/> - <filter string="No Parent" name="no_parent_error" domain="[('parent_id', '=', False)]"/> - <separator/> - <filter string="Undeterministic" name="random_error" domain="[('random', '=', True)]"/> - <filter string="Deterministic" name="random_error" domain="[('random', '=', False)]"/> - <separator/> - <filter string="Fixed" name="fixed_errors" domain="[('active', '=', False)]"/> - <filter string="Not Fixed" name="not_fixed_errors" domain="[('active', '=', True)]"/> - <separator/> - <filter string="Not Assigned" name="not_assigned_errors" domain="[('responsible', '=', False)]"/> - <separator/> - <filter string="Test Tags" name="test_tagged_errors" domain="[('test_tags', '!=', False)]"/> - </search> - </field> - </record> - - <record id="open_view_build_error_tree" model="ir.actions.act_window"> - <field name="name">Build errors</field> - <field name="res_model">runbot.build.error</field> - <field name="view_mode">tree,form</field> - <field name="context">{'search_default_no_parent_error': True, 'search_default_random_error': True}</field> - </record> - - <record id="build_error_regex_form" model="ir.ui.view"> - <field name="name">runbot.error.regex.form</field> - <field name="model">runbot.error.regex</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="build_regex_group"> - <field name="regex"/> - <field name="re_type"/> - </group> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="build_error_regex_tree" model="ir.ui.view"> - <field name="name">runbot.error.regex.tree</field> - <field name="model">runbot.error.regex</field> - <field name="arch" type="xml"> - <tree string="Errors Regexes"> - <field name="sequence" widget="handle"/> - <field name="regex"/> - <field name="re_type"/> - </tree> - </field> - </record> - - <record id="runbot_regex_search_view" model="ir.ui.view"> - <field name="name">runbot.error.regex.filter</field> - <field name="model">runbot.error.regex</field> - <field name="arch" type="xml"> - <search string="Search regex"> - <field 
name="regex"/> - <filter string="Filtering regex's" name="filtering_regex" domain="[(['re_type', '=', 'filter'])]"/> - <filter string="Cleaning regex's" name="clening_regex" domain="[(['re_type', '=', 'cleaning'])]"/> - </search> - </field> - </record> - - <record id="open_view_error_regex" model="ir.actions.act_window"> - <field name="name">Errors regex</field> - <field name="res_model">runbot.error.regex</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/build_views.xml b/runbot/views/build_views.xml deleted file mode 100644 index 524999bb..00000000 --- a/runbot/views/build_views.xml +++ /dev/null @@ -1,151 +0,0 @@ -<odoo> - <data> - <record id="build_form_params" model="ir.ui.view"> - <field name="model">runbot.build.params</field> - <field name="arch" type="xml"> - <form string="Build Params"> - <sheet> - <group> - <group colspan="2"> - <field name="config_id"/> - <field name="config_data"/> - <field name="extra_params"/> - <field name="dockerfile_id"/> - <field name="dump_db"/> - </group> - <group colspan="2"> - <field name="trigger_id"/> - <field name="create_batch_id"/> - <field name="version_id"/> - <field name="used_custom_trigger"/> - </group> - </group> - <group> - <notebook> - <page string="Commits"> - <field name="commit_link_ids"> - <tree> - <field name="commit_id"/> - <field name="match_type"/> - </tree> - </field> - </page> - <page string="Builds"> - <field name="build_ids"/> - </page> - <page string="Reference Builds"> - <field name="builds_reference_ids"/> - </page> - </notebook> - </group> - </sheet> - </form> - </field> - </record> - <record id="view_build_params_tree" model="ir.ui.view"> - <field name="model">runbot.build.params</field> - <field name="arch" type="xml"> - <tree string="Build params"> - <field name="config_id"/> - <field name="version_id"/> - <field name="commit_link_ids"/> - </tree> - </field> - </record> - <record id="build_form" model="ir.ui.view"> - <field name="model">runbot.build</field> - <field name="arch" type="xml"> - <form string="Build"> - <sheet> - <group> - <field name="description"/> - <field name="params_id"/> - <field name="config_id"/> - <field name="port" groups="base.group_no_one"/> - <field name="dest"/> - <field name="local_state"/> - <field name="global_state"/> - <field name="requested_action" groups="base.group_no_one"/> - <field name="local_result"/> - <field name="global_result"/> - <field name="triggered_result" groups="base.group_no_one"/> - <field name="host"/> - <field name="job_start" groups="base.group_no_one"/> - <field name="job_end" groups="base.group_no_one"/> - <field name="job_time" groups="base.group_no_one"/> - <field name="build_start" groups="base.group_no_one"/> - <field name="build_end" groups="base.group_no_one"/> - <field name="build_time" groups="base.group_no_one"/> - <field name="build_age" groups="base.group_no_one"/> - <field name="build_type"/> - <field name="parent_id"/> - <field name="orphan_result"/> - <field name="build_url" widget="url" readonly="1"/> - <field name="keep_running"/> - <field name="gc_date" readonly="1"/> - <field name="gc_delay"/> - </group> - </sheet> - </form> - </field> - </record> - <record id="view_build_tree" model="ir.ui.view"> - <field name="model">runbot.build</field> - <field name="arch" type="xml"> - <tree string="Builds"> - <field name="dest"/> - <field name="global_state"/> - <field name="global_result"/> - <field name="job"/> - <field name="host"/> - <field name="build_time"/> - <field name="build_age"/> - 
</tree> - </field> - </record> - <record id="view_build_pivot" model="ir.ui.view"> - <field name="name">runbot.pivot</field> - <field name="model">runbot.build</field> - <field name="arch" type="xml"> - <pivot string="Builds analysis"> - <field name="create_date" interval="week" type="row"/> - <field name="global_state" type="col"/> - </pivot> - </field> - </record> - <record id="view_build_search" model="ir.ui.view"> - <field name="model">runbot.build</field> - <field name="arch" type="xml"> - <search string="Search builds"> - <field name="id"/> - <field name="global_state"/> - <field name="dest"/> - <filter string="Pending" name='pending' domain="[('global_state','=', 'pending')]"/> - <filter string="Testing" name='testing' domain="[('global_state','in', ('testing', 'waiting'))]"/> - <filter string="Running" name='running' domain="[('global_state','=', 'running')]"/> - <filter string="Done" name='done' domain="[('global_state','=','done')]"/> - <filter string="Duplicate" name='duplicate' domain="[('local_state','=', 'duplicate')]"/> - <group expand="0" string="Group By..."> - <filter string="Status" name='status' domain="[]" context="{'group_by':'global_state'}"/> - <filter string="Result" name='result' domain="[]" context="{'group_by':'global_result'}"/> - <filter string="Start" name='start' domain="[]" context="{'group_by':'job_start'}"/> - <filter string="Host" name='host' domain="[]" context="{'group_by':'host'}"/> - <filter string="Create Date" name='create_date' domain="[]" context="{'group_by':'create_date'}"/> - </group> - </search> - </field> - </record> - <record id="action_build" model="ir.actions.act_window"> - <field name="name">Builds</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.build</field> - <field name="view_mode">tree,form,graph,pivot</field> - </record> - <record id="action_build_params" model="ir.actions.act_window"> - <field name="name">Builds Params</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.build.params</field> - <field name="view_mode">tree,form</field> - </record> - </data> -</odoo> \ No newline at end of file diff --git a/runbot/views/bundle_views.xml b/runbot/views/bundle_views.xml deleted file mode 100644 index 05ccbe46..00000000 --- a/runbot/views/bundle_views.xml +++ /dev/null @@ -1,210 +0,0 @@ -<odoo> - <data> - - <record id="view_runbot_project" model="ir.ui.view"> - <field name="model">runbot.project</field> - <field name="arch" type="xml"> - <form string="Projects"> - <group> - <field name="name"/> - <field name="keep_sticky_running"/> - <field name="dockerfile_id"/> - <field name="group_ids"/> - <field name="trigger_ids"/> - <field name="sequence"/> - </group> - </form> - </field> - </record> - - <record id="view_runbot_project_tree" model="ir.ui.view"> - <field name="model">runbot.project</field> - <field name="arch" type="xml"> - <tree string="Projects"> - <field name="name"/> - <field name="keep_sticky_running"/> - <field name="dockerfile_id"/> - <field name="group_ids"/> - <field name="trigger_ids"/> - <field name="sequence"/> - </tree> - </field> - </record> - - <record id="view_runbot_bundle" model="ir.ui.view"> - <field name="model">runbot.bundle</field> - <field name="arch" type="xml"> - <form string="Bundles"> - <div class="oe_button_box" name="button_box"> - </div> - <group> - <field name="name"/> - <field name="project_id"/> - <field name="sticky" readonly="0"/> - <field name="to_upgrade" readonly="0"/> - <field name="is_base"/> - <field 
name="base_id"/> - <field name="defined_base_id"/> - <field name="version_id"/> - <field name="no_build"/> - <field name="no_auto_run"/> - <field name="priority"/> - <field name="build_all"/> - <field name="dockerfile_id"/> - <field name="host_id" readonly="0"/> - <field name="commit_limit"/> - <field name="file_limit"/> - <field name="branch_ids"> - <tree> - <field name="dname"/> - <field name="remote_id"/> - <field name="pull_head_name"/> - <field name="target_branch_name"/> - </tree> - </field> - <field string="Trigger customisations" name="trigger_custom_ids"> - <tree editable="bottom"> - <field name="start_mode"/> - <field name="trigger_id" domain="[('project_id', '=', parent.project_id)]"/> - <field name="config_id"/> - <field name="extra_params"/> - <field name="config_data"/> - </tree> - </field> - <field string="Last batches" name="last_batchs"> - <tree> - <field name="state"/> - <field name="commit_link_ids"/> - <field name="slot_ids"/> - </tree> - </field> - - </group> - </form> - </field> - </record> - - <record id="view_runbot_custom_trigger_tree" model="ir.ui.view"> - <field name="model">runbot.bundle.trigger.custom</field> - <field name="arch" type="xml"> - <tree string="Bundle"> - <field name="bundle_id"/> - <field name="trigger_id"/> - <field name="config_id"/> - <field name="extra_params"/> - <field name="config_data"/> - </tree> - </field> - </record> - - <record id="view_runbot_custom_trigger_form" model="ir.ui.view"> - <field name="model">runbot.bundle.trigger.custom</field> - <field name="arch" type="xml"> - <form string="Bundle"> - <group> - <field name="bundle_id"/> - <field name="trigger_id"/> - <field name="config_id"/> - <field name="extra_params"/> - <field name="config_data"/> - </group> - </form> - </field> - </record> - - <record id="view_runbot_bundle_tree" model="ir.ui.view"> - <field name="model">runbot.bundle</field> - <field name="arch" type="xml"> - <tree string="Bundle"> - <field name="project_id"/> - <field name="name"/> - <field name="version_number"/> - <field name="is_base"/> - <field name="sticky"/> - <field name="to_upgrade"/> - <field name="no_build"/> - <field name="branch_ids"/> - <field name="version_id"/> - </tree> - </field> - </record> - - <record id="view_runbot_batch" model="ir.ui.view"> - <field name="model">runbot.batch</field> - <field name="arch" type="xml"> - <form string="Batch"> - <group> - <field name="last_update"/> - <field name="bundle_id"/> - <field name="state"/> - <field name="commit_link_ids"> - <tree> - <field name="commit_id"/> - <field name="match_type"/> - </tree> - </field> - <field name="slot_ids"> - <tree> - <field name="trigger_id"/> - <field name="build_id"/> - <field name="link_type"/> - </tree> - </field> - </group> - </form> - </field> - </record> - - <record id="view_runbot_batch_tree" model="ir.ui.view"> - <field name="model">runbot.batch</field> - <field name="arch" type="xml"> - <tree string="Batchs"> - <field name="bundle_id"/> - <field name="state"/> - </tree> - </field> - </record> - - <record id="view_runbot_version_tree" model="ir.ui.view"> - <field name="model">runbot.version</field> - <field name="arch" type="xml"> - <tree string="Version"> - <field name="name"/> - <field name="number"/> - <field name="is_major"/> - <field name="dockerfile_id"/> - </tree> - </field> - </record> - <record id="action_bundle_custom_trigger" model="ir.actions.act_window"> - <field name="name">Custom triggers</field> - <field name="type">ir.actions.act_window</field> - <field 
name="res_model">runbot.bundle.trigger.custom</field> - <field name="view_mode">tree,form</field> - </record> - <record id="action_bundle" model="ir.actions.act_window"> - <field name="name">Bundles</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.bundle</field> - <field name="view_mode">tree,form</field> - </record> - <record id="action_bundle_project" model="ir.actions.act_window"> - <field name="name">Projects</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.project</field> - <field name="view_mode">tree,form</field> - </record> - <record id="action_bundle_version" model="ir.actions.act_window"> - <field name="name">Versions</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.version</field> - <field name="view_mode">tree,form</field> - </record> - <record id="action_bundle_batch" model="ir.actions.act_window"> - <field name="name">Batches</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.batch</field> - <field name="view_mode">tree,form</field> - </record> - </data> -</odoo> \ No newline at end of file diff --git a/runbot/views/codeowner_views.xml b/runbot/views/codeowner_views.xml deleted file mode 100644 index 1b83f5ab..00000000 --- a/runbot/views/codeowner_views.xml +++ /dev/null @@ -1,46 +0,0 @@ -<odoo> - <data> - <record id="codeowner_form" model="ir.ui.view"> - <field name="name">runbot.codeowner.form</field> - <field name="model">runbot.codeowner</field> - <field name="arch" type="xml"> - <form string="Host"> - <sheet> - <group> - <field name="project_id"/> - <field name="team_id"/> - <field name="github_teams"/> - <field name="regex"/> - <field name="version_domain" widget="domain" options="{'model': 'runbot.version', 'in_dialog': True, 'operators': ['in','=', '<', '>']}"/> - </group> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="view_codeowner_tree" model="ir.ui.view"> - <field name="name">runbot.codeowner.tree</field> - <field name="model">runbot.codeowner</field> - <field name="arch" type="xml"> - <tree string="Codeowners"> - <field name="project_id"/> - <field name="team_id"/> - <field name="version_domain"/> - <field name="regex"/> - <field name="github_teams"/> - </tree> - </field> - </record> - - <record id="open_view_codeowner_tree" model="ir.actions.act_window"> - <field name="name">Codeowner</field> - <field name="res_model">runbot.codeowner</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/commit_views.xml b/runbot/views/commit_views.xml deleted file mode 100644 index 1b921845..00000000 --- a/runbot/views/commit_views.xml +++ /dev/null @@ -1,64 +0,0 @@ -<odoo> - <data> - - <record id="commit_view_form" model="ir.ui.view"> - <field name="model">runbot.commit</field> - <field name="arch" type="xml"> - <form string="Commit"> - <sheet> - <group> - <field name="name"/> - <field name="dname"/> - <field name="repo_id"/> - <field name="date"/> - <field name="author"/> - <field name="author_email"/> - <field name="committer"/> - <field name="committer_email"/> - <field name="subject"/> - <field name="rebase_on_id"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="commit_view_tree" model="ir.ui.view"> - <field name="name">runbot.commit.tree</field> - <field 
name="model">runbot.commit</field> - <field name="arch" type="xml"> - <tree string="Commits"> - <field name="name"/> - <field name="date"/> - <field name="repo_id"/> - <field name="author_email"/> - <field name="committer_email"/> - </tree> - </field> - </record> - - <record id="runbot_commit_search_view" model="ir.ui.view"> - <field name="name">runbot.commit.filter</field> - <field name="model">runbot.commit</field> - <field name="arch" type="xml"> - <search string="Search commit"> - <field name="name"/> - <field name="date"/> - <field name="author"/> - <field name="author_email"/> - <field name="committer"/> - <field name="committer_email"/> - <field name="subject"/> - </search> - </field> - </record> - - <record id="open_view_commit_tree" model="ir.actions.act_window"> - <field name="name">Commits</field> - <field name="res_model">runbot.commit</field> - <field name="view_mode">tree,form</field> - </record> - - - </data> -</odoo> diff --git a/runbot/views/config_views.xml b/runbot/views/config_views.xml deleted file mode 100644 index d62ff6fa..00000000 --- a/runbot/views/config_views.xml +++ /dev/null @@ -1,195 +0,0 @@ -<odoo> - <data> - - <record id="config_form" model="ir.ui.view"> - <field name="model">runbot.build.config</field> - <field name="arch" type="xml"> - <form string="Build config"> - <sheet> - <div attrs="{'invisible': [('protected', '=', False)]}"> - <i class="fa fa-warning text-warning"/>This record is protected and can only be edited by config administrator. - </div> - <group> - <field name="name"/> - <field name="description"/> - <field name="step_order_ids"> - <tree string="Step list" editable="bottom"> - <field name="step_id" widget="Many2one" options="{'no_edit': 1}"/> - <field name="sequence" widget="handle"/> - </tree> - </field> - <field name="protected" groups="base.group_no_one"/> - <field name="group" groups="base.group_no_one"/> - </group> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="config_step_form" model="ir.ui.view"> - <field name="model">runbot.build.config.step</field> - <field name="arch" type="xml"> - <form string="Build config step"> - <sheet> - <div t-att-class="label label-warning" attrs="{'invisible': [('protected', '=', False)]}"> - This record is protected and can only be edited by config administrator. 
- </div> - <group string="General settings"> - <field name="name"/> - <field name="domain_filter"/> - <field name="job_type"/> - <field name="make_stats"/> - <field name="protected" groups="base.group_no_one"/> - <field name="default_sequence" groups="base.group_no_one"/> - <field name="group" groups="base.group_no_one"/> - </group> - <group string="Stats regexes" attrs="{'invisible': [('make_stats', '=', False)]}"> - <field name="build_stat_regex_ids"> - <tree string="Regexes" editable="bottom"> - <field name="name"/> - <field name="regex"/> - <field name="description"/> - </tree> - </field> - </group> - <group string="Python settings" attrs="{'invisible': [('job_type', 'not in', ('python'))]}"> - <field name="python_code" widget="ace" options="{'mode': 'python'}"/> - <field name="python_result_code" widget="ace" options="{'mode': 'python'}"/> - <field name="running_job"/> - <field name="ignore_triggered_result"/> - </group> - <group string="Test settings" attrs="{'invisible': [('job_type', 'not in', ('python', 'install_odoo'))]}"> - <field name="create_db" groups="base.group_no_one"/> - <field name="install_modules"/> - <field name="db_name" groups="base.group_no_one"/> - <field name="cpu_limit" groups="base.group_no_one"/> - <field name="coverage"/> - <field name="paths_to_omit" attrs="{'invisible': [('coverage', '!=', True)]}"/> - <field name="test_enable"/> - <field name="test_tags"/> - <field name="enable_auto_tags"/> - <field name="sub_command"/> - </group> - <group string="Extra Parameters" attrs="{'invisible': [('job_type', 'not in', ('python', 'install_odoo', 'test_upgrade', 'run_odoo'))]}"> - <field name="extra_params"/> - <field name="additionnal_env"/> - </group> - <group string="Create settings" attrs="{'invisible': [('job_type', 'not in', ('python', 'create_build'))]}"> - <field name="create_config_ids" widget="many2many_tags" options="{'no_create': True}" /> - <field name="number_builds"/> - <field name="make_orphan"/> - </group> - <group attrs="{'invisible': [('job_type', 'not in', ('python', 'configure_upgrade'))]}"> - <group class="col" string="Target version settings"> - <field string="Current" name="upgrade_to_current"/> - <field string="Master" name="upgrade_to_master"/> - <field string="Major" name="upgrade_to_major_versions"/> - <field string="All saas" name="upgrade_to_all_versions"/> - <field string="Explicit list" name="upgrade_to_version_ids" widget="many2many_tags"/> - </group> - <group class="col" string="Source version settings"> - <field string="Major" name="upgrade_from_previous_major_version"/> - <field string="Last saas" name="upgrade_from_last_intermediate_version"/> - <field string="All saas" name="upgrade_from_all_intermediate_version"/> - <field string="Explicit list" name="upgrade_from_version_ids" widget="many2many_tags"/> - </group> - <group string="Upgrade settings" class="o_group_col_12"> - <field name="upgrade_flat"/> - <field name="upgrade_config_id"/> - <field string="Db to upgrade" name="upgrade_dbs"> - <tree editable="bottom"> - <field name="config_id"/> - <field name="db_pattern"/> - <field name="min_target_version_id"/> - </tree> - </field> - </group> - </group> - <group string="Restore settings" attrs="{'invisible': [('job_type', '!=', 'restore')]}"> - <field name="restore_download_db_suffix"/> - <field name="restore_rename_db_suffix"/> - </group> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> 
- - <record id="runbot_config_tree_view" model="ir.ui.view"> - <field name="name">Runbot Config tree view</field> - <field name="model">runbot.build.config</field> - <field name="arch" type="xml"> - <tree string="Build Configs"> - <field name="name"/> - <field name="description"/> - </tree> - </field> - </record> - - <record id="runbot_step_config_tree_view" model="ir.ui.view"> - <field name="name">Runbot Config Step tree view</field> - <field name="model">runbot.build.config.step</field> - <field name="arch" type="xml"> - <tree string="Build Config Steps"> - <field name="name"/> - <field name="job_type"/> - <field name="group"/> - </tree> - </field> - </record> - - <record id="runbot_config_search_view" model="ir.ui.view"> - <field name="name">runbot.build.config.filter</field> - <field name="model">runbot.build.config</field> - <field name="arch" type="xml"> - <search string="Search config"> - <field name="name"/> - <field name="group_name"/> - <filter string="Is in a group" name='is_in_group' domain="[(['group', '!=', False])]"/> - <filter string="No step's defined" name="no_step" domain="[(['step_order_ids', '=', False])]"/> - </search> - </field> - </record> - - <record id="runbot_config_step_search_view" model="ir.ui.view"> - <field name="name">runbot.build.config.step.filter</field> - <field name="model">runbot.build.config.step</field> - <field name="arch" type="xml"> - <search string="Search config step"> - <field name="name"/> - <field name="group_name"/> - <filter string="Install job" name='install_job' domain="[(['job_type', '=', 'install_odoo'])]"/> - <filter string="Run job" name='run_job' domain="[(['job_type', '=', 'run_odoo'])]"/> - <filter string="Python job" name='python_job' domain="[(['job_type', '=', 'python'])]"/> - <filter string="Create job" name='create_job' domain="[(['job_type', '=', 'create_build'])]"/> - <separator/> - <filter string="Is in a group" name='is_in_group' domain="[(['group', '!=', False])]"/> - <separator/> - <filter string="No config defined" name="no_step" domain="[(['step_order_ids', '=', False])]"/> - </search> - </field> - </record> - - <record id="open_view_job_config_tree" model="ir.actions.act_window"> - <field name="name">Build Configs</field> - <field name="res_model">runbot.build.config</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="open_view_job_tree" model="ir.actions.act_window"> - <field name="name">Build Config Steps</field> - <field name="res_model">runbot.build.config.step</field> - <field name="view_mode">tree,form</field> - </record> - - - - </data> -</odoo> diff --git a/runbot/views/custom_trigger_wizard_views.xml b/runbot/views/custom_trigger_wizard_views.xml deleted file mode 100644 index 5d3b065f..00000000 --- a/runbot/views/custom_trigger_wizard_views.xml +++ /dev/null @@ -1,38 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<odoo> - <record model="ir.ui.view" id="runbot_trigger_custom_wizard_form"> - <field name="name">runbot_trigger_custom_wizard</field> - <field name="model">runbot.trigger.custom.wizard</field> - <field name="arch" type="xml"> - <form string="Custom trigger wizard"> - <group> - <field name="bundle_id" invisible="1"/> - <field name="project_id" invisible="1"/> - <field name="warnings" decoration-warning="warnings"/> - <field name="trigger_id"/> - <field name="config_id"/> - <field name="number_build"/> - <field name="child_extra_params"/> - <field name="child_dump_url"/> - <field name="child_config_id"/> - <field name="config_data"/> - </group> - <footer> - <button 
name="submit" string="Submit" type="object" class="btn-primary"/> - <button string="Cancel" special="cancel" class="btn-default"/> - </footer> - </form> - </field> - </record> - - <record model="ir.actions.act_window" id="runbot_trigger_custom_wizard_action"> - <field name="name">Generate custom trigger</field> - <field name="res_model">runbot.trigger.custom.wizard</field> - <field name="view_mode">form</field> - <field name="view_id" ref="runbot_trigger_custom_wizard_form"/> - <field name="target">new</field> - <field name="binding_model_id" ref="runbot.model_runbot_bundle"/> - <field name="binding_view_types">form</field> - <field name="context">{'default_bundle_id': active_id}</field> - </record> -</odoo> diff --git a/runbot/views/dashboard_views.xml b/runbot/views/dashboard_views.xml deleted file mode 100644 index 3e4ab908..00000000 --- a/runbot/views/dashboard_views.xml +++ /dev/null @@ -1,129 +0,0 @@ -<odoo> - <data> - <record id="team_form" model="ir.ui.view"> - <field name="name">runbot.team.form</field> - <field name="model">runbot.team</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="team_group"> - <field name="name"/> - <field name="dashboard_id"/> - <field name="path_glob"/> - </group> - <notebook> - <page string="Team Errors"> - <field name="build_error_ids" nolabel="1" widget="many2many" options="{'not_delete': True, 'no_create': True}"/> - </page> - <page string="Team Members"> - <field name="user_ids" nolabel="1" widget="many2many" options="{'not_delete': True, 'no_create': True}"/> - </page> - </notebook> - </sheet> - </form> - </field> - </record> - - <record id="team_tree" model="ir.ui.view"> - <field name="name">runbot.team.tree</field> - <field name="model">runbot.team</field> - <field name="arch" type="xml"> - <tree string="Runbot Teams"> - <field name="name"/> - <field name="path_glob"/> - <field name="build_error_ids"/> - </tree> - </field> - </record> - - <record id="dashboard_form" model="ir.ui.view"> - <field name="name">runbot.dashboard.form</field> - <field name="model">runbot.dashboard</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="dashboard_group"> - <field name="name"/> - <field name="team_ids"/> - <field name="dashboard_tile_ids"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="dashboard_tree" model="ir.ui.view"> - <field name="name">runbot.dashboard.tree</field> - <field name="model">runbot.dashboard</field> - <field name="arch" type="xml"> - <tree string="Runbot Dashboards"> - <field name="name"/> - <field name="team_ids"/> - <field name="dashboard_tile_ids"/> - </tree> - </field> - </record> - - <record id="dashboard_tile_form" model="ir.ui.view"> - <field name="name">runbot.dashboard.tile.form</field> - <field name="model">runbot.dashboard.tile</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="dashboard_tile_group"> - <field name="name"/> - <field name="project_id"/> - <field name="category_id"/> - <field name="trigger_id"/> - <field name="config_id"/> - <field name="domain_filter" widget="domain" options="{'model': 'runbot.build', 'in_dialog': True}"/> - <field name="custom_template_id" groups="runbot.group_runbot_admin"/> - </group> - <notebook> - <page string="Builds Found"> - <field name="build_ids" nolabel="1" widget="many2many" options="{'not_delete': True, 'no_create': True}"/> - </page> - <page string="Dashboards"> - <field name="dashboard_ids" nolabel="1" widget="many2many" options="{'not_delete': True}"/> - </page> - </notebook> - </sheet> - 
</form> - </field> - </record> - - <record id="dashboard_tile_tree" model="ir.ui.view"> - <field name="name">runbot.dashboard.tile.tree</field> - <field name="model">runbot.dashboard.tile</field> - <field name="arch" type="xml"> - <tree string="Runbot Dashboards Tiles"> - <field name="sequence" widget="handle"/> - <field name="project_id"/> - <field name="category_id"/> - <field name="trigger_id"/> - <field name="config_id"/> - <field name="name"/> - </tree> - </field> - </record> - - <record id="open_view_runbot_team" model="ir.actions.act_window"> - <field name="name">Runbot Teams</field> - <field name="res_model">runbot.team</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="open_view_runbot_dashboard" model="ir.actions.act_window"> - <field name="name">Runbot Dashboards</field> - <field name="res_model">runbot.dashboard</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="open_view_runbot_dashboard_tile" model="ir.actions.act_window"> - <field name="name">Runbot Dashboards Tiles</field> - <field name="res_model">runbot.dashboard.tile</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/dockerfile_views.xml b/runbot/views/dockerfile_views.xml deleted file mode 100644 index ed8672b6..00000000 --- a/runbot/views/dockerfile_views.xml +++ /dev/null @@ -1,77 +0,0 @@ -<odoo> - <data> - <record id="dockerfile_form" model="ir.ui.view"> - <field name="name">runbot.dockerfile.form</field> - <field name="model">runbot.dockerfile</field> - <field name="arch" type="xml"> - <form string="Dockerfile"> - <sheet> - <widget name="web_ribbon" title="Empty" bg_color="bg-warning" attrs="{'invisible': [('dockerfile', '!=', '')]}"/> - <group> - <field name="name"/> - <field name="image_tag"/> - <field name="to_build"/> - <field name="version_ids" widget="many2many_tags"/> - <field name="project_ids" widget="many2many_tags"/> - <field name="template_id"/> - </group> - <group> - <field name="description"/> - </group> - <notebook> - <page string="Template"> - <field name="arch_base" widget="ace" options="{'mode': 'xml'}" readonly="0"/> - </page> - <page string="Dockerfile"> - <field name="dockerfile"/> - </page> - <page string="Views"> - <field name="view_ids" widget="one2many"> - <tree> - <field name="id"/> - <field name="key"/> - </tree> - </field> - </page> - <page string="Bundles"> - <field name="bundle_ids" widget="one2many"> - <tree> - <field name="project_id"/> - <field name="name"/> - </tree> - </field> - </page> - </notebook> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="dockerfile_view_tree" model="ir.ui.view"> - <field name="name">runbot.dockerfile.tree</field> - <field name="model">runbot.dockerfile</field> - <field name="arch" type="xml"> - <tree string="Dockerfile" decoration-danger="dockerfile == ''" decoration-warning="to_build == False"> - <field name="name"/> - <field name="image_tag"/> - <field name="to_build" widget="boolean_toggle"/> - <field name="version_ids" widget="many2many_tags"/> - <field name="project_ids" widget="many2many_tags"/> - <field name="bundle_ids"/> - <field name="dockerfile" invisible="True"/> - </tree> - </field> - </record> - - <record id="open_view_dockerfile_tree" model="ir.actions.act_window"> - <field name="name">Dockerfiles</field> - <field name="res_model">runbot.dockerfile</field> - <field 
name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/error_log_views.xml b/runbot/views/error_log_views.xml deleted file mode 100644 index b2e63c7e..00000000 --- a/runbot/views/error_log_views.xml +++ /dev/null @@ -1,90 +0,0 @@ -<odoo> - <data> - - <record id="runbot_error_log_form_view" model="ir.ui.view"> - <field name="name">Runbot Error Log form view</field> - <field name="model">runbot.error.log</field> - <field name="arch" type="xml"> - <form string="Build Error"> - <header> - </header> - <sheet> - <div class="oe_button_box" name="button_box" groups="base.group_user"> - </div> - <div class="oe_title"> - <h1><field name="build_id"/></h1> - <field name="build_url" widget="url"/> - &nbsp;<field name="log_create_date"/> - </div> - <group> - <group> - <field name="log_type"/> - </group> - <group> - <field name="name"/> - <field name="func"/> - <field name="path"/> - </group> - </group> - <notebook> - <page string="Log message" name="log_message"> - <group> - <field name="message"/> - </group> - </page> - </notebook> - </sheet> - </form> - </field> - </record> - - <record id="runbot_error_log_tree_view" model="ir.ui.view"> - <field name="name">Runbot Error Log tree view</field> - <field name="model">runbot.error.log</field> - <field name="arch" type="xml"> - <tree string="Build Errors"> - <button name="action_goto_build" type="object" icon="fa-external-link "/> - <field name="build_id"/> - <field name="bundle_ids" widget="many2many_tags"/> - <field name="log_create_date"/> - <field name="name"/> - <field name="func"/> - <field name="path"/> - <field name="summary"/> - <field name="log_type"/> - </tree> - </field> - </record> - - <record id="runbot_logs_search_view" model="ir.ui.view"> - <field name="name">runbot.error.log.filter</field> - <field name="model">runbot.error.log</field> - <field name="arch" type="xml"> - <search string="Search master"> - <field name="message"/> - <field name="name" string="Module"/> - <field name="func"/> - <field name="build_id"/> - <filter string="Failed builds" name="failed_builds" domain="[('global_state', '=', 'done'), ('global_result', '=', 'ko')]"/> - <separator/> - <filter string="Master bundle" name="master_bundle" domain="[('bundle_ids.name', '=', 'master')]"/> - <filter string="Sticky bundles" name="sticky_bundles" domain="[('sticky', '=', True)]"/> - <separator/> - <!-- <filter name="filter_log_create_date" date="log_create_date" string="Log Date" default_period="last_7_days"/> --> - <filter string="Last 7 Days" name="log_date" domain="[ - ('log_create_date', '>=', (datetime.datetime.combine(context_today() + relativedelta(days=-7), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')), - ('log_create_date', '<', (datetime.datetime.combine(context_today(), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S'))]"/> - </search> - </field> - </record> - - <record id="open_view_error_log_tree" model="ir.actions.act_window"> - <field name="name">Error Logs</field> - <field name="res_model">runbot.error.log</field> - <field name="view_mode">tree,form</field> - <!-- <field name="context">{'search_default_sticky_bundles': True, 'search_default_failed_builds': True, 'time_ranges': {'field': 'log_create_date', 'range': 'last_7_days'},}</field> --> - <field name="context">{'search_default_sticky_bundles': True, 'search_default_failed_builds': True, 'search_default_log_date': True}</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/host_views.xml b/runbot/views/host_views.xml deleted 
file mode 100644 index 767dc3a2..00000000 --- a/runbot/views/host_views.xml +++ /dev/null @@ -1,51 +0,0 @@ -<odoo> - <data> - - <record id="host_form" model="ir.ui.view"> - <field name="name">runbot.host.form</field> - <field name="model">runbot.host</field> - <field name="arch" type="xml"> - <form string="Host"> - <sheet> - <group> - <field name="name" readonly='1'/> - <field name="disp_name"/> - <field name="active"/> - <field name="last_start_loop" readonly='1'/> - <field name="last_end_loop" readonly='1'/> - <field name="last_success" readonly='1'/> - <field name="assigned_only"/> - <field name="nb_worker"/> - <field name="last_exception" readonly='1'/> - <field name="exception_count" readonly='1'/> - </group> - </sheet> - <div class="oe_chatter"> - <field name="message_follower_ids" widget="mail_followers"/> - <field name="message_ids" widget="mail_thread"/> - </div> - </form> - </field> - </record> - - <record id="view_host_tree" model="ir.ui.view"> - <field name="name">runbot.host.tree</field> - <field name="model">runbot.host</field> - <field name="arch" type="xml"> - <tree string="Builds"> - <field name="name"/> - <field name="disp_name"/> - <field name="assigned_only" widget="boolean_toggle"/> - <field name="nb_worker"/> - </tree> - </field> - </record> - - <record id="open_view_host_tree" model="ir.actions.act_window"> - <field name="name">Host</field> - <field name="res_model">runbot.host</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/menus.xml b/runbot/views/menus.xml deleted file mode 100644 index df0bae28..00000000 --- a/runbot/views/menus.xml +++ /dev/null @@ -1,57 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<odoo> - <menuitem name="Runbot" id="runbot_menu_root"/> - - <menuitem name="Objects" id="menu_objects" parent="runbot_menu_root" sequence="200"/> - <menuitem action="action_build" id="menu_build_build" parent="menu_objects" sequence="10" /> - <menuitem action="action_build_params" id="menu_build_params" parent="menu_objects" sequence="15" /> - <menuitem id="menu_bundle" action="action_bundle" parent="menu_objects" sequence="20"/> - <menuitem id="menu_bundle_batch" action="action_bundle_batch" parent="menu_objects" sequence="25" /> - <menuitem name="Branches" id="runbot_menu_branch_tree" parent="menu_objects" sequence="40" action="open_view_branch_tree"/> - <menuitem name="Commits" id="runbot_menu_commit_tree" parent="menu_objects" sequence="50" action="open_view_commit_tree"/> - <menuitem id="runbot_menu_bundle_custom_trigger" parent="menu_objects" sequence="70" action="action_bundle_custom_trigger"/> - - <menuitem name="Hosts" id="runbot_menu_host_tree" parent="runbot_menu_root" sequence="300" action="open_view_host_tree"/> - - <menuitem id="runbot_menu_trigger" parent="runbot_menu_root" sequence="500" action="runbot_triggers_action"/> - - <menuitem name="Configs" id="runbot_menu_configs" parent="runbot_menu_root" sequence="600"/> - <menuitem id="runbot_menu_job_config_tree" parent="runbot_menu_configs" sequence="10" action="open_view_job_config_tree"/> - <menuitem id="runbot_menu_job_tree" parent="runbot_menu_configs" sequence="20" action="open_view_job_tree"/> - <menuitem name="CodeOwners" id="runbot_menu_codeowner_tree" parent="runbot_menu_configs" sequence="30" action="open_view_codeowner_tree"/> - - <menuitem id="runbot_menu_upgrade_exceptions_tree" parent="runbot_menu_root" sequence="700" action="open_view_upgrade_exception_tree"/> - - <menuitem name="Docker" id="menu_dockerfile" 
parent="runbot_menu_root" action="open_view_dockerfile_tree" sequence="800"/> - - <menuitem name="Manage errors" id="runbot_menu_manage_errors" parent="runbot_menu_root" sequence="900"/> - <menuitem name="Build errors" id="runbot_menu_build_error_tree" parent="runbot_menu_manage_errors" sequence="10" action="open_view_build_error_tree"/> - <menuitem name="Error Logs" id="runbot_menu_error_logs" parent="runbot_menu_manage_errors" sequence="20" action="open_view_error_log_tree"/> - - <menuitem name="Teams" id="runbot_menu_teams" parent="runbot_menu_root" sequence="1000"/> - <menuitem name="Teams" id="runbot_menu_team_tree" parent="runbot_menu_teams" sequence="30" action="open_view_runbot_team"/> - <menuitem name="Dashboards" id="runbot_menu_runbot_dashboard_tree" parent="runbot_menu_teams" sequence="40" action="open_view_runbot_dashboard"/> - <menuitem name="Dashboard Tiles" id="runbot_menu_runbot_dashboard_tile_tree" parent="runbot_menu_teams" sequence="50" action="open_view_runbot_dashboard_tile"/> - - <menuitem name="Warnings" id="runbot_menu_warning_root" parent="runbot_menu_root" sequence="1200" action="open_view_warning_tree"/> - - <menuitem name="Settings" id="menu_runbot_settings" parent="runbot_menu_root" sequence="9000" /> - <menuitem id="menu_runbot_global_settings" parent="menu_runbot_settings" action="action_runbot_configuration" groups="base.group_system"/> - <menuitem id="menu_bundle_project" action="action_bundle_project" sequence="10" parent="menu_runbot_settings"/> - <menuitem id="menu_bundle_version" action="action_bundle_version" sequence="20" parent="menu_runbot_settings"/> - <menuitem id="runbot_menu_repos" parent="menu_runbot_settings" sequence="30" action="runbot_repos_action"/> - <menuitem id="runbot_menu_remotes" parent="menu_runbot_settings" sequence="40" action="runbot_remotes_action"/> - <menuitem id="runbot_menu_trigger_category" parent="menu_runbot_settings" sequence="50" action="runbot_triggers_category_action"/> - <menuitem id="runbot_menu_upgrade_regex_tree" parent="menu_runbot_settings" sequence="60" action="open_view_upgrade_regex_tree"/> - <menuitem name="Stats Regexes" id="runbot_menu_stat" parent="menu_runbot_settings" sequence="70" action="open_view_stat_regex_tree"/> - <menuitem name="Stat Regex Wizard" id="runbot_menu_stat_wizard" parent="menu_runbot_settings" sequence="80" action="runbot_stat_regex_wizard_action"/> - <menuitem name="Error regex" id="runbot_menu_error_regex_tree" parent="menu_runbot_settings" sequence="20" action="open_view_error_regex"/> - - <menuitem name="Technical" id="runbot_menu_technical" parent="menu_runbot_settings" sequence="10000"/> - <menuitem id="runbot_menu_ir_cron_act" action="base.ir_cron_act" parent="runbot_menu_technical"/> - <menuitem id="runbot_menu_base_automation_act" action="base_automation.base_automation_act" parent="runbot_menu_technical"/> - <menuitem id="runbot_menu_action_ui_view" action="base.action_ui_view" parent="runbot_menu_technical"/> - - <menuitem name="▶" id="runbot_menu_website" parent="runbot_menu_root" sequence="20000" action="website.action_website"/> - -</odoo> diff --git a/runbot/views/repo_views.xml b/runbot/views/repo_views.xml deleted file mode 100644 index 338411dd..00000000 --- a/runbot/views/repo_views.xml +++ /dev/null @@ -1,181 +0,0 @@ -<odoo> - <data> - <record id="repo_trigger_form" model="ir.ui.view"> - <field name="name">runbot.trigger.form</field> - <field name="model">runbot.trigger</field> - <field name="arch" type="xml"> - <form> - <header> - </header> - <sheet> - <widget 
name="web_ribbon" title="Archived" bg_color="bg-danger" attrs="{'invisible': [('active', '=', True)]}"/> - <group name="repo_group"> - <field name="active" invisible="1"/> - <field name="name"/> - <field name="sequence"/> - <field name="description"/> - <field name="category_id" required='1'/> - <field name="project_id"/> - <field name="repo_ids"/> - <field name="dependency_ids"/> - <field name="config_id"/> - <field name="batch_dependent"/> - <field name="version_domain" widget="domain" options="{'model': 'runbot.version', 'in_dialog': True}"/> - <field name="hide"/> - <field name="manual"/> - <field name="upgrade_dumps_trigger_id"/> - <field name="upgrade_step_id"/> - <field name="ci_context"/> - <field name="ci_url"/> - <field name="ci_description"/> - <field name="has_stats"/> - <field name="team_ids"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="trigger_view_tree" model="ir.ui.view"> - <field name="name">runbot.trigger.tree</field> - <field name="model">runbot.trigger</field> - <field name="arch" type="xml"> - <tree string="Repositories"> - <field name="name"/> - <field name="category_id"/> - <field name="project_id"/> - <field name="config_id"/> - <field name="ci_context"/> - <field name="repo_ids" widget="many2many_tags"/> - <field name="dependency_ids" widget="many2many_tags"/> - <field name="manual"/> - </tree> - </field> - </record> - - <record id="repo_trigger_catgory_form" model="ir.ui.view"> - <field name="name">runbot.category.form</field> - <field name="model">runbot.category</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="category_group"> - <field name="name"/> - <field name="icon"/> - <field name="view_id"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="repo_form" model="ir.ui.view"> - <field name="name">runbot.repo.form</field> - <field name="model">runbot.repo</field> - <field name="arch" type="xml"> - <form> - <header> - </header> - <sheet> - <group name="repo"> - <field name="name"/> - <field name="identity_file"/> - <field name="sequence"/> - <field name="project_id"/> - <field name="modules"/> - <field name="server_files"/> - <field name="manifest_files"/> - <field name="addons_paths"/> - <field name="hook_time" groups="base.group_no_one"/> - <field name="mode"/> - <field name="forbidden_regex"/> - <field name="invalid_branch_message"/> - <field name="single_version"/> - <field name="remote_ids"> - <tree string="Remotes" editable="bottom"> - <field name="name"/> - <field name="sequence"/> - <field name="fetch_heads" string="Branch"/> - <field name="fetch_pull" string="PR"/> - <field name="send_status"/> - <field name="token"/> - </tree> - </field> - </group> - </sheet> - </form> - </field> - </record> - - <record id="remote_form" model="ir.ui.view"> - <field name="name">runbot.remote.form</field> - <field name="model">runbot.remote</field> - <field name="arch" type="xml"> - <form> - <header> - </header> - <sheet> - <group name="repo_group"> - <field name="name"/> - <field name="sequence"/> - <field name="repo_id"/> - <field name="token"/> - <field name="fetch_pull"/> - <field name="fetch_heads"/> - <field name="send_status"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="remote_view_tree" model="ir.ui.view"> - <field name="name">runbot.remote.tree</field> - <field name="model">runbot.remote</field> - <field name="arch" type="xml"> - <tree string="Repositories"> - <field name="name"/> - <field name="repo_id"/> - <field name="fetch_pull"/> - <field 
name="fetch_heads"/> - <field name="send_status"/> - </tree> - </field> - </record> - - <record id="repo_view_tree" model="ir.ui.view"> - <field name="name">runbot.repo.tree</field> - <field name="model">runbot.repo</field> - <field name="arch" type="xml"> - <tree string="Repositories"> - <field name="sequence" widget="handle"/> - <field name="name"/> - </tree> - </field> - </record> - - <record id="runbot_repos_action" model="ir.actions.act_window"> - <field name="name">Repositories</field> - <field name="res_model">runbot.repo</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="runbot_triggers_action" model="ir.actions.act_window"> - <field name="name">Triggers</field> - <field name="res_model">runbot.trigger</field> - <field name="view_mode">tree,form</field> - </record> - <record id="runbot_remotes_action" model="ir.actions.act_window"> - <field name="name">Remotes</field> - <field name="res_model">runbot.remote</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="runbot_triggers_category_action" model="ir.actions.act_window"> - <field name="name">Trigger Categories</field> - <field name="res_model">runbot.category</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> \ No newline at end of file diff --git a/runbot/views/res_config_settings_views.xml b/runbot/views/res_config_settings_views.xml deleted file mode 100644 index ea69a6a8..00000000 --- a/runbot/views/res_config_settings_views.xml +++ /dev/null @@ -1,88 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <data> - <record id="res_config_settings_view_form" model="ir.ui.view"> - <field name="name">res.config.settings.view.form.inherit.runbot</field> - <field name="model">res.config.settings</field> - <field name="inherit_id" ref="base.res_config_settings_view_form"/> - <field name="arch" type="xml"> - <xpath expr="//div[hasclass('settings')]" position="inside"> - <div class="app_settings_block" data-string="Runbot" string="Runbot" data-key="runbot"> - <h2>Runbot configuration</h2> - <div class="row mt16 o_settings_container"> - <div class="col-12 col-lg-6 o_setting_box"> - <div class="o_setting_right_pane"> - <span class="o_form_label">Builder settings</span> - <div class="content-group"> - <label for="runbot_workers" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_workers" style="width: 15%;"/> - <label for="runbot_containers_memory" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_containers_memory" style="width: 15%;"/>&nbsp; - <field name="runbot_memory_bytes" readonly='1' style="width: 15%;"/> - <label for="runbot_running_max" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_running_max" style="width: 15%;"/> - <label for="runbot_timeout" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_timeout" style="width: 15%;"/> - <label for="runbot_starting_port" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_starting_port" style="width: 15%;"/> - </div> - </div> - </div> - <div class="col-12 col-lg-6 o_setting_box"> - <div class="o_setting_right_pane"> - <span class="o_form_label">Leader settings</span> - <div class="content-group"> - <label for="runbot_max_age" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_max_age" style="width: 15%;"/> - </div> - </div> - </div> - <div class="col-12 col-lg-12 o_setting_box"> - <div class="o_setting_right_pane"> - <div class="content-group"> - 
<label for="runbot_template" class="col-xs-3 o_light_label" style="width: 40%;"/> - <field name="runbot_template" style="width: 55%;"/> - <label for="runbot_is_base_regex" class="col-xs-3 o_light_label" style="width: 40%;"/> - <field name="runbot_is_base_regex" style="width: 55%;"/> - </div> - </div> - </div> - <div class="col-12 col-lg-6 o_setting_box"> - <div class="o_setting_right_pane"> - <span class="o_form_label">Garbage Collecting Settings</span> - <div class="content-group"> - <label for="runbot_db_gc_days" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_db_gc_days" style="width: 15%;"/> - <label for="runbot_db_gc_days_child" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_db_gc_days_child" style="width: 15%;"/> - <label for="runbot_full_gc_days" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_full_gc_days" style="width: 15%;"/> - </div> - </div> - </div> - <label for="runbot_logdb_uri" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_logdb_uri" style="width: 100%;"/> - <label for="runbot_default_odoorc" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_default_odoorc" style="width: 100%;"/> - <label for="runbot_message" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_message" style="width: 100%;"/> - <label for="runbot_upgrade_exception_message" class="col-xs-3 o_light_label" style="width: 60%;"/> - <field name="runbot_upgrade_exception_message" style="width: 100%;"/> - </div> - </div> - </xpath> - </field> - </record> - - <record id="action_runbot_configuration" model="ir.actions.act_window"> - <field name="name">Settings</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">res.config.settings</field> - <field name="view_mode">form</field> - <field name="target">inline</field> - <field name="context">{'module' : 'runbot'}</field> - </record> - - - </data> -</odoo> diff --git a/runbot/views/stat_views.xml b/runbot/views/stat_views.xml deleted file mode 100644 index 75567974..00000000 --- a/runbot/views/stat_views.xml +++ /dev/null @@ -1,42 +0,0 @@ -<odoo> - <data> - <record id="build_stat_regex_form" model="ir.ui.view"> - <field name="name">runbot.build.stat.regex.form</field> - <field name="model">runbot.build.stat.regex</field> - <field name="arch" type="xml"> - <form> - <sheet> - <group name="stat_regex_group"> - <field name="name"/> - <field name="regex" placeholder="odoo.addons.(?P<key>.+) tested in .+, (?P<value>\d+) queries"/> - <field name="generic"/> - <field name="description"/> - <field name="sequence"/> - </group> - </sheet> - </form> - </field> - </record> - - <record id="build_stat_regex_tree" model="ir.ui.view"> - <field name="name">runbot.build.stat.regex.tree</field> - <field name="model">runbot.build.stat.regex</field> - <field name="arch" type="xml"> - <tree string="Statistics Regexes"> - <field name="name"/> - <field name="generic"/> - <field name="description"/> - <field name="regex"/> - <field name="sequence" widget="handle"/> - </tree> - </field> - </record> - - <record id="open_view_stat_regex_tree" model="ir.actions.act_window"> - <field name="name">Stat regex</field> - <field name="res_model">runbot.build.stat.regex</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/upgrade.xml b/runbot/views/upgrade.xml deleted file mode 100644 index 5013a576..00000000 --- a/runbot/views/upgrade.xml +++ /dev/null @@ -1,88 
+0,0 @@ -<odoo> - <data> - <record model="ir.actions.server" id="action_parse_upgrade_errors"> - <field name="name">Parse upgrade errors</field> - <field name="model_id" ref="runbot.model_runbot_build" /> - <field name="binding_model_id" ref="runbot.model_runbot_build" /> - <field name="type">ir.actions.server</field> - <field name="state">code</field> - <field name="code"> - action = records._parse_upgrade_errors() - </field> - </record> - - <record id="upgrade_exception_tree" model="ir.ui.view"> - <field name="name">runbot.upgrade.exception</field> - <field name="model">runbot.upgrade.exception</field> - <field name="arch" type="xml"> - <tree string="Upgrade Exceptions"> - <field name="bundle_id" widget="frontend_url"/> - <field name="elements"/> - <field name="info"/> - <field name="create_date"/> - <field name="create_uid"/> - <field name="team_id"/> - </tree> - </field> - </record> - - - <record id="upgrade_exception_form" model="ir.ui.view"> - <field name="name">runbot.upgrade.exception</field> - <field name="model">runbot.upgrade.exception</field> - <field name="arch" type="xml"> - <form string="Upgrade Exceptions"> - <sheet> - <group> - <field name="active"/> - <field name="bundle_id" widget="frontend_url"/> - <field name="elements"/> - <field name="info"/> - <field name="create_date"/> - <field name="create_uid"/> - <field name="team_id"/> - <field name="message"/> - </group> - </sheet> - </form> - </field> - </record> - - - - <record id="upgrade_regex_tree" model="ir.ui.view"> - <field name="name">runbot.upgrade.regex</field> - <field name="model">runbot.upgrade.regex</field> - <field name="arch" type="xml"> - <tree string="Upgrade Regex"> - <field name="prefix"/> - <field name="regex"/> - </tree> - </field> - </record> - - <record id="upgrade_exception_search_view" model="ir.ui.view"> - <field name="name">runbot.upgrade.exception.filter</field> - <field name="model">runbot.upgrade.exception</field> - <field name="arch" type="xml"> - <search string="Search exceptions"> - <field name="elements"/> - <field name="bundle_id"/> - </search> - </field> - </record> - - <record id="open_view_upgrade_exception_tree" model="ir.actions.act_window"> - <field name="name">Upgrade Exceptions</field> - <field name="res_model">runbot.upgrade.exception</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="open_view_upgrade_regex_tree" model="ir.actions.act_window"> - <field name="name">Upgrade Regexes</field> - <field name="res_model">runbot.upgrade.regex</field> - <field name="view_mode">tree,form</field> - </record> - - </data> -</odoo> diff --git a/runbot/views/warning_views.xml b/runbot/views/warning_views.xml deleted file mode 100644 index db956caf..00000000 --- a/runbot/views/warning_views.xml +++ /dev/null @@ -1,22 +0,0 @@ -<odoo> - <data> - <record id="warning_view_tree" model="ir.ui.view"> - <field name="name">runbot.warning.tree</field> - <field name="model">runbot.warning</field> - <field name="arch" type="xml"> - <tree string="Runbot Warnings"> - <field name="write_date"/> - <field name="message"/> - <field name="count"/> - </tree> - </field> - </record> - - <record id="open_view_warning_tree" model="ir.actions.act_window"> - <field name="name">Warnings</field> - <field name="res_model">runbot.warning</field> - <field name="view_mode">tree</field> - </record> - - </data> -</odoo> diff --git a/runbot/wizards/__init__.py b/runbot/wizards/__init__.py deleted file mode 100644 index e1c64da3..00000000 --- a/runbot/wizards/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# 
-*- coding: utf-8 -*- - -from . import stat_regex_wizard diff --git a/runbot/wizards/stat_regex_wizard.py b/runbot/wizards/stat_regex_wizard.py deleted file mode 100644 index 82fe4b50..00000000 --- a/runbot/wizards/stat_regex_wizard.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -import re - -from odoo import fields, models, api -from odoo.exceptions import ValidationError -from odoo.addons.runbot.models.build_stat_regex import VALUE_PATTERN - - -class StatRegexWizard(models.TransientModel): - _name = 'runbot.build.stat.regex.wizard' - _description = "Stat Regex Wizard" - - name = fields.Char("Key Name") - regex = fields.Char("Regular Expression") - description = fields.Char("Description") - generic = fields.Boolean('Generic', help='Executed when no regex on the step', default=True) - test_text = fields.Text("Test text") - key = fields.Char("Key") - value = fields.Float("Value") - message = fields.Char("Wizard message") - - def _validate_regex(self): - try: - regex = re.compile(self.regex) - except re.error as e: - raise ValidationError("Unable to compile regular expression: %s" % e) - if not re.search(VALUE_PATTERN, regex.pattern): - raise ValidationError( - "The regular expresion should contain the name group pattern 'value' e.g: '(?P<value>.+)'" - ) - - @api.onchange('regex', 'test_text') - def _onchange_regex(self): - key = '' - value = False - self.message = '' - if self.regex and self.test_text: - self._validate_regex() - match = re.search(self.regex, self.test_text) - if match: - group_dict = match.groupdict() - try: - value = float(group_dict.get("value")) - except ValueError: - raise ValidationError('The matched value (%s) of "%s" cannot be converted into float' % (group_dict.get("value"), self.regex)) - key = ( - "%s.%s" % (self.name, group_dict["key"]) - if "key" in group_dict - else self.name - ) - else: - self.message = 'No match !' 
- self.key = key - self.value = value - - def save(self): - if self.regex and self.test_text: - self._validate_regex() - stat_regex = self.env['runbot.build.stat.regex'].create({ - 'name': self.name, - 'regex': self.regex, - 'description': self.description, - 'generic': self.generic, - }) - return { - 'name': 'Stat regex', - 'type': 'ir.actions.act_window', - 'res_model': 'runbot.build.stat.regex', - 'view_mode': 'form', - 'res_id': stat_regex.id - } diff --git a/runbot/wizards/stat_regex_wizard_views.xml b/runbot/wizards/stat_regex_wizard_views.xml deleted file mode 100644 index d52cfd67..00000000 --- a/runbot/wizards/stat_regex_wizard_views.xml +++ /dev/null @@ -1,39 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<odoo> - <data> - <record model="ir.ui.view" id="runbot_stat_regex_wizard_form"> - <field name="name">runbot_stat_regex_wizard</field> - <field name="model">runbot.build.stat.regex.wizard</field> - <field name="arch" type="xml"> - <form string="Regex"> - <group> - <field name="name"/> - <field name="regex"/> - <field name="description"/> - <field name="generic"/> - <field name="test_text"/> - </group> - <group> - <field name="key" readonly="1"/> - <field name="value" readonly="1"/> - <field name="message" readonly="1"/> - </group> - <footer> - <button name="save" string="Save" type="object" class="btn-primary"/> - <button string="Cancel" special="cancel" class="btn-default"/> - </footer> - </form> - </field> - </record> - - <record model="ir.actions.act_window" id="runbot_stat_regex_wizard_action"> - <field name="name">Generate Stat Regex</field> - <field name="type">ir.actions.act_window</field> - <field name="res_model">runbot.build.stat.regex.wizard</field> - <field name="view_mode">form</field> - <field name="view_id" ref="runbot_stat_regex_wizard_form"/> - <field name="target">new</field> - </record> - - </data> -</odoo> diff --git a/runbot_builder/builder.py b/runbot_builder/builder.py deleted file mode 100755 index 8000a481..00000000 --- a/runbot_builder/builder.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/python3 -from tools import RunbotClient, run -import logging - -_logger = logging.getLogger(__name__) - -class BuilderClient(RunbotClient): - - def on_start(self): - for repo in self.env['runbot.repo'].search([('mode', '!=', 'disabled')]): - repo._update(force=True) - - def loop_turn(self): - if self.count == 1: # cleanup at second iteration - self.env['runbot.runbot']._source_cleanup() - self.env['runbot.build']._local_cleanup() - self.env['runbot.runbot']._docker_cleanup() - self.host.set_psql_conn_count() - self.host._docker_build() - self.env['runbot.repo']._update_git_config() - self.git_gc() - return self.env['runbot.runbot']._scheduler_loop_turn(self.host) - - -if __name__ == '__main__': - run(BuilderClient) diff --git a/runbot_builder/dbmover.py b/runbot_builder/dbmover.py deleted file mode 100755 index 2ffab190..00000000 --- a/runbot_builder/dbmover.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/python3 -import argparse -import contextlib -import logging -import psycopg2 -import os -import re -import shutil -import sys - -from collections import defaultdict -from logging.handlers import WatchedFileHandler - -LOG_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s' -logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) -logging.getLogger('odoo.addons.runbot').setLevel(logging.DEBUG) -logging.addLevelName(25, "!NFO") - -_logger = logging.getLogger(__name__) - -DBRE = r'^(?P<build_id>\d+)-.+-[0-9a-f]{6}-?(?P<db_suffix>.*)$' - - 
-@contextlib.contextmanager -def local_pgadmin_cursor(): - cnx = None - try: - cnx = psycopg2.connect("dbname=postgres") - cnx.autocommit = True # required for admin commands - yield cnx.cursor() - finally: - if cnx: - cnx.close() - - -def list_local_dbs(): - with local_pgadmin_cursor() as local_cr: - local_cr.execute(""" - SELECT datname - FROM pg_database - WHERE pg_get_userbyid(datdba) = current_user - """) - return [d[0] for d in local_cr.fetchall()] - - -def _local_pg_rename_db(dbname, new_db_name): - with local_pgadmin_cursor() as local_cr: - pid_col = 'pid' if local_cr.connection.server_version >= 90200 else 'procpid' - query = 'SELECT pg_terminate_backend({}) FROM pg_stat_activity WHERE datname=%s'.format(pid_col) - local_cr.execute(query, [dbname]) - local_cr.execute("ALTER DATABASE \"%s\" RENAME TO \"%s\";" % (dbname, new_db_name)) - - -class RunbotClient(): - - def __init__(self, env): - self.env = env - - def rename_build_dirs(self, args): - builds_root = os.path.join(self.env['runbot.runbot']._root(), 'build') - builds_backup_root = os.path.join(self.env['runbot.runbot']._root(), 'build-backup') - if not args.dry_run: - try: - _logger.info('Backup build dir in "%s"', builds_backup_root) - shutil.copytree(builds_root, builds_backup_root, copy_function=os.link) - except FileExistsError: - _logger.info('Backup path "%s" already exists, skipping', builds_backup_root) - - build_dirs = {} - leftovers = [] - for dir_name in os.listdir(builds_root): - match = re.match(DBRE, dir_name) - if match and match['db_suffix'] == '': - build_dirs[match['build_id']] = dir_name - else: - leftovers.append(dir_name) - - for build in self.env['runbot.build'].search([('id', 'in', list(build_dirs.keys()))]): - origin_dir = build_dirs[str(build.id)] - origin_path = os.path.join(builds_root, origin_dir) - if origin_dir == build.dest: - _logger.info('Skip moving %s, already moved', build.dest) - continue - _logger.info('Moving "%s" --> "%s"', origin_dir, build.dest) - if args.dry_run: - continue - dest_path = os.path.join(builds_root, build.dest) - os.rename(origin_path, dest_path) - - for leftover in leftovers: - _logger.info("leftover: %s", leftover) - - def rename_databases(self, args): - total_db = 0 - db_names = defaultdict(dict) - leftovers = [] - for local_db_name in list_local_dbs(): - match = re.match(DBRE, local_db_name) - if match and match['db_suffix'] != '': - db_names[match['build_id']][match['db_suffix']] = local_db_name - else: - leftovers.append(local_db_name) - total_db += 1 - - nb_matching = 0 - ids = [int(i) for i in db_names.keys()] - builds = self.env['runbot.build'].search([('id', 'in', ids)]) - for build in builds: - for suffix in db_names[str(build.id)].keys(): - origin_name = db_names[str(build.id)][suffix] - dest_name = "%s-%s" % (build.dest, suffix) - nb_matching += 1 - _logger.info('Renaming database "%s" --> "%s"', origin_name, dest_name) - if args.dry_run: - continue - _local_pg_rename_db(origin_name, dest_name) - - _logger.info("Found %s databases", total_db) - _logger.info("Found %s matching databases", nb_matching) - _logger.info("Leftovers: %s", len(leftovers)) - _logger.info("Builds not found : %s", len(set(ids) - set(builds.ids))) - - -def run(): - # parse args - parser = argparse.ArgumentParser() - parser.add_argument('--odoo-path', help='Odoo sources path') - parser.add_argument('--db_host', default='127.0.0.1') - parser.add_argument('--db_port', default='5432') - parser.add_argument('--db_user') - parser.add_argument('--db_password') - parser.add_argument('-d', 
'--database', default='runbot_upgrade', help='name of runbot db') - parser.add_argument('--logfile', default=False) - parser.add_argument('-n', '--dry-run', action='store_true') - args = parser.parse_args() - if args.logfile: - dirname = os.path.dirname(args.logfile) - if dirname and not os.path.isdir(dirname): - os.makedirs(dirname) - - handler = WatchedFileHandler(args.logfile) - formatter = logging.Formatter(LOG_FORMAT) - handler.setFormatter(formatter) - _logger.parent.handlers.clear() - _logger.parent.addHandler(handler) - - # configure odoo - sys.path.append(args.odoo_path) - import odoo - _logger.info("Starting upgrade move script using database %s", args.database) - odoo.tools.config['db_host'] = args.db_host - odoo.tools.config['db_port'] = args.db_port - odoo.tools.config['db_user'] = args.db_user - odoo.tools.config['db_password'] = args.db_password - addon_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) - config_addons_path = odoo.tools.config['addons_path'] - odoo.tools.config['addons_path'] = ','.join([config_addons_path, addon_path]) - - # create environment - registry = odoo.registry(args.database) - with odoo.api.Environment.manage(): - with registry.cursor() as cr: - env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {}) - runbot_client = RunbotClient(env) - runbot_client.rename_build_dirs(args) - runbot_client.rename_databases(args) - - -if __name__ == '__main__': - run() - _logger.info("All done") diff --git a/runbot_builder/leader.py b/runbot_builder/leader.py deleted file mode 100755 index 47da297a..00000000 --- a/runbot_builder/leader.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/python3 -from tools import RunbotClient, run -import logging -import time - -_logger = logging.getLogger(__name__) - -class LeaderClient(RunbotClient): # Conductor, Director, Main, Maestro, Lead - def __init__(self, env): - self.pull_info_failures = {} - super().__init__(env) - - def on_start(self): - _logger.info('Updating all repos') - for repo in self.env['runbot.repo'].search([('mode', '!=', 'disabled')]): - repo._update(force=True) - _logger.info('update finished') - - def loop_turn(self): - if self.count == 0: - self.env['runbot.repo']._update_git_config() - self.git_gc() - return self.env['runbot.runbot']._fetch_loop_turn(self.host, self.pull_info_failures) - - -if __name__ == '__main__': - run(LeaderClient) diff --git a/runbot_builder/tester.py b/runbot_builder/tester.py deleted file mode 100755 index 3d0629d3..00000000 --- a/runbot_builder/tester.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python3 -from tools import RunbotClient, run -import logging - -_logger = logging.getLogger(__name__) - -class TesterClient(RunbotClient): - - def loop_turn(self): - _logger.info('='*50) - _logger.info('Testing: %s', self.env['runbot.build'].search_count([('local_state', '=', 'testing')])) - _logger.info('Pending: %s', self.env['runbot.build'].search_count([('local_state', '=', 'pending')])) - return 10 - -if __name__ == '__main__': - run(TesterClient) diff --git a/runbot_builder/tools.py b/runbot_builder/tools.py deleted file mode 100644 index cc0e42dd..00000000 --- a/runbot_builder/tools.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/python3 -import argparse -import logging -import os -import sys -import threading -import random -import signal - -from datetime import datetime, timedelta -from logging.handlers import WatchedFileHandler - -LOG_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s' -logging.basicConfig(level=logging.INFO, 
format=LOG_FORMAT) -logging.getLogger('odoo.addons.runbot').setLevel(logging.DEBUG) -logging.addLevelName(25, "!NFO") - -_logger = logging.getLogger(__name__) - - -class RunbotClient(): - - def __init__(self, env): - self.env = env - self.ask_interrupt = threading.Event() - self.host = None - self.count = 0 - self.max_count = 60 - - def on_start(self): - pass - - def main_loop(self): - from odoo import fields - self.on_start() - signal.signal(signal.SIGINT, self.signal_handler) - signal.signal(signal.SIGTERM, self.signal_handler) - signal.signal(signal.SIGQUIT, self.dump_stack) - self.host = self.env['runbot.host']._get_current() - self.update_next_git_gc_date() - self.host._bootstrap() - logging.info( - 'Host %s running with %s slots on pid %s%s', - self.host.name, - self.host.nb_worker, - os.getpid(), - ' (assigned only)' if self.host.assigned_only else '' - ) - while True: - try: - self.host.last_start_loop = fields.Datetime.now() - self.env.cr.commit() - self.count = self.count % self.max_count - sleep_time = self.loop_turn() - self.count += 1 - self.host.last_end_loop = fields.Datetime.now() - self.env.cr.commit() - self.env.clear() - self.sleep(sleep_time) - except Exception as e: - _logger.exception('Builder main loop failed with: %s', e) - self.env.cr.rollback() - self.env.clear() - self.sleep(10) - if self.ask_interrupt.is_set(): - return - - def loop_turn(self): - raise NotImplementedError() - - def signal_handler(self, _signal, _frame): - if self.ask_interrupt.is_set(): - _logger.info("Second Interrupt detected, force exit") - os._exit(1) - - _logger.info("Interrupt detected") - self.ask_interrupt.set() - - def dump_stack(self, _signal, _frame): - import odoo - odoo.tools.misc.dumpstacks() - - def sleep(self, t): - self.ask_interrupt.wait(t) - - def update_next_git_gc_date(self): - now = datetime.now() - gc_hour = int(self.env['ir.config_parameter'].sudo().get_param('runbot.git_gc_hour', '23')) - gc_minutes = self.host.id % 60 # deterministic minutes - self.next_git_gc_date = datetime(now.year, now.month, now.day, gc_hour, gc_minutes) - while self.next_git_gc_date <= now: - self.next_git_gc_date += timedelta(days=1) - _logger.info('Next git gc scheduled on %s', self.next_git_gc_date) - - def git_gc(self): - """ git gc once a day """ - if self.next_git_gc_date < datetime.now(): - _logger.info('Starting git gc on repositories') - self.env['runbot.runbot']._git_gc(self.host) - self.update_next_git_gc_date() - -def run(client_class): - # parse args - parser = argparse.ArgumentParser() - parser.add_argument('--odoo-path', help='Odoo sources path') - parser.add_argument('--db_host') - parser.add_argument('--db_port') - parser.add_argument('--db_user') - parser.add_argument('--db_password') - parser.add_argument('-d', '--database', default='runbot', help='name of runbot db') - parser.add_argument('--logfile', default=False) - parser.add_argument('--forced-host-name', default=False) - - args = parser.parse_args() - if args.logfile: - dirname = os.path.dirname(args.logfile) - if dirname and not os.path.isdir(dirname): - os.makedirs(dirname) - - handler = WatchedFileHandler(args.logfile) - formatter = logging.Formatter(LOG_FORMAT) - handler.setFormatter(formatter) - logging.getLogger().addHandler(handler) - - # configure odoo - sys.path.append(args.odoo_path) - import odoo - _logger.info("Starting scheduler on database %s", args.database) - odoo.tools.config['db_host'] = args.db_host - odoo.tools.config['db_port'] = args.db_port - odoo.tools.config['db_user'] = args.db_user - 
odoo.tools.config['db_password'] = args.db_password - addon_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) - config_addons_path = odoo.tools.config['addons_path'] - odoo.tools.config['addons_path'] = ','.join([config_addons_path, addon_path]) - - odoo.tools.config['forced_host_name'] = args.forced_host_name - - # create environment - registry = odoo.registry(args.database) - with odoo.api.Environment.manage(): - with registry.cursor() as cr: - env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {}) - client = client_class(env) - # run main loop - try: - client.main_loop() - except Exception as e: - _logger.exception(str(e)) - raise e - _logger.info("Stopping gracefully") diff --git a/runbot_cla/__init__.py b/runbot_cla/__init__.py deleted file mode 100644 index 060836a7..00000000 --- a/runbot_cla/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . import build_config diff --git a/runbot_cla/__manifest__.py b/runbot_cla/__manifest__.py deleted file mode 100644 index 0f393fc2..00000000 --- a/runbot_cla/__manifest__.py +++ /dev/null @@ -1,13 +0,0 @@ -{ - 'name': 'Runbot CLA', - 'category': 'Website', - 'summary': 'Runbot CLA', - 'version': '2.1', - 'description': "Runbot CLA", - 'author': 'Odoo SA', - 'depends': ['runbot'], - 'data': [ - 'data/runbot_build_config_data.xml', - ], - 'license': 'LGPL-3', -} diff --git a/runbot_cla/build_config.py b/runbot_cla/build_config.py deleted file mode 100644 index 6cd6800e..00000000 --- a/runbot_cla/build_config.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- encoding: utf-8 -*- - -import glob -import io -import logging -import re - -from odoo import models, fields - -_logger = logging.getLogger(__name__) - - -class Step(models.Model): - _inherit = "runbot.build.config.step" - - job_type = fields.Selection(selection_add=[('cla_check', 'Check cla')], ondelete={'cla_check': 'cascade'}) - - def _run_cla_check(self, build, log_path): - build._checkout() - cla_glob = glob.glob(build._get_server_commit()._source_path("doc/cla/*/*.md")) - error = False - checked = set() - if cla_glob: - for commit in build.params_id.commit_ids: - email = commit.author_email - if email in checked: - continue - checked.add(email) - build._log('check_cla', "[Odoo CLA signature](https://www.odoo.com/sign-cla) check for %s (%s) " % (commit.author, email), log_type='markdown') - mo = re.search('[^ <@]+@[^ @>]+', email or '') - if mo: - email = mo.group(0).lower() - if not re.match('.*@(odoo|openerp|tinyerp)\.com$', email): - try: - cla = ''.join(io.open(f, encoding='utf-8').read() for f in cla_glob) - if cla.lower().find(email) == -1: - error = True - build._log('check_cla', 'Email not found in cla file %s' % email, level="ERROR") - except UnicodeDecodeError: - error = True - build._log('check_cla', 'Invalid CLA encoding (must be utf-8)', level="ERROR") - else: - error = True - build._log('check_cla', 'Invalid email format %s' % email, level="ERROR") - else: - error = True - build._log('check_cla', "Missing cla file", level="ERROR") - - if error: - build.local_result = 'ko' - elif not build.local_result: - build.local_result = 'ok' diff --git a/runbot_cla/data/runbot_build_config_data.xml b/runbot_cla/data/runbot_build_config_data.xml deleted file mode 100644 index 89092a02..00000000 --- a/runbot_cla/data/runbot_build_config_data.xml +++ /dev/null @@ -1,8 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<odoo> - <record id="runbot_build_config_step_check_cla" model="runbot.build.config.step"> - <field name="name">cla_check</field> - <field 
name="job_type">cla_check</field> - <field name="protected" eval="True"/> - </record> -</odoo> diff --git a/runbot_merge/README.rst b/runbot_merge/README.rst deleted file mode 100644 index b37066ce..00000000 --- a/runbot_merge/README.rst +++ /dev/null @@ -1,221 +0,0 @@ -Merge Bot -========= - -Odoo workflow -------------- - -The sticky branches are protected on the github odoo project to restrict -push for the Merge Bot (MB) only. - -The MB only works with PR's using the github API. - -1. When a PR is created the github notifies the MB. The MB labels the PR - as 'seen 🙂' on github [#]_. - -2. Once the PR github statuses are green [#]_ , the MB labels the PR as - 'CI 🤖'. - -3. When a reviewer, known by the MB, approves the PR, the MB labels that - PR as 'r+ 👌'. - -4. At this moment, MB tries to merge the PR and labels the PR with - 'merging 👷'. - -5. If the merge is successfull, MB labels it 'merged 🎉', removes the - label 'merging 👷' and closes the PR. A message from MB gives a link - to the merge's commit [#]_. - -If an error occurs during the step 4, MB labels the PR with 'error 🙅' -and adds a message in the conversion stating what kind of error. For -example 'Unable to stage PR (merge conflict)'. - -If a new commit is pushed in the PR, the process starts again from the -begining. - -It's possible to interact with the MB by the way of github messages -containing `Commands`_. The message must start with the MB name (for -instance 'robodoo'). - -.. [#] Any activity on a PR the MB hasn't seen yet will bring it to the - MB's attention. e.g a comment on a PR. - -.. [#] At this moment the statuses are: Runbot build is green and CLA is - signed if needed. The expected statuses may change in the future. - -.. [#] If a PR contains only one commit, the PR is rebased and the - commit is fast forwarded. With more than one commit, the PR is - rebased and the commits are merged with a merge commit. When one - wants to avoid the rebase, 'rebase-' command should be used. - -Setup ------ - -* Setup a project with relevant repositories and branches the bot - should manage (e.g. odoo/odoo and 10.0). -* Set up reviewers (github_login + boolean flag on partners). -* Add "Issue comments", "Pull request reviews", "Pull requests" and - "Statuses" webhooks to managed repositories. -* If applicable, add "Statuses" webhook to the *source* repositories. - - Github does not seem to send statuses cross-repository when commits - get transmigrated so if a user creates a branch in odoo-dev/odoo, - waits for CI to run then creates a PR targeted to odoo/odoo the PR - will never get status-checked (unless we modify runbot to re-send - statuses on pull_request webhook). - -Working Principles ------------------- - -Useful information (new PRs, CI, comments, ...) is pushed to the MB -via webhooks. Most of the staging work is performed via a cron job: - -1. for each active staging, check if they are done - - 1. if successful - - * ``push --ff`` to target branches - * close PRs - - 2. if only one batch, mark as failed - - for batches of multiple PRs, the MB attempts to infer which - specific PR failed - - 3. otherwise split staging in 2 (bisection search of problematic - batch) - -2. for each branch with no active staging - - * if there are inactive stagings, stage one of them - * otherwise look for batches targeted to that PR (PRs grouped by - label with branch as target) - * attempt staging - - 1. reset temp branches (one per repo) to corresponding targets - 2. 
merge each batch's PR into the relevant temp branch - - * on merge failure, mark PRs as failed - - 3. once no more batches or limit reached, reset staging branches to - tmp - 4. mark staging as active - -Commands --------- - -A command string is a line starting with the mergebot's name and -followed by various commands. Self-reviewers count as reviewers for -the purpose of their own PRs, but delegate reviewers don't. - -retry - resets a PR in error mode to ready for staging - - can be used by a reviewer or the PR author to re-stage the PR after - it's been updated or the target has been updated & fixed. - -r(eview)+ - approves a PR, can be used by a reviewer or delegate reviewer - - submitting an "approve" review implicitly r+'s the PR - -r(eview)- - removes approval from a PR, allows un-reviewing a PR in error (staging - failed) so it can be updated and re-submitted - -.. squash+/squash- -.. marks the PR as squash or merge, can override squash inference or a -.. previous squash command, can only be used by reviewers - -delegate+/delegate=<users> - adds either the PR author or the specified (GitHub) users as authorised - reviewers for this PR. ``<users>`` is a comma-separated list of - GitHub usernames (no @), can be used by reviewers - -p(riority)=2|1|0 - sets the priority to normal (2), pressing (1) or urgent (0), - lower-priority PRs are selected first and batched together, can be - used by reviewers - -rebase- - the default merge mode is to rebase and merge the PR into the - target, however for some situations this is not suitable and - a regular merge is necessary; this command toggles rebasing - mode off (and thus back to a regular merge) - -Structure ---------- - -A *project* is used to manage multiple *repositories* across many -*branches*. - -Each *PR* targets a specific branch in a specific repository. - -A *batch* is a number of co-dependent PRs, PRs which are assumed to -depend on one another (the exact relationship is irrelevant) and thus -always need to be batched together. Batches are normally created on -the fly during staging. - -A *staging* is a number of batches (up to 8 by default) which will be -tested together, and split if CI fails. Each staging applies to a -single *branch* (the target) across all managed repositories. Stagings -can be active (currently live on the various staging branches) or -inactive (to be staged later, generally as a result of splitting a -failed staging). - -Notes ------- - -* When looking for stageable batches, priority is taken into account and - is isolating, e.g. if there's a single high-priority PR, low-priority - PRs are ignored completely and only that one will be staged on its own -* Reviewers are set up on partners so we can e.g. have author-tracking - & delegate reviewers without needing to create proper users for - every contributor. -* MB collates statuses on commits independently from other objects, so - a commit getting CI'd in odoo-dev/odoo then made into a PR on - odoo/odoo should be correctly interpreted assuming odoo-dev/odoo - sent its statuses to the MB. -* GitHub does not support transactional sequences of API calls, so - it's possible that "intermediate" staging states are visible & have - to be rolled back, e.g. a staging succeeds in a 2-repo scenario, - A.{target} is ff-d to A.{staging}, then B.{target}'s ff to - B.{staging} fails, we have to roll back A.{target}. -* Co-dependence is currently inferred through *labels*, which is a - pair of ``{repo}:{branchname}`` e.g. odoo-dev:11.0-pr-flanker-jke.
- If this label is present in a PR to A and a PR to B, these two - PRs will be collected into a single batch to ensure they always - get batched (and failed) together. - -Previous Work -------------- - -bors-ng -~~~~~~~ - -* r+: accept (only for trusted reviewers) -* r-: unaccept -* r=users...: accept on behalf of users -* delegate+: allows author to self-review -* delegate=users...: allow non-reviewers users to review -* try: stage build (to separate branch) but don't merge on succes - -Why not bors-ng -############### - -* no concurrent staging (can only stage one target at a time) -* can't do co-dependent repositories/multi-repo staging -* cancels/forgets r+'d branches on FF failure (emergency pushes) - instead of re-staging - -homu -~~~~ - -Additionally to bors-ng's: - -* SHA option on r+/r=, guards -* p=NUMBER: set priority (unclear if best = low/high) -* rollup/rollup-: should be default -* retry: re-attempt PR (flaky?) -* delegate-: remove delegate+/delegate= -* force: ??? -* clean: ??? diff --git a/runbot_merge/__init__.py b/runbot_merge/__init__.py deleted file mode 100644 index f4821304..00000000 --- a/runbot_merge/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -import logging -from os import environ - -import sentry_sdk -from sentry_sdk.integrations.logging import LoggingIntegration -from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware - -from odoo import http -from . import models, controllers - -def delegate(self, attr): - return getattr(self.app, attr) -SentryWsgiMiddleware.__getattr__ = delegate - -def enable_sentry(): - logger = logging.getLogger('runbot_merge') - - dsn = environ.get('SENTRY_DSN') - if not dsn: - logger.info("No DSN found, skipping sentry...") - return - - try: - sentry_sdk.init( - dsn, - integrations=[ - # note: if the colorformatter is enabled, sentry gets lost - # and classifies everything as errors because it fails to - # properly classify levels as the colorformatter injects - # the ANSI color codes right into LogRecord.levelname - LoggingIntegration(level=logging.INFO, event_level=logging.WARNING), - ] - ) - http.root = SentryWsgiMiddleware(http.root) - except Exception: - logger.exception("DSN found, failed to enable sentry...") - else: - logger.info("DSN found, sentry enabled...") - -def _check_citext(cr): - cr.execute("select 1 from pg_extension where extname = 'citext'") - if not cr.rowcount: - try: - cr.execute('create extension citext') - except Exception: - raise AssertionError("runbot_merge needs the citext extension") diff --git a/runbot_merge/__manifest__.py b/runbot_merge/__manifest__.py deleted file mode 100644 index 9f44e106..00000000 --- a/runbot_merge/__manifest__.py +++ /dev/null @@ -1,21 +0,0 @@ -{ - 'name': 'merge bot', - 'version': '1.7', - 'depends': ['contacts', 'website'], - 'data': [ - 'security/security.xml', - 'security/ir.model.access.csv', - - 'data/merge_cron.xml', - 'views/res_partner.xml', - 'views/runbot_merge_project.xml', - 'views/mergebot.xml', - 'views/queues.xml', - 'views/configuration.xml', - 'views/templates.xml', - 'models/project_freeze/views.xml', - ], - 'post_load': 'enable_sentry', - 'pre_init_hook': '_check_citext', - 'license': 'LGPL-3', -} diff --git a/runbot_merge/changelog/2021-09/conflict_authorship.md b/runbot_merge/changelog/2021-09/conflict_authorship.md deleted file mode 100644 index 51bdc24a..00000000 --- a/runbot_merge/changelog/2021-09/conflict_authorship.md +++ /dev/null @@ -1 +0,0 @@ -ADD: refuse merging commits without an email set, this is mostly to be used by the forwardport-bot diff 
--git a/runbot_merge/changelog/2021-09/different_project_link.md b/runbot_merge/changelog/2021-09/different_project_link.md deleted file mode 100644 index 7de4f0d3..00000000 --- a/runbot_merge/changelog/2021-09/different_project_link.md +++ /dev/null @@ -1 +0,0 @@ -FIX: two PRs with the same label in different projects should not be considered linked anymore diff --git a/runbot_merge/changelog/2021-09/drafts.md b/runbot_merge/changelog/2021-09/drafts.md deleted file mode 100644 index c986709c..00000000 --- a/runbot_merge/changelog/2021-09/drafts.md +++ /dev/null @@ -1 +0,0 @@ -ADD: mergebot should not accept merging draft PR anymore diff --git a/runbot_merge/changelog/2021-09/fetch_closed.md b/runbot_merge/changelog/2021-09/fetch_closed.md deleted file mode 100644 index 446c6357..00000000 --- a/runbot_merge/changelog/2021-09/fetch_closed.md +++ /dev/null @@ -1 +0,0 @@ -FIX: when fetching an unknown PR and it's closed, don't lose that information diff --git a/runbot_merge/changelog/2021-09/persistent_linked_prs.md b/runbot_merge/changelog/2021-09/persistent_linked_prs.md deleted file mode 100644 index db9cbe5e..00000000 --- a/runbot_merge/changelog/2021-09/persistent_linked_prs.md +++ /dev/null @@ -1 +0,0 @@ -IMP: keep showing linked PRs after a PR has been merged diff --git a/runbot_merge/changelog/2021-09/rebase_tagging.md b/runbot_merge/changelog/2021-09/rebase_tagging.md deleted file mode 100644 index 554c34a8..00000000 --- a/runbot_merge/changelog/2021-09/rebase_tagging.md +++ /dev/null @@ -1 +0,0 @@ -ADD: when integrating a PR via rebasing, tag all the commits with the source PR so they're easier to find diff --git a/runbot_merge/changelog/2021-09/staging_failure_message.md b/runbot_merge/changelog/2021-09/staging_failure_message.md deleted file mode 100644 index 26064528..00000000 --- a/runbot_merge/changelog/2021-09/staging_failure_message.md +++ /dev/null @@ -1 +0,0 @@ -FIX: when a PR fails at staging, link the correct status in the message posted on the PR diff --git a/runbot_merge/changelog/2021-09/timestamps.md b/runbot_merge/changelog/2021-09/timestamps.md deleted file mode 100644 index 35213013..00000000 --- a/runbot_merge/changelog/2021-09/timestamps.md +++ /dev/null @@ -1 +0,0 @@ -IMP: cleanup timestamp displays, always show the tzoffset, UTC on hover in the main page (easier to relate to logs), local in the per-branch listing diff --git a/runbot_merge/changelog/2021-10/changelog.md b/runbot_merge/changelog/2021-10/changelog.md deleted file mode 100644 index 2c7e19e2..00000000 --- a/runbot_merge/changelog/2021-10/changelog.md +++ /dev/null @@ -1 +0,0 @@ -ADD: a changelog feature you can now see here diff --git a/runbot_merge/changelog/2021-10/commit-title-edition.md b/runbot_merge/changelog/2021-10/commit-title-edition.md deleted file mode 100644 index db2ca200..00000000 --- a/runbot_merge/changelog/2021-10/commit-title-edition.md +++ /dev/null @@ -1 +0,0 @@ -FIX: don't rewrite commit titles, this can lead to odd effects when it's incorrectly formatted and interpreted as a pseudo-header diff --git a/runbot_merge/changelog/2021-10/pr_description_up_to_date.md b/runbot_merge/changelog/2021-10/pr_description_up_to_date.md deleted file mode 100644 index cdbfbad1..00000000 --- a/runbot_merge/changelog/2021-10/pr_description_up_to_date.md +++ /dev/null @@ -1 +0,0 @@ -FIX: ensure the merge message matches the up-to-date PR descriptions, the two could desync if we'd missed an update diff --git a/runbot_merge/changelog/2021-10/pr_errors.md 
b/runbot_merge/changelog/2021-10/pr_errors.md deleted file mode 100644 index 069cd0f5..00000000 --- a/runbot_merge/changelog/2021-10/pr_errors.md +++ /dev/null @@ -1 +0,0 @@ -FIX: correctly display the error message when a PR is in error diff --git a/runbot_merge/changelog/2021-10/pr_page.md b/runbot_merge/changelog/2021-10/pr_page.md deleted file mode 100644 index 0cfee6a1..00000000 --- a/runbot_merge/changelog/2021-10/pr_page.md +++ /dev/null @@ -1 +0,0 @@ -IMP: add reviewer and direct link to backend in PR pages diff --git a/runbot_merge/changelog/2021-10/review-without-email.md b/runbot_merge/changelog/2021-10/review-without-email.md deleted file mode 100644 index 167038ed..00000000 --- a/runbot_merge/changelog/2021-10/review-without-email.md +++ /dev/null @@ -1 +0,0 @@ -CHG: reject reviewers without an email configured, the fallback to `@users.noreply.github.com` turns out to be confusing diff --git a/runbot_merge/changelog/2021-10/reviewer-merge-methods.md b/runbot_merge/changelog/2021-10/reviewer-merge-methods.md deleted file mode 100644 index 95202210..00000000 --- a/runbot_merge/changelog/2021-10/reviewer-merge-methods.md +++ /dev/null @@ -1 +0,0 @@ -IMP: allow delegate reviewers to set merge methods diff --git a/runbot_merge/changelog/2021-10/squash.md b/runbot_merge/changelog/2021-10/squash.md deleted file mode 100644 index 9441cfe2..00000000 --- a/runbot_merge/changelog/2021-10/squash.md +++ /dev/null @@ -1 +0,0 @@ -ADD: squash-mode, currently only for single-commit PRs to make it easier to edit commit messages when they're incorrectly formatted diff --git a/runbot_merge/changelog/2022-06/alerts.md b/runbot_merge/changelog/2022-06/alerts.md deleted file mode 100644 index 3335a92b..00000000 --- a/runbot_merge/changelog/2022-06/alerts.md +++ /dev/null @@ -1 +0,0 @@ -IMP: show current alerts (disabled crons) on the PR pages diff --git a/runbot_merge/changelog/2022-06/branch.md b/runbot_merge/changelog/2022-06/branch.md deleted file mode 100644 index 6c7c302b..00000000 --- a/runbot_merge/changelog/2022-06/branch.md +++ /dev/null @@ -1,4 +0,0 @@ -IMP: automatically close PRs when their target branch is deactivated - -Leave a message on the PRs to explain, such PRs should also be reopen-able if -the users wants to retarget them. diff --git a/runbot_merge/changelog/2022-06/empty-body.md b/runbot_merge/changelog/2022-06/empty-body.md deleted file mode 100644 index 093962f5..00000000 --- a/runbot_merge/changelog/2022-06/empty-body.md +++ /dev/null @@ -1,4 +0,0 @@ -FIX: correctly handle PR empty PR descriptions - -Github's webhook for this case are weird, and weren't handled correctly, -updating a PR's description to *or from* empty might be mishandled. diff --git a/runbot_merge/changelog/2022-06/pinging.md b/runbot_merge/changelog/2022-06/pinging.md deleted file mode 100644 index d82cc56a..00000000 --- a/runbot_merge/changelog/2022-06/pinging.md +++ /dev/null @@ -1,3 +0,0 @@ -IMP: review pinging (`@`-notification) of users by the mergebot and forwardbot - -The bots should more consistently ping users when they need some sort of action to proceed. 
diff --git a/runbot_merge/changelog/2022-06/provisioning.md b/runbot_merge/changelog/2022-06/provisioning.md deleted file mode 100644 index a0a47f27..00000000 --- a/runbot_merge/changelog/2022-06/provisioning.md +++ /dev/null @@ -1 +0,0 @@ -ADD: automated provisioning of accounts from odoo.com diff --git a/runbot_merge/changelog/2022-06/ui.md b/runbot_merge/changelog/2022-06/ui.md deleted file mode 100644 index fbd83d62..00000000 --- a/runbot_merge/changelog/2022-06/ui.md +++ /dev/null @@ -1,8 +0,0 @@ -IMP: various UI items - -- more clearly differentiate between "pending" and "unknown" statuses on stagings -- fix "outstanding forward ports" count -- add date of staging last modification (= success / failure instant) -- correctly retrieve and include fast-forward and unstaging reasons -- show the warnings banner (e.g. staging disabled) on the PR pages, as not all - users routinely visit the main dashboard diff --git a/runbot_merge/changelog/2022-06/unstaging.md b/runbot_merge/changelog/2022-06/unstaging.md deleted file mode 100644 index 273480da..00000000 --- a/runbot_merge/changelog/2022-06/unstaging.md +++ /dev/null @@ -1 +0,0 @@ -FIX: properly unstage pull requests when they're retargeted (base branch is changed) diff --git a/runbot_merge/controllers/__init__.py b/runbot_merge/controllers/__init__.py deleted file mode 100644 index 7e134af3..00000000 --- a/runbot_merge/controllers/__init__.py +++ /dev/null @@ -1,321 +0,0 @@ -import hashlib -import hmac -import logging -import json - -import werkzeug.exceptions - -from odoo.http import Controller, request, route - -from . import dashboard -from . import reviewer_provisioning -from .. import utils, github - -_logger = logging.getLogger(__name__) - -class MergebotController(Controller): - @route('/runbot_merge/hooks', auth='none', type='json', csrf=False, methods=['POST']) - def index(self): - req = request.httprequest - event = req.headers['X-Github-Event'] - - github._gh.info(self._format(req)) - - c = EVENTS.get(event) - if not c: - _logger.warning('Unknown event %s', event) - return 'Unknown event {}'.format(event) - - repo = request.jsonrequest['repository']['full_name'] - env = request.env(user=1) - - secret = env['runbot_merge.repository'].search([ - ('name', '=', repo), - ]).project_id.secret - if secret: - signature = 'sha1=' + hmac.new(secret.encode('ascii'), req.get_data(), hashlib.sha1).hexdigest() - if not hmac.compare_digest(signature, req.headers.get('X-Hub-Signature', '')): - _logger.warning("Ignored hook with incorrect signature %s", - req.headers.get('X-Hub-Signature')) - return werkzeug.exceptions.Forbidden() - - return c(env, request.jsonrequest) - - def _format(self, request): - return """<= {r.method} {r.full_path} -{headers} -{body} -vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv -""".format( - r=request, - headers='\n'.join( - '\t%s: %s' % entry for entry in request.headers.items() - ), - body=utils.shorten(request.get_data(as_text=True).strip(), 400) - ) - -def handle_pr(env, event): - if event['action'] in [ - 'assigned', 'unassigned', 'review_requested', 'review_request_removed', - 'labeled', 'unlabeled' - ]: - _logger.debug( - 'Ignoring pull_request[%s] on %s#%s', - event['action'], - event['pull_request']['base']['repo']['full_name'], - event['pull_request']['number'], - ) - return 'Ignoring' - - pr = event['pull_request'] - r = pr['base']['repo']['full_name'] - b = pr['base']['ref'] - - repo = env['runbot_merge.repository'].search([('name', '=', r)]) - if not repo: - 
_logger.warning("Received a PR for %s but not configured to handle that repo", r) - # sadly shit's retarded so odoo json endpoints really mean - # jsonrpc and it's LITERALLY NOT POSSIBLE TO REPLY WITH - # ACTUAL RAW HTTP RESPONSES and thus not possible to - # report actual errors to the webhooks listing thing on - # github (not that we'd be looking at them but it'd be - # useful for tests) - return "Not configured to handle {}".format(r) - - # PRs to unmanaged branches are not necessarily abnormal and - # we don't care - branch = env['runbot_merge.branch'].with_context(active_test=False).search([ - ('name', '=', b), - ('project_id', '=', repo.project_id.id), - ]) - - def feedback(**info): - return env['runbot_merge.pull_requests.feedback'].create({ - 'repository': repo.id, - 'pull_request': pr['number'], - **info, - }) - def find(target): - return env['runbot_merge.pull_requests'].search([ - ('repository', '=', repo.id), - ('number', '=', pr['number']), - ('target', '=', target.id), - ]) - # edition difficulty: pr['base']['ref] is the *new* target, the old one - # is at event['change']['base']['ref'] (if the target changed), so edition - # handling must occur before the rest of the steps - if event['action'] == 'edited': - source = event['changes'].get('base', {'ref': {'from': b}})['ref']['from'] - source_branch = env['runbot_merge.branch'].with_context(active_test=False).search([ - ('name', '=', source), - ('project_id', '=', repo.project_id.id), - ]) - # retargeting to un-managed => delete - if not branch: - pr = find(source_branch) - pr.unlink() - return 'Retargeted {} to un-managed branch {}, deleted'.format(pr.id, b) - - # retargeting from un-managed => create - if not source_branch: - return handle_pr(env, dict(event, action='opened')) - - pr_obj = find(source_branch) - updates = {} - if source_branch != branch: - if branch != pr_obj.target: - updates['target'] = branch.id - updates['squash'] = pr['commits'] == 1 - - # turns out github doesn't bother sending a change key if the body is - # changing from empty (None), therefore ignore that entirely, just - # generate the message and check if it changed - message = pr['title'].strip() - body = (pr['body'] or '').strip() - if body: - message += f"\n\n{body}" - if message != pr_obj.message: - updates['message'] = message - - _logger.info("update: %s#%d = %s (by %s)", repo.name, pr['number'], updates, event['sender']['login']) - if updates: - pr_obj.write(updates) - return 'Updated {}'.format(pr_obj.id) - return "Nothing to update ({})".format(event['changes'].keys()) - - message = None - if not branch: - message = f"This PR targets the un-managed branch {r}:{b}, it needs to be retargeted before it can be merged." - _logger.info("Ignoring event %s on PR %s#%d for un-managed branch %s", - event['action'], r, pr['number'], b) - elif not branch.active: - message = f"This PR targets the disabled branch {r}:{b}, it needs to be retargeted before it can be merged." 
- if message and event['action'] not in ('synchronize', 'closed'): - feedback(message=message) - - if not branch: - return "Not set up to care about {}:{}".format(r, b) - - - _logger.info("%s: %s#%s (%s) (by %s)", event['action'], repo.name, pr['number'], pr['title'].strip(), event['sender']['login']) - if event['action'] == 'opened': - author_name = pr['user']['login'] - author = env['res.partner'].search([('github_login', '=', author_name)], limit=1) - if not author: - env['res.partner'].create({'name': author_name, 'github_login': author_name}) - pr_obj = env['runbot_merge.pull_requests']._from_gh(pr) - return "Tracking PR as {}".format(pr_obj.id) - - pr_obj = env['runbot_merge.pull_requests']._get_or_schedule(r, pr['number']) - if not pr_obj: - _logger.info("webhook %s on unknown PR %s#%s, scheduled fetch", event['action'], repo.name, pr['number']) - return "Unknown PR {}:{}, scheduling fetch".format(repo.name, pr['number']) - if event['action'] == 'synchronize': - if pr_obj.head == pr['head']['sha']: - return 'No update to pr head' - - if pr_obj.state in ('closed', 'merged'): - _logger.error("Tentative sync to closed PR %s", pr_obj.display_name) - return "It's my understanding that closed/merged PRs don't get sync'd" - - if pr_obj.state == 'ready': - pr_obj.unstage("updated by %s", event['sender']['login']) - - _logger.info( - "PR %s updated to %s by %s, resetting to 'open' and squash=%s", - pr_obj.display_name, - pr['head']['sha'], event['sender']['login'], - pr['commits'] == 1 - ) - - pr_obj.write({ - 'state': 'opened', - 'head': pr['head']['sha'], - 'squash': pr['commits'] == 1, - }) - return 'Updated {} to {}'.format(pr_obj.display_name, pr_obj.head) - - if event['action'] == 'ready_for_review': - pr_obj.draft = False - return f'Updated {pr_obj.display_name} to ready' - if event['action'] == 'converted_to_draft': - pr_obj.draft = True - return f'Updated {pr_obj.display_name} to draft' - - # don't marked merged PRs as closed (!!!) - if event['action'] == 'closed' and pr_obj.state != 'merged': - oldstate = pr_obj.state - if pr_obj._try_closing(event['sender']['login']): - _logger.info( - '%s closed %s (state=%s)', - event['sender']['login'], - pr_obj.display_name, - oldstate, - ) - return 'Closed {}'.format(pr_obj.display_name) - else: - _logger.warning( - '%s tried to close %s (state=%s)', - event['sender']['login'], - pr_obj.display_name, - oldstate, - ) - return 'Ignored: could not lock rows (probably being merged)' - - if event['action'] == 'reopened' : - if pr_obj.state == 'merged': - feedback( - close=True, - message="@%s ya silly goose you can't reopen a merged PR." 
% event['sender']['login'] - ) - - if pr_obj.state == 'closed': - _logger.info('%s reopening %s', event['sender']['login'], pr_obj.display_name) - pr_obj.write({ - 'state': 'opened', - # updating the head triggers a revalidation - 'head': pr['head']['sha'], - 'squash': pr['commits'] == 1, - }) - - return 'Reopened {}'.format(pr_obj.display_name) - - _logger.info("Ignoring event %s on PR %s", event['action'], pr['number']) - return "Not handling {} yet".format(event['action']) - -def handle_status(env, event): - _logger.info( - 'status on %(sha)s %(context)s:%(state)s (%(target_url)s) [%(description)r]', - event - ) - status_value = json.dumps({ - event['context']: { - 'state': event['state'], - 'target_url': event['target_url'], - 'description': event['description'] - } - }) - # create status, or merge update into commit *unless* the update is already - # part of the status (dupe status) - env.cr.execute(""" - INSERT INTO runbot_merge_commit AS c (sha, to_check, statuses) - VALUES (%s, true, %s) - ON CONFLICT (sha) DO UPDATE - SET to_check = true, - statuses = c.statuses::jsonb || EXCLUDED.statuses::jsonb - WHERE NOT c.statuses::jsonb @> EXCLUDED.statuses::jsonb - """, [event['sha'], status_value]) - - return 'ok' - -def handle_comment(env, event): - if 'pull_request' not in event['issue']: - return "issue comment, ignoring" - - repo = event['repository']['full_name'] - issue = event['issue']['number'] - author = event['comment']['user']['login'] - comment = event['comment']['body'] - _logger.info('comment[%s]: %s %s#%s %r', event['action'], author, repo, issue, comment) - if event['action'] != 'created': - return "Ignored: action (%r) is not 'created'" % event['action'] - - return _handle_comment(env, repo, issue, event['comment']) - -def handle_review(env, event): - repo = event['repository']['full_name'] - pr = event['pull_request']['number'] - author = event['review']['user']['login'] - comment = event['review']['body'] or '' - - _logger.info('review[%s]: %s %s#%s %r', event['action'], author, repo, pr, comment) - if event['action'] != 'submitted': - return "Ignored: action (%r) is not 'submitted'" % event['action'] - - return _handle_comment( - env, repo, pr, event['review'], - target=event['pull_request']['base']['ref']) - -def handle_ping(env, event): - print("Got ping! 
{}".format(event['zen'])) - return "pong" - -EVENTS = { - 'pull_request': handle_pr, - 'status': handle_status, - 'issue_comment': handle_comment, - 'pull_request_review': handle_review, - 'ping': handle_ping, -} - -def _handle_comment(env, repo, issue, comment, target=None): - repository = env['runbot_merge.repository'].search([('name', '=', repo)]) - if not repository.project_id._find_commands(comment['body'] or ''): - return "No commands, ignoring" - - pr = env['runbot_merge.pull_requests']._get_or_schedule(repo, issue, target=target) - if not pr: - return "Unknown PR, scheduling fetch" - - partner = env['res.partner'].search([('github_login', '=', comment['user']['login'])]) - return pr._parse_commands(partner, comment, comment['user']['login']) diff --git a/runbot_merge/controllers/dashboard.py b/runbot_merge/controllers/dashboard.py deleted file mode 100644 index e80a2f15..00000000 --- a/runbot_merge/controllers/dashboard.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -import collections -import json -import pathlib - -import markdown -import markupsafe -import werkzeug.exceptions - -from odoo.http import Controller, route, request - -LIMIT = 20 -class MergebotDashboard(Controller): - @route('/runbot_merge', auth="public", type="http", website=True) - def dashboard(self): - return request.render('runbot_merge.dashboard', { - 'projects': request.env['runbot_merge.project'].with_context(active_test=False).sudo().search([]), - }) - - @route('/runbot_merge/<int:branch_id>', auth='public', type='http', website=True) - def stagings(self, branch_id, until=None): - branch = request.env['runbot_merge.branch'].browse(branch_id).sudo().exists() - if not branch: - raise werkzeug.exceptions.NotFound() - - stagings = request.env['runbot_merge.stagings'].with_context(active_test=False).sudo().search([ - ('target', '=', branch.id), - ('staged_at', '<=', until) if until else (True, '=', True), - ], order='staged_at desc', limit=LIMIT+1) - - return request.render('runbot_merge.branch_stagings', { - 'branch': branch, - 'stagings': stagings[:LIMIT], - 'next': stagings[-1].staged_at if len(stagings) > LIMIT else None, - }) - - def _entries(self): - changelog = pathlib.Path(__file__).parent.parent / 'changelog' - if changelog.is_dir(): - return [ - (d.name, [f.read_text(encoding='utf-8') for f in d.iterdir() if f.is_file()]) - for d in changelog.iterdir() - ] - return [] - - def entries(self, item_converter): - entries = collections.OrderedDict() - for key, items in sorted(self._entries(), reverse=True): - entries.setdefault(key, []).extend(map(item_converter, items)) - return entries - - @route('/runbot_merge/changelog', auth='public', type='http', website=True) - def changelog(self): - md = markdown.Markdown(extensions=['nl2br'], output_format='html5') - entries = self.entries(lambda t: markupsafe.Markup(md.convert(t))) - return request.render('runbot_merge.changelog', { - 'entries': entries, - }) - - @route('/<org>/<repo>/pull/<int(min=1):pr>', auth='public', type='http', website=True) - def pr(self, org, repo, pr): - pr_id = request.env['runbot_merge.pull_requests'].sudo().search([ - ('repository.name', '=', f'{org}/{repo}'), - ('number', '=', int(pr)), - ]) - if not pr_id: - raise werkzeug.exceptions.NotFound() - if not pr_id.repository.group_id <= request.env.user.groups_id: - raise werkzeug.exceptions.NotFound() - - st = {} - if pr_id.statuses: - # normalise `statuses` to map to a dict - st = { - k: {'state': v} if isinstance(v, str) else v - for k, v in 
json.loads(pr_id.statuses_full).items() - } - return request.render('runbot_merge.view_pull_request', { - 'pr': pr_id, - 'merged_head': json.loads(pr_id.commits_map).get(''), - 'statuses': st - }) diff --git a/runbot_merge/controllers/reviewer_provisioning.py b/runbot_merge/controllers/reviewer_provisioning.py deleted file mode 100644 index 1b7a9f2f..00000000 --- a/runbot_merge/controllers/reviewer_provisioning.py +++ /dev/null @@ -1,131 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -from odoo.http import Controller, request, route - -try: - from odoo.addons.saas_worker.util import from_role -except ImportError: - def from_role(_): - return lambda _: None - -_logger = logging.getLogger(__name__) -class MergebotReviewerProvisioning(Controller): - @from_role('accounts') - @route('/runbot_merge/users', type='json', auth='public') - def list_users(self): - env = request.env(su=True) - return [{ - 'github_login': u.github_login, - 'email': u.email, - } - for u in env['res.users'].search([]) - if u.github_login - ] - - @from_role('accounts') - @route('/runbot_merge/provision', type='json', auth='public') - def provision_user(self, users): - _logger.info('Provisioning %s users: %s.', len(users), ', '.join(map( - '{email} ({github_login})'.format_map, - users - ))) - env = request.env(su=True) - Partners = env['res.partner'] - Users = env['res.users'] - - existing_partners = Partners.search([ - '|', ('email', 'in', [u['email'] for u in users]), - ('github_login', 'in', [u['github_login'] for u in users]) - ]) - _logger.info("Found %d existing matching partners.", len(existing_partners)) - partners = {} - for p in existing_partners: - if p.email: - # email is not unique, though we want it to be (probably) - current = partners.get(p.email) - if current: - _logger.warning( - "Lookup conflict: %r set on two partners %r and %r.", - p.email, current.display_name, p.display_name, - ) - else: - partners[p.email] = p - - if p.github_login: - # assume there can't be an existing one because github_login is - # unique, and should not be able to collide with emails - partners[p.github_login] = p - - internal = env.ref('base.group_user') - odoo_provider = env.ref('auth_oauth.provider_openerp') - - to_create = [] - created = updated = 0 - for new in users: - if 'sub' in new: - new['oauth_provider_id'] = odoo_provider.id - new['oauth_uid'] = new.pop('sub') - - # prioritise by github_login as that's the unique-est point of information - current = partners.get(new['github_login']) or partners.get(new['email']) or Partners - # entry doesn't have user -> create user - if not current.user_ids: - # skip users without an email (= login) as that - # fails - if not new['email']: - continue - - new['login'] = new['email'] - new['groups_id'] = [(4, internal.id)] - # entry has partner -> create user linked to existing partner - # (and update partner implicitly) - if current: - new['partner_id'] = current.id - to_create.append(new) - continue - - # otherwise update user (if there is anything to update) - user = current.user_ids - if len(user) != 1: - _logger.warning("Got %d users for partner %s.", len(user), current.display_name) - user = user[:1] - update_vals = { - k: v - for k, v in new.items() - if v not in ('login', 'email') - if v != (user[k] if k != 'oauth_provider_id' else user[k].id) - } - if update_vals: - user.write(update_vals) - updated += 1 - if to_create: - # only create 100 users at a time to avoid request timeout - Users.create(to_create[:100]) - created = len(to_create[:100]) - - 
_logger.info("Provisioning: created %d updated %d.", created, updated) - return [created, updated] - - @from_role('accounts') - @route(['/runbot_merge/get_reviewers'], type='json', auth='public') - def fetch_reviewers(self, **kwargs): - reviewers = request.env['res.partner.review'].sudo().search([ - '|', ('review', '=', True), ('self_review', '=', True) - ]).mapped('partner_id.github_login') - return reviewers - - @from_role('accounts') - @route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST']) - def update_reviewers(self, github_logins, **kwargs): - partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)]) - partners.write({ - 'review_rights': [(5, 0, 0)], - 'delegate_reviewer': [(5, 0, 0)], - }) - - # Assign the linked users as portal users - partners.mapped('user_ids').write({ - 'groups_id': [(6, 0, [request.env.ref('base.group_portal').id])] - }) - return True diff --git a/runbot_merge/data/merge_cron.xml b/runbot_merge/data/merge_cron.xml deleted file mode 100644 index f8736785..00000000 --- a/runbot_merge/data/merge_cron.xml +++ /dev/null @@ -1,72 +0,0 @@ -<odoo> - <record model="ir.cron" id="merge_cron"> - <field name="name">Check for progress of (and merge) stagings</field> - <field name="model_id" ref="model_runbot_merge_project"/> - <field name="state">code</field> - <field name="code">model._check_stagings(True)</field> - <field name="interval_number">1</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="staging_cron"> - <field name="name">Check for progress of PRs and create Stagings</field> - <field name="model_id" ref="model_runbot_merge_project"/> - <field name="state">code</field> - <field name="code">model._create_stagings(True)</field> - <field name="interval_number">1</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="feedback_cron"> - <field name="name">Send feedback to PR</field> - <field name="model_id" ref="model_runbot_merge_pull_requests_feedback"/> - <field name="state">code</field> - <field name="code">model._send()</field> - <field name="interval_number">1</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="labels_cron"> - <field name="name">Update labels on PR</field> - <field name="model_id" ref="model_runbot_merge_pull_requests_tagging"/> - <field name="state">code</field> - <field name="code">model._send()</field> - <field name="interval_number">10</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="fetch_prs_cron"> - <field name="name">Check for PRs to fetch</field> - <field name="model_id" ref="model_runbot_merge_fetch_job"/> - <field name="state">code</field> - <field name="code">model._check(True)</field> - <field name="interval_number">1</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="check_linked_prs_status"> - <field name="name">Warn on linked PRs where only one is ready</field> - <field name="model_id" ref="model_runbot_merge_pull_requests"/> - <field name="state">code</field> - <field 
name="code">model._check_linked_prs_statuses(True)</field> - <field name="interval_number">1</field> - <field name="interval_type">hours</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> - <record model="ir.cron" id="process_updated_commits"> - <field name="name">Impact commit statuses on PRs and stagings</field> - <field name="model_id" ref="model_runbot_merge_commit"/> - <field name="state">code</field> - <field name="code">model._notify()</field> - <field name="interval_number">1</field> - <field name="interval_type">minutes</field> - <field name="numbercall">-1</field> - <field name="doall" eval="False"/> - </record> -</odoo> diff --git a/runbot_merge/exceptions.py b/runbot_merge/exceptions.py deleted file mode 100644 index 4ef79f2e..00000000 --- a/runbot_merge/exceptions.py +++ /dev/null @@ -1,8 +0,0 @@ -class MergeError(Exception): - pass -class FastForwardError(Exception): - pass -class Mismatch(MergeError): - pass -class Unmergeable(MergeError): - ... diff --git a/runbot_merge/github.py b/runbot_merge/github.py deleted file mode 100644 index c36a2ebf..00000000 --- a/runbot_merge/github.py +++ /dev/null @@ -1,406 +0,0 @@ -import collections -import itertools -import json as json_ -import logging -import logging.handlers -import os -import pathlib -import pprint -import textwrap -import unicodedata -from datetime import datetime, timezone - -import requests -import werkzeug.urls - -import odoo.netsvc -from odoo.tools import topological_sort, config -from . import exceptions, utils - -class MergeError(Exception): ... - -def _is_json(r): - return r and r.headers.get('content-type', '').startswith(('application/json', 'application/javascript')) - -_logger = logging.getLogger(__name__) -_gh = logging.getLogger('github_requests') -def _init_gh_logger(): - """ Log all GH requests / responses so we have full tracking, but put them - in a separate file if we're logging to a file - """ - if not config['logfile']: - return - original = pathlib.Path(config['logfile']) - new = original.with_name('github_requests')\ - .with_suffix(original.suffix) - - if os.name == 'posix': - handler = logging.handlers.WatchedFileHandler(str(new)) - else: - handler = logging.FileHandler(str(new)) - - handler.setFormatter(odoo.netsvc.DBFormatter( - '%(asctime)s %(pid)s %(levelname)s %(dbname)s %(name)s: %(message)s' - )) - _gh.addHandler(handler) - _gh.propagate = False - -if odoo.netsvc._logger_init: - _init_gh_logger() - -GH_LOG_PATTERN = """=> {method} /{self._repo}/{path}{qs}{body} - -<= {r.status_code} {r.reason} -{headers} -{body2} -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -""" -class GH(object): - def __init__(self, token, repo): - self._url = 'https://api.github.com' - self._repo = repo - session = self._session = requests.Session() - session.headers['Authorization'] = 'token {}'.format(token) - session.headers['Accept'] = 'application/vnd.github.symmetra-preview+json' - - def _log_gh(self, logger, method, path, params, json, response, level=logging.INFO): - """ Logs a pair of request / response to github, to the specified - logger, at the specified level. - - Tries to format all the information (including request / response - bodies, at least in part) so we have as much information as possible - for post-mortems. 
- """ - body = body2 = '' - - if json: - body = '\n' + textwrap.indent('\t', pprint.pformat(json, indent=4)) - - if response.content: - if _is_json(response): - body2 = pprint.pformat(response.json(), depth=4) - elif response.encoding is not None: - body2 = response.text - else: # fallback: universal decoding & replace nonprintables - body2 = ''.join( - '\N{REPLACEMENT CHARACTER}' if unicodedata.category(c) == 'Cc' else c - for c in response.content.decode('iso-8859-1') - ) - - logger.log(level, GH_LOG_PATTERN.format( - self=self, - # requests data - method=method, path=path, - qs='' if not params else ('?' + werkzeug.urls.url_encode(params)), - body=utils.shorten(body.strip(), 400), - # response data - r=response, - headers='\n'.join( - '\t%s: %s' % (h, v) for h, v in response.headers.items() - ), - body2=utils.shorten(body2.strip(), 400) - )) - return body2 - - def __call__(self, method, path, params=None, json=None, check=True): - """ - :type check: bool | dict[int:Exception] - """ - r = self._session.request( - method, - '{}/repos/{}/{}'.format(self._url, self._repo, path), - params=params, - json=json - ) - self._log_gh(_gh, method, path, params, json, r) - if check: - if isinstance(check, collections.Mapping): - exc = check.get(r.status_code) - if exc: - raise exc(r.text) - if r.status_code >= 400: - body = self._log_gh( - _logger, method, path, params, json, r, level=logging.ERROR) - if not isinstance(body, (bytes, str)): - raise requests.HTTPError( - json_.dumps(body, indent=4), - response=r - ) - r.raise_for_status() - return r - - def user(self, username): - r = self._session.get("{}/users/{}".format(self._url, username)) - r.raise_for_status() - return r.json() - - def head(self, branch): - d = utils.backoff( - lambda: self('get', 'git/refs/heads/{}'.format(branch)).json(), - exc=requests.HTTPError - ) - - assert d['ref'] == 'refs/heads/{}'.format(branch) - assert d['object']['type'] == 'commit' - _logger.debug("head(%s, %s) -> %s", self._repo, branch, d['object']['sha']) - return d['object']['sha'] - - def commit(self, sha): - c = self('GET', 'git/commits/{}'.format(sha)).json() - _logger.debug('commit(%s, %s) -> %s', self._repo, sha, shorten(c['message'])) - return c - - def comment(self, pr, message): - # if the mergebot user has been blocked by the PR author, this will - # fail, but we don't want the closing of the PR to fail, or for the - # feedback cron to get stuck - try: - self('POST', 'issues/{}/comments'.format(pr), json={'body': message}) - except requests.HTTPError as r: - if _is_json(r.response): - body = r.response.json() - if any(e.message == 'User is blocked' for e in (body.get('errors') or [])): - _logger.warning("comment(%s#%s) failed: user likely blocked", self._repo, pr) - return - raise - _logger.debug('comment(%s, %s, %s)', self._repo, pr, shorten(message)) - - def close(self, pr): - self('PATCH', 'pulls/{}'.format(pr), json={'state': 'closed'}) - - def change_tags(self, pr, remove, add): - labels_endpoint = 'issues/{}/labels'.format(pr) - tags_before = {label['name'] for label in self('GET', labels_endpoint).json()} - tags_after = (tags_before - remove) | add - # replace labels entirely - self('PUT', labels_endpoint, json={'labels': list(tags_after)}) - - _logger.debug('change_tags(%s, %s, from=%s, to=%s)', self._repo, pr, tags_before, tags_after) - - def _check_updated(self, branch, to): - """ - :return: nothing if successful, the incorrect HEAD otherwise - """ - r = self('get', 'git/refs/heads/{}'.format(branch), check=False) - if r.status_code == 200: - 
head = r.json()['object']['sha'] - else: - head = '<Response [%s]: %s)>' % (r.status_code, r.json() if _is_json(r) else r.text) - - if head == to: - _logger.debug("Sanity check ref update of %s to %s: ok", branch, to) - return - - _logger.warning("Sanity check ref update of %s, expected %s got %s", branch, to, head) - return head - - def fast_forward(self, branch, sha): - try: - self('patch', 'git/refs/heads/{}'.format(branch), json={'sha': sha}) - _logger.debug('fast_forward(%s, %s, %s) -> OK', self._repo, branch, sha) - @utils.backoff(exc=exceptions.FastForwardError) - def _wait_for_update(): - if not self._check_updated(branch, sha): - return - raise exceptions.FastForwardError(self._repo) \ - from Exception("timeout: never saw %s" % sha) - except requests.HTTPError as e: - _logger.debug('fast_forward(%s, %s, %s) -> ERROR', self._repo, branch, sha, exc_info=True) - if e.response.status_code == 422: - try: - r = e.response.json() - except Exception: - pass - else: - if isinstance(r, dict) and 'message' in r: - e = Exception(r['message'].lower()) - raise exceptions.FastForwardError(self._repo) from e - - def set_ref(self, branch, sha): - # force-update ref - r = self('patch', 'git/refs/heads/{}'.format(branch), json={ - 'sha': sha, - 'force': True, - }, check=False) - - status0 = r.status_code - _logger.debug( - 'ref_set(%s, %s, %s -> %s (%s)', - self._repo, branch, sha, status0, - 'OK' if status0 == 200 else r.text or r.reason - ) - if status0 == 200: - @utils.backoff(exc=AssertionError) - def _wait_for_update(): - head = self._check_updated(branch, sha) - assert not head, "Sanity check ref update of %s, expected %s got %s" % ( - branch, sha, head - ) - return - - # 422 makes no sense but that's what github returns, leaving 404 just - # in case - if status0 in (404, 422): - # fallback: create ref - status1 = self.create_ref(branch, sha) - if status1 == 201: - return - else: - status1 = None - - raise AssertionError("set_ref failed(%s, %s)" % (status0, status1)) - - def create_ref(self, branch, sha): - r = self('post', 'git/refs', json={ - 'ref': 'refs/heads/{}'.format(branch), - 'sha': sha, - }, check=False) - status = r.status_code - _logger.debug( - 'ref_create(%s, %s, %s) -> %s (%s)', - self._repo, branch, sha, status, - 'OK' if status == 201 else r.text or r.reason - ) - if status == 201: - @utils.backoff(exc=AssertionError) - def _wait_for_update(): - head = self._check_updated(branch, sha) - assert not head, \ - f"Sanity check ref update of {branch}, expected {sha} got {head}" - return status - - def merge(self, sha, dest, message): - r = self('post', 'merges', json={ - 'base': dest, - 'head': sha, - 'commit_message': message, - }, check={409: MergeError}) - try: - r = r.json() - except Exception: - raise MergeError("Got non-JSON reponse from github: %s %s (%s)" % (r.status_code, r.reason, r.text)) - _logger.debug( - "merge(%s, %s (%s), %s) -> %s", - self._repo, dest, r['parents'][0]['sha'], - shorten(message), r['sha'] - ) - return dict(r['commit'], sha=r['sha'], parents=r['parents']) - - def rebase(self, pr, dest, reset=False, commits=None): - """ Rebase pr's commits on top of dest, updates dest unless ``reset`` - is set. 
- - Returns the hash of the rebased head and a map of all PR commits (to the PR they were rebased to) - """ - logger = _logger.getChild('rebase') - original_head = self.head(dest) - if commits is None: - commits = self.commits(pr) - - logger.debug("rebasing %s, %s on %s (reset=%s, commits=%s)", - self._repo, pr, dest, reset, len(commits)) - - assert commits, "can't rebase a PR with no commits" - prev = original_head - for original in commits: - assert len(original['parents']) == 1, "can't rebase commits with more than one parent" - tmp_msg = 'temp rebasing PR %s (%s)' % (pr, original['sha']) - merged = self.merge(original['sha'], dest, tmp_msg) - - # whichever parent is not original['sha'] should be what dest - # deref'd to, and we want to check that matches the "left parent" we - # expect (either original_head or the previously merged commit) - [base_commit] = (parent['sha'] for parent in merged['parents'] - if parent['sha'] != original['sha']) - assert prev == base_commit,\ - "Inconsistent view of %s between head (%s) and merge (%s)" % ( - dest, prev, base_commit, - ) - prev = merged['sha'] - original['new_tree'] = merged['tree']['sha'] - - prev = original_head - mapping = {} - for c in commits: - committer = c['commit']['committer'] - committer.pop('date') - copy = self('post', 'git/commits', json={ - 'message': c['commit']['message'], - 'tree': c['new_tree'], - 'parents': [prev], - 'author': c['commit']['author'], - 'committer': committer, - }, check={409: MergeError}).json() - logger.debug('copied %s to %s (parent: %s)', c['sha'], copy['sha'], prev) - prev = mapping[c['sha']] = copy['sha'] - - if reset: - self.set_ref(dest, original_head) - else: - self.set_ref(dest, prev) - - logger.debug('rebased %s, %s on %s (reset=%s, commits=%s) -> %s', - self._repo, pr, dest, reset, len(commits), - prev) - # prev is updated after each copy so it's the rebased PR head - return prev, mapping - - # fetch various bits of issues / prs to load them - def pr(self, number): - return ( - self('get', 'issues/{}'.format(number)).json(), - self('get', 'pulls/{}'.format(number)).json() - ) - - def comments(self, number): - for page in itertools.count(1): - r = self('get', 'issues/{}/comments'.format(number), params={'page': page}) - yield from r.json() - if not r.links.get('next'): - return - - def reviews(self, number): - for page in itertools.count(1): - r = self('get', 'pulls/{}/reviews'.format(number), params={'page': page}) - yield from r.json() - if not r.links.get('next'): - return - - def commits_lazy(self, pr): - for page in itertools.count(1): - r = self('get', 'pulls/{}/commits'.format(pr), params={'page': page}) - yield from r.json() - if not r.links.get('next'): - return - - def commits(self, pr): - """ Returns a PR's commits oldest first (that's what GH does & - is what we want) - """ - commits = list(self.commits_lazy(pr)) - # map shas to the position the commit *should* have - idx = { - c: i - for i, c in enumerate(topological_sort({ - c['sha']: [p['sha'] for p in c['parents']] - for c in commits - })) - } - return sorted(commits, key=lambda c: idx[c['sha']]) - - def statuses(self, h): - r = self('get', 'commits/{}/status'.format(h)).json() - return [{ - 'sha': r['sha'], - **s, - } for s in r['statuses']] - -def shorten(s): - if not s: - return s - - line1 = s.split('\n', 1)[0] - if len(line1) < 50: - return line1 - - return line1[:47] + '...' 
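`GH.commits()` above returns a PR's commits oldest-first by topologically sorting them on their parent links, which the rebase loop relies on. A standalone sketch of that ordering step, assuming the standard-library `graphlib.TopologicalSorter` in place of `odoo.tools.topological_sort` (the sample shas are made up):

# Order commit dicts oldest-first from their parent links, mirroring
# GH.commits() but with the stdlib TopologicalSorter; sample data is
# illustrative only.
from graphlib import TopologicalSorter

def order_commits(commits):
    known = {c['sha'] for c in commits}
    # sha -> set of parent shas, restricted to parents that belong to the PR
    graph = {
        c['sha']: {p['sha'] for p in c['parents'] if p['sha'] in known}
        for c in commits
    }
    # static_order() yields nodes predecessors-first, i.e. oldest commit first
    position = {sha: i for i, sha in enumerate(TopologicalSorter(graph).static_order())}
    return sorted(commits, key=lambda c: position[c['sha']])

if __name__ == '__main__':
    sample = [
        {'sha': 'c3', 'parents': [{'sha': 'c2'}]},
        {'sha': 'c1', 'parents': [{'sha': 'base'}]},
        {'sha': 'c2', 'parents': [{'sha': 'c1'}]},
    ]
    print([c['sha'] for c in order_commits(sample)])  # ['c1', 'c2', 'c3']

As in the original, sorting explicitly by parents rather than trusting page order preserves the oldest-first guarantee even if the paginated listing were to return commits out of order.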
diff --git a/runbot_merge/migrations/13.0.1.1/pre-migration.py b/runbot_merge/migrations/13.0.1.1/pre-migration.py deleted file mode 100644 index edecc416..00000000 --- a/runbot_merge/migrations/13.0.1.1/pre-migration.py +++ /dev/null @@ -1,17 +0,0 @@ -def migrate(cr, version): - """ Moved the required_statuses field from the project to the repository so - different repos can have different CI requirements within a project - """ - # create column on repo - cr.execute("ALTER TABLE runbot_merge_repository ADD COLUMN required_statuses varchar") - # copy data from project - cr.execute(""" - UPDATE runbot_merge_repository r - SET required_statuses = ( - SELECT required_statuses - FROM runbot_merge_project - WHERE id = r.project_id - ) - """) - # drop old column on project - cr.execute("ALTER TABLE runbot_merge_project DROP COLUMN required_statuses") diff --git a/runbot_merge/migrations/13.0.1.2/pre-migration.py b/runbot_merge/migrations/13.0.1.2/pre-migration.py deleted file mode 100644 index 840df84c..00000000 --- a/runbot_merge/migrations/13.0.1.2/pre-migration.py +++ /dev/null @@ -1,16 +0,0 @@ -def migrate(cr, version): - cr.execute(""" - create table res_partner_review ( - id serial primary key, - partner_id integer not null references res_partner (id), - repository_id integer not null references runbot_merge_repository (id), - review bool, - self_review bool - ) - """) - cr.execute(""" - insert into res_partner_review (partner_id, repository_id, review, self_review) - select p.id, r.id, reviewer, self_reviewer - from res_partner p, runbot_merge_repository r - where p.reviewer or p.self_reviewer - """) diff --git a/runbot_merge/migrations/13.0.1.3/pre-migration.py b/runbot_merge/migrations/13.0.1.3/pre-migration.py deleted file mode 100644 index d4e59b7e..00000000 --- a/runbot_merge/migrations/13.0.1.3/pre-migration.py +++ /dev/null @@ -1,2 +0,0 @@ -def migrate(cr, version): - cr.execute("DROP INDEX runbot_merge_unique_gh_login") diff --git a/runbot_merge/migrations/13.0.1.4/pre-migration.py b/runbot_merge/migrations/13.0.1.4/pre-migration.py deleted file mode 100644 index d3ad4689..00000000 --- a/runbot_merge/migrations/13.0.1.4/pre-migration.py +++ /dev/null @@ -1,35 +0,0 @@ -import re - -def migrate(cr, version): - """ required_statuses is now a separate object in its own table - """ - # apparently the DDL has already been updated but the reflection gunk - cr.execute(""" - DELETE FROM ir_model_fields - WHERE model = 'runbot_merge.pull_requests.tagging' - AND name in ('state_from', 'state_to') - """) - - cr.execute(""" - CREATE TABLE runbot_merge_repository_status ( - id SERIAL NOT NULL PRIMARY KEY, - context VARCHAR NOT NULL, - repo_id INTEGER NOT NULL REFERENCES runbot_merge_repository (id) ON DELETE CASCADE, - prs BOOLEAN, - stagings BOOLEAN - ) - """) - cr.execute(""" - CREATE TABLE runbot_merge_repository_status_branch ( - status_id INTEGER NOT NULL REFERENCES runbot_merge_repository_status (id) ON DELETE CASCADE, - branch_id INTEGER NOT NULL REFERENCES runbot_merge_branch (id) ON DELETE CASCADE - ) - """) - - cr.execute('select id, required_statuses from runbot_merge_repository') - for repo, statuses in cr.fetchall(): - for st in re.split(r',\s*', statuses): - cr.execute(""" - INSERT INTO runbot_merge_repository_status (context, repo_id, prs, stagings) - VALUES (%s, %s, true, true) - """, [st, repo]) diff --git a/runbot_merge/migrations/13.0.1.5/pre-migration.py b/runbot_merge/migrations/13.0.1.5/pre-migration.py deleted file mode 100644 index 85e0c67a..00000000 --- 
a/runbot_merge/migrations/13.0.1.5/pre-migration.py +++ /dev/null @@ -1,22 +0,0 @@ -def migrate(cr, version): - """ copy required status filters from an m2m to branches to a domain - """ - cr.execute(""" - ALTER TABLE runbot_merge_repository_status - ADD COLUMN branch_filter varchar - """) - cr.execute(''' - SELECT status_id, array_agg(branch_id) - FROM runbot_merge_repository_status_branch - GROUP BY status_id - ''') - for st, brs in cr.fetchall(): - cr.execute(""" - UPDATE runbot_merge_repository_status - SET branch_filter = %s - WHERE id = %s - """, [ - repr([('id', 'in', brs)]), - st - ]) - cr.execute("DROP TABLE runbot_merge_repository_status_branch") diff --git a/runbot_merge/migrations/13.0.1.6/pre-migration.py b/runbot_merge/migrations/13.0.1.6/pre-migration.py deleted file mode 100644 index a7903018..00000000 --- a/runbot_merge/migrations/13.0.1.6/pre-migration.py +++ /dev/null @@ -1,39 +0,0 @@ -import collections - - -def migrate(cr, version): - """ Status overrides: o2m -> m2m - """ - # create link table - cr.execute(''' - CREATE TABLE res_partner_res_partner_override_rel ( - res_partner_id integer not null references res_partner (id) ON DELETE CASCADE, - res_partner_override_id integer not null references res_partner_override (id) ON DELETE CASCADE, - primary key (res_partner_id, res_partner_override_id) - ) - ''') - cr.execute(''' - CREATE UNIQUE INDEX ON res_partner_res_partner_override_rel - (res_partner_override_id, res_partner_id) - ''') - - # deduplicate override rights and insert into link table - cr.execute('SELECT array_agg(id), array_agg(partner_id)' - ' FROM res_partner_override GROUP BY repository_id, context') - links = {} - duplicants = set() - for [keep, *drops], partners in cr.fetchall(): - links[keep] = partners - duplicants.update(drops) - for override_id, partner_ids in links.items(): - for partner_id in partner_ids: - cr.execute('INSERT INTO res_partner_res_partner_override_rel (res_partner_override_id, res_partner_id)' - ' VALUES (%s, %s)', [override_id, partner_id]) - # drop dups - cr.execute('DELETE FROM res_partner_override WHERE id = any(%s)', [list(duplicants)]) - - # remove old partner field - cr.execute('ALTER TABLE res_partner_override DROP COLUMN partner_id') - # add constraint to overrides - cr.execute('CREATE UNIQUE INDEX res_partner_override_unique ON res_partner_override ' - '(context, coalesce(repository_id, 0))') diff --git a/runbot_merge/migrations/13.0.1.7/pre-migration.py b/runbot_merge/migrations/13.0.1.7/pre-migration.py deleted file mode 100644 index f15bd95d..00000000 --- a/runbot_merge/migrations/13.0.1.7/pre-migration.py +++ /dev/null @@ -1,6 +0,0 @@ -def migrate(cr, version): - """ Create draft column manually because the v13 orm can't handle the power - of adding new required columns - """ - cr.execute("ALTER TABLE runbot_merge_pull_requests" - " ADD COLUMN draft BOOLEAN NOT NULL DEFAULT false") diff --git a/runbot_merge/models/__init__.py b/runbot_merge/models/__init__.py deleted file mode 100644 index b457f468..00000000 --- a/runbot_merge/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from . import res_partner -from . import project -from . import pull_requests -from . 
import project_freeze diff --git a/runbot_merge/models/project.py b/runbot_merge/models/project.py deleted file mode 100644 index f7296dc8..00000000 --- a/runbot_merge/models/project.py +++ /dev/null @@ -1,123 +0,0 @@ -import logging -import re - -from odoo import models, fields - -_logger = logging.getLogger(__name__) -class Project(models.Model): - _name = _description = 'runbot_merge.project' - - name = fields.Char(required=True, index=True) - repo_ids = fields.One2many( - 'runbot_merge.repository', 'project_id', - help="Repos included in that project, they'll be staged together. "\ - "*Not* to be used for cross-repo dependencies (that is to be handled by the CI)" - ) - branch_ids = fields.One2many( - 'runbot_merge.branch', 'project_id', - context={'active_test': False}, - help="Branches of all project's repos which are managed by the merge bot. Also "\ - "target branches of PR this project handles." - ) - - ci_timeout = fields.Integer( - default=60, required=True, - help="Delay (in minutes) before a staging is considered timed out and failed" - ) - - github_token = fields.Char("Github Token", required=True) - github_prefix = fields.Char( - required=True, - default="hanson", # mergebot du bot du bot du~ - help="Prefix (~bot name) used when sending commands from PR " - "comments e.g. [hanson retry] or [hanson r+ p=1]" - ) - - batch_limit = fields.Integer( - default=8, help="Maximum number of PRs staged together") - - secret = fields.Char( - help="Webhook secret. If set, will be checked against the signature " - "of (valid) incoming webhook signatures, failing signatures " - "will lead to webhook rejection. Should only use ASCII." - ) - - freeze_id = fields.Many2one('runbot_merge.project.freeze', compute='_compute_freeze') - freeze_reminder = fields.Text() - - def _check_stagings(self, commit=False): - for branch in self.search([]).mapped('branch_ids').filtered('active'): - staging = branch.active_staging_id - if not staging: - continue - try: - with self.env.cr.savepoint(): - staging.check_status() - except Exception: - _logger.exception("Failed to check staging for branch %r (staging %s)", - branch.name, staging) - else: - if commit: - self.env.cr.commit() - - def _create_stagings(self, commit=False): - for branch in self.search([]).mapped('branch_ids').filtered('active'): - if not branch.active_staging_id: - try: - with self.env.cr.savepoint(): - branch.try_staging() - except Exception: - _logger.exception("Failed to create staging for branch %r", branch.name) - else: - if commit: - self.env.cr.commit() - - def _find_commands(self, comment): - return re.findall( - '^\s*[@|#]?{}:? (.*)$'.format(self.github_prefix), - comment, re.MULTILINE | re.IGNORECASE) - - def _has_branch(self, name): - self.env.cr.execute(""" - SELECT 1 FROM runbot_merge_branch - WHERE project_id = %s AND name = %s - LIMIT 1 - """, (self.id, name)) - return bool(self.env.cr.rowcount) - - def _next_freeze(self): - prev = self.branch_ids[1:2].name - if not prev: - return None - - m = re.search(r'(\d+)(?:\.(\d+))?$', prev) - if m: - return "%s.%d" % (m[1], (int(m[2] or 0) + 1)) - else: - return f'post-{prev}' - - def _compute_freeze(self): - freezes = { - f.project_id.id: f.id - for f in self.env['runbot_merge.project.freeze'].search([('project_id', 'in', self.ids)]) - } - for project in self: - project.freeze_id = freezes.get(project.id) or False - - def action_prepare_freeze(self): - """ Initialises the freeze wizard and returns the corresponding action. 
- """ - self.check_access_rights('write') - self.check_access_rule('write') - Freeze = self.env['runbot_merge.project.freeze'].sudo() - - w = Freeze.search([('project_id', '=', self.id)]) or Freeze.create({ - 'project_id': self.id, - 'branch_name': self._next_freeze(), - 'release_pr_ids': [ - (0, 0, {'repository_id': repo.id}) - for repo in self.repo_ids - if repo.freeze - ] - }) - return w.action_open() diff --git a/runbot_merge/models/project_freeze/__init__.py b/runbot_merge/models/project_freeze/__init__.py deleted file mode 100644 index 6e3be694..00000000 --- a/runbot_merge/models/project_freeze/__init__.py +++ /dev/null @@ -1,386 +0,0 @@ -import contextlib -import enum -import itertools -import json -import logging -import time - -from odoo import models, fields, api -from odoo.exceptions import UserError -from odoo.addons.runbot_merge.exceptions import FastForwardError - -_logger = logging.getLogger(__name__) -class FreezeWizard(models.Model): - _name = 'runbot_merge.project.freeze' - _description = "Wizard for freezing a project('s master)" - - project_id = fields.Many2one('runbot_merge.project', required=True) - errors = fields.Text(compute='_compute_errors') - branch_name = fields.Char(required=True, help="Name of the new branches to create") - - required_pr_ids = fields.Many2many( - 'runbot_merge.pull_requests', string="Required Pull Requests", - domain="[('state', 'not in', ('closed', 'merged'))]", - help="Pull requests which must have been merged before the freeze is allowed", - ) - - release_label = fields.Char() - release_pr_ids = fields.One2many( - 'runbot_merge.project.freeze.prs', 'wizard_id', - string="Release pull requests", - help="Pull requests used as tips for the freeze branches, " - "one per repository" - ) - - bump_label = fields.Char() - bump_pr_ids = fields.One2many( - 'runbot_merge.project.freeze.bumps', 'wizard_id', - string="Bump pull requests", - help="Pull requests used as tips of the frozen-off branches, " - "one per repository" - ) - - _sql_constraints = [ - ('unique_per_project', 'unique (project_id)', - "There should be only one ongoing freeze per project"), - ] - - @api.onchange('release_label') - def _onchange_release_label(self): - prs = self.env['runbot_merge.pull_requests'].search([ - ('label', '=', self.release_label) - ]) - for release_pr in self.release_pr_ids: - release_pr.pd_id = next(( - p.id for p in prs - if p.repository == release_pr.repository_id - ), False) - - @api.onchange('release_pr_ids') - def _onchange_release_prs(self): - labels = {p.pr_id.label for p in self.release_pr_ids} - self.release_label = len(labels) == 1 and labels.pop() - - @api.onchange('bump_label') - def _onchange_bump_label(self): - prs = self.env['runbot_merge.pull_requests'].search([ - ('label', '=', self.bump_label) - ]) - for bump_pr in self.bump_pr_ids: - bump_pr.pd_id = next(( - p.id for p in prs - if p.repository == bump_pr.repository_id - ), False) - - @api.onchange('bump_pr_ids') - def _onchange_bump_prs(self): - labels = {p.pr_id.label for p in self.bump_pr_ids} - self.bump_label = len(labels) == 1 and labels.pop() - - @api.depends('release_pr_ids.pr_id.label', 'required_pr_ids.state') - def _compute_errors(self): - errors = [] - without = self.release_pr_ids.filtered(lambda p: not p.pr_id) - if without: - errors.append("* Every repository must have a release PR, missing release PRs for %s." 
% ', '.join( - without.mapped('repository_id.name') - )) - - labels = set(self.mapped('release_pr_ids.pr_id.label')) - if len(labels) != 1: - errors.append("* All release PRs must have the same label, found %r." % ', '.join(sorted(labels))) - non_squash = self.mapped('release_pr_ids.pr_id').filtered(lambda p: not p.squash) - if non_squash: - errors.append("* Release PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash)) - - bump_labels = set(self.mapped('bump_pr_ids.pr_id.label')) - if len(bump_labels) > 1: - errors.append("* All bump PRs must have the same label, found %r" % ', '.join(sorted(bump_labels))) - non_squash = self.mapped('bump_pr_ids.pr_id').filtered(lambda p: not p.squash) - if non_squash: - errors.append("* Bump PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash)) - - unready = sum(p.state not in ('closed', 'merged') for p in self.required_pr_ids) - if unready: - errors.append(f"* {unready} required PRs not ready.") - - self.errors = '\n'.join(errors) or False - - def action_cancel(self): - self.project_id.check_access_rights('write') - self.project_id.check_access_rule('write') - self.sudo().unlink() - - return {'type': 'ir.actions.act_window_close'} - - def action_open(self): - return { - 'type': 'ir.actions.act_window', - 'target': 'new', - 'name': f'Freeze project {self.project_id.name}', - 'view_mode': 'form', - 'res_model': self._name, - 'res_id': self.id, - } - - def action_freeze(self): - """ Attempts to perform the freeze. - """ - # if there are still errors, reopen the wizard - if self.errors: - return self.action_open() - - conflict_crons = self.env.ref('runbot_merge.merge_cron') | self.env.ref('runbot_merge.staging_cron') - # we don't want to run concurrently to the crons above, though we - # don't need to prevent read access to them - self.env.cr.execute( - 'SELECT * FROM ir_cron WHERE id =ANY(%s) FOR SHARE NOWAIT', - [conflict_crons.ids] - ) - - project_id = self.project_id - # need to create the new branch, but at the same time resequence - # everything so the new branch is the second one, just after the branch - # it "forks" - master, rest = project_id.branch_ids[0], project_id.branch_ids[1:] - seq = itertools.count(start=1) # start reseq at 1 - commands = [ - (1, master.id, {'sequence': next(seq)}), - (0, 0, { - 'name': self.branch_name, - 'sequence': next(seq), - }) - ] - commands.extend((1, b.id, {'sequence': s}) for s, b in zip(seq, rest)) - project_id.branch_ids = commands - master_name = master.name - - gh_sessions = {r: r.github() for r in self.project_id.repo_ids} - - # prep new branch (via tmp refs) on every repo - rel_heads = {} - # store for master heads as odds are high the bump pr(s) will be on the - # same repo as one of the release PRs - prevs = {} - for rel in self.release_pr_ids: - repo_id = rel.repository_id - gh = gh_sessions[repo_id] - try: - prev = prevs[repo_id] = gh.head(master_name) - except Exception: - raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.") - - # create the tmp branch to merge the PR into - tmp_branch = f'tmp.{self.branch_name}' - try: - gh.set_ref(tmp_branch, prev) - except Exception as err: - raise UserError(f"Unable to create branch {self.branch_name} of repository {repo_id.name}: {err}.") - - rel_heads[repo_id], _ = gh.rebase(rel.pr_id.number, tmp_branch) - time.sleep(1) - - # prep bump - bump_heads = {} - for bump in self.bump_pr_ids: - repo_id = bump.repository_id - gh = 
gh_sessions[repo_id] - - try: - prev = prevs[repo_id] = prevs.get(repo_id) or gh.head(master_name) - except Exception: - raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.") - - # create the tmp branch to merge the PR into - tmp_branch = f'tmp.{master_name}' - try: - gh.set_ref(tmp_branch, prev) - except Exception as err: - raise UserError(f"Unable to create branch {master_name} of repository {repo_id.name}: {err}.") - - bump_heads[repo_id], _ = gh.rebase(bump.pr_id.number, tmp_branch) - time.sleep(1) - - deployed = {} - # at this point we've got a bunch of tmp branches with merged release - # and bump PRs, it's time to update the corresponding targets - to_delete = [] # release prs go on new branches which we try to delete on failure - to_revert = [] # bump prs go on new branch which we try to revert on failure - failure = None - for rel in self.release_pr_ids: - repo_id = rel.repository_id - # helper API currently has no API to ensure we're just creating a - # new branch (as cheaply as possible) so do it by hand - status = None - with contextlib.suppress(Exception): - status = gh_sessions[repo_id].create_ref(self.branch_name, rel_heads[repo_id]) - deployed[rel.pr_id.id] = rel_heads[repo_id] - to_delete.append(repo_id) - - if status != 201: - failure = ('create', repo_id.name, self.branch_name) - break - else: # all release deployments succeeded - for bump in self.bump_pr_ids: - repo_id = bump.repository_id - try: - gh_sessions[repo_id].fast_forward(master_name, bump_heads[repo_id]) - deployed[bump.pr_id.id] = bump_heads[repo_id] - to_revert.append(repo_id) - except FastForwardError: - failure = ('fast-forward', repo_id.name, master_name) - break - - if failure: - addendums = [] - # creating the branch failed, try to delete all previous branches - failures = [] - for prev_id in to_revert: - revert = gh_sessions[prev_id]('PATCH', f'git/refs/heads/{master_name}', json={ - 'sha': prevs[prev_id], - 'force': True - }, check=False) - if not revert.ok: - failures.append(prev_id.name) - if failures: - addendums.append( - "Subsequently unable to revert branches created in %s." % \ - ', '.join(failures) - ) - failures.clear() - - for prev_id in to_delete: - deletion = gh_sessions[prev_id]('DELETE', f'git/refs/heads/{self.branch_name}', check=False) - if not deletion.ok: - failures.append(prev_id.name) - if failures: - addendums.append( - "Subsequently unable to delete branches created in %s." 
% \ - ", ".join(failures) - ) - failures.clear() - - if addendums: - addendum = '\n\n' + '\n'.join(addendums) - else: - addendum = '' - - reason, repo, branch = failure - raise UserError( - f"Unable to {reason} branch {repo}:{branch}.{addendum}" - ) - - all_prs = self.release_pr_ids.pr_id | self.bump_pr_ids.pr_id - all_prs.state = 'merged' - self.env['runbot_merge.pull_requests.feedback'].create([{ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'close': True, - 'message': json.dumps({ - 'sha': deployed[pr.id], - 'base': self.branch_name if pr in self.release_pr_ids.pr_id else None - }) - } for pr in all_prs]) - - # delete wizard - self.sudo().unlink() - # managed to create all the things, show reminder text (or close) - if project_id.freeze_reminder: - return { - 'type': 'ir.actions.act_window', - 'target': 'new', - 'name': f'Freeze reminder {project_id.name}', - 'view_mode': 'form', - 'res_model': project_id._name, - 'res_id': project_id.id, - 'view_id': self.env.ref('runbot_merge.project_freeze_reminder').id - } - - return {'type': 'ir.actions.act_window_close'} - -class ReleasePullRequest(models.Model): - _name = 'runbot_merge.project.freeze.prs' - _description = "links to pull requests used to \"cap\" freezes" - - wizard_id = fields.Many2one('runbot_merge.project.freeze', required=True, index=True, ondelete='cascade') - repository_id = fields.Many2one('runbot_merge.repository', required=True) - pr_id = fields.Many2one( - 'runbot_merge.pull_requests', - domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]', - string="Release Pull Request", - ) - label = fields.Char(related='pr_id.label') - - def write(self, vals): - # only the pr should be writeable after initial creation - assert 'wizard_id' not in vals - assert 'repository_id' not in vals - # and if the PR gets set, it should match the requested repository - if 'pr_id' in vals: - assert self.env['runbot_merge.pull_requests'].browse(vals['pr_id'])\ - .repository == self.repository_id - - return super().write(vals) - -class BumpPullRequest(models.Model): - _name = 'runbot_merge.project.freeze.bumps' - _description = "links to pull requests used to \"bump\" the development branches" - - wizard_id = fields.Many2one('runbot_merge.project.freeze', required=True, index=True, ondelete='cascade') - repository_id = fields.Many2one('runbot_merge.repository', required=True) - pr_id = fields.Many2one( - 'runbot_merge.pull_requests', - domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]', - string="Release Pull Request", - ) - label = fields.Char(related='pr_id.label') - - def write(self, vals): - # only the pr should be writeable after initial creation - assert 'wizard_id' not in vals - assert 'repository_id' not in vals - # and if the PR gets set, it should match the requested repository - if 'pr_id' in vals: - assert self.env['runbot_merge.pull_requests'].browse(vals['pr_id'])\ - .repository == self.repository_id - - return super().write(vals) - -class RepositoryFreeze(models.Model): - _inherit = 'runbot_merge.repository' - freeze = fields.Boolean(required=True, default=True, - help="Freeze this repository by default") - -@enum.unique -class Colors(enum.IntEnum): - No = 0 - Red = 1 - Orange = 2 - Yellow = 3 - LightBlue = 4 - DarkPurple = 5 - Salmon = 6 - MediumBlue = 7 - DarkBlue = 8 - Fuchsia = 9 - Green = 10 - Purple = 11 - -STATE_COLORMAP = { - 'opened': Colors.No, - 'closed': Colors.Orange, - 'validated': Colors.No, - 'approved': Colors.No, - 'ready': 
Colors.LightBlue, - 'merged': Colors.Green, - 'error': Colors.Red, -} -class PullRequestColor(models.Model): - _inherit = 'runbot_merge.pull_requests' - - state_color = fields.Integer(compute='_compute_state_color') - - @api.depends('state') - def _compute_state_color(self): - for p in self: - p.state_color = STATE_COLORMAP[p.state] diff --git a/runbot_merge/models/project_freeze/views.xml b/runbot_merge/models/project_freeze/views.xml deleted file mode 100644 index de0836cb..00000000 --- a/runbot_merge/models/project_freeze/views.xml +++ /dev/null @@ -1,94 +0,0 @@ -<odoo> - <template id="runbot_merge_freeze_assets" inherit_id="web.assets_backend" active="True"> - <xpath expr="." position="inside"> - <script type="text/javascript" src="/runbot_merge/static/project_freeze/index.js"></script> - </xpath> - </template> - - <record id="runbot_merge_project_freeze_form" model="ir.ui.view"> - <field name="name">Freeze Wizard Configuration Screen</field> - <field name="model">runbot_merge.project.freeze</field> - <field name="arch" type="xml"> - <form js_class="freeze_wizard"> - <sheet> - <div class="alert alert-warning" role="alert" - attrs="{'invisible': [('errors', '=', False)]}"> - <field name="errors" readonly="True"/> - </div> - <group> - <group colspan="2"> - <field name="branch_name"/> - <field name="required_pr_ids" widget="many2many_tags" - options="{'color_field': 'state_color', 'no_create': True}"/> - </group> - </group> - <group> - <group colspan="2" string="Release"> - <p> - Release (freeze) PRs, provide the first commit - of the new branches. Each PR must have a single - commit. - </p> - <p class="alert alert-warning" role="alert"> - These PRs will be merged directly, not staged. - </p> - <field name="release_label"/> - <field name="release_pr_ids" nolabel="1"> - <tree editable="bottom" create="false"> - <field name="repository_id" readonly="1"/> - <field name="pr_id" options="{'no_create': True}" - context="{'pr_include_title': 1}"/> - <field name="label"/> - </tree> - </field> - </group> - </group> - <group> - <group colspan="2" string="Bump"> - <p> - Bump PRs, provide the first commit of the source - branches after the release has been cut. - </p> - <p class="alert alert-warning" role="alert"> - These PRs will be merged directly, not staged. 
- </p> - <field name="bump_label"/> - <field name="bump_pr_ids" nolabel="1"> - <tree editable="bottom" create="false"> - <field name="repository_id" readonly="1"/> - <field name="pr_id" options="{'no_create': True}" - context="{'pr_include_title': 1}"/> - <field name="label"/> - </tree> - </field> - </group> - </group> - <footer> - <!-- - the operator should always be able to try freezing, in - case the smart form blows up or whatever, but change - the style of the button if the form has "no errors" - --> - <button string="Freeze" type="object" name="action_freeze" - class="btn-success" attrs="{'invisible': [('errors', '!=', False)]}"/> - <button string="Freeze" type="object" name="action_freeze" - class="btn-primary" attrs="{'invisible': [('errors', '=', False)]}"/> - <button string="Save & Close" special="save"/> - <button string="Cancel" type="object" name="action_cancel" class="btn-warning"/> - </footer> - </sheet> - </form> - </field> - </record> - - <record id="runbot_merge_repository_freeze" model="ir.ui.view"> - <field name="name">Add freeze field to repo form</field> - <field name="model">runbot_merge.repository</field> - <field name="inherit_id" ref="form_repository"/> - <field name="arch" type="xml"> - <field name="branch_filter" position="after"> - <field name="freeze"/> - </field> - </field> - </record> -</odoo> diff --git a/runbot_merge/models/pull_requests.py b/runbot_merge/models/pull_requests.py deleted file mode 100644 index 0b6168e1..00000000 --- a/runbot_merge/models/pull_requests.py +++ /dev/null @@ -1,2304 +0,0 @@ -# coding: utf-8 - -import ast -import base64 -import collections -import contextlib -import datetime -import io -import itertools -import json -import logging -import os -import pprint -import re -import time - -from itertools import takewhile - -import requests -import werkzeug -from werkzeug.datastructures import Headers - -from odoo import api, fields, models, tools -from odoo.exceptions import ValidationError -from odoo.osv import expression -from odoo.tools import OrderedSet - -from .. 
import github, exceptions, controllers, utils - -WAIT_FOR_VISIBILITY = [10, 10, 10, 10] - -_logger = logging.getLogger(__name__) - - -class StatusConfiguration(models.Model): - _name = 'runbot_merge.repository.status' - _description = "required statuses on repositories" - _rec_name = 'context' - _log_access = False - - context = fields.Char(required=True) - repo_id = fields.Many2one('runbot_merge.repository', required=True, ondelete='cascade') - branch_filter = fields.Char(help="branches this status applies to") - prs = fields.Boolean(string="Applies to pull requests", default=True) - stagings = fields.Boolean(string="Applies to stagings", default=True) - - def _for_branch(self, branch): - assert branch._name == 'runbot_merge.branch', \ - f'Expected branch, got {branch}' - return self.filtered(lambda st: ( - not st.branch_filter - or branch.filtered_domain(ast.literal_eval(st.branch_filter)) - )) - def _for_pr(self, pr): - assert pr._name == 'runbot_merge.pull_requests', \ - f'Expected pull request, got {pr}' - return self._for_branch(pr.target).filtered('prs') - def _for_staging(self, staging): - assert staging._name == 'runbot_merge.stagings', \ - f'Expected staging, got {staging}' - return self._for_branch(staging.target).filtered('stagings') - -class Repository(models.Model): - _name = _description = 'runbot_merge.repository' - _order = 'sequence, id' - - sequence = fields.Integer(default=50) - name = fields.Char(required=True) - project_id = fields.Many2one('runbot_merge.project', required=True) - status_ids = fields.One2many('runbot_merge.repository.status', 'repo_id', string="Required Statuses") - - group_id = fields.Many2one('res.groups', default=lambda self: self.env.ref('base.group_user')) - - branch_filter = fields.Char(default='[(1, "=", 1)]', help="Filter branches valid for this repository") - substitutions = fields.Text( - "label substitutions", - help="""sed-style substitution patterns applied to the label on input, one per line. - -All substitutions are tentatively applied sequentially to the input. -""") - - @api.model - def create(self, vals): - if 'status_ids' in vals: - return super().create(vals) - - st = vals.pop('required_statuses', 'legal/cla,ci/runbot') - if st: - vals['status_ids'] = [(0, 0, {'context': c}) for c in st.split(',')] - return super().create(vals) - - def write(self, vals): - st = vals.pop('required_statuses', None) - if st: - vals['status_ids'] = [(5, 0, {})] + [(0, 0, {'context': c}) for c in st.split(',')] - return super().write(vals) - - def github(self, token_field='github_token'): - return github.GH(self.project_id[token_field], self.name) - - def _auto_init(self): - res = super(Repository, self)._auto_init() - tools.create_unique_index( - self._cr, 'runbot_merge_unique_repo', self._table, ['name']) - return res - - def _load_pr(self, number): - gh = self.github() - - # fetch PR object and handle as *opened* - issue, pr = gh.pr(number) - - feedback = self.env['runbot_merge.pull_requests.feedback'].create - if not self.project_id._has_branch(pr['base']['ref']): - _logger.info("Tasked with loading PR %d for un-managed branch %s:%s, ignoring", - number, self.name, pr['base']['ref']) - feedback({ - 'repository': self.id, - 'pull_request': number, - 'message': "Branch `{}` is not within my remit, imma just ignore it.".format(pr['base']['ref']), - }) - return - - # if the PR is already loaded, check... if the heads match? 
- pr_id = self.env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', pr['base']['repo']['full_name']), - ('number', '=', number), - ]) - if pr_id: - # TODO: edited, maybe (requires crafting a 'changes' object) - r = controllers.handle_pr(self.env, { - 'action': 'synchronize', - 'pull_request': pr, - 'sender': {'login': self.project_id.github_prefix} - }) - feedback({ - 'repository': pr_id.repository.id, - 'pull_request': number, - 'message': r, - }) - return - - feedback({ - 'repository': self.id, - 'pull_request': number, - 'message': "%sI didn't know about this PR and had to retrieve " - "its information, you may have to re-approve it as " - "I didn't see previous commands." % pr_id.ping() - }) - sender = {'login': self.project_id.github_prefix} - # init the PR to the null commit so we can later synchronise it back - # back to the "proper" head while resetting reviews - controllers.handle_pr(self.env, { - 'action': 'opened', - 'pull_request': { - **pr, - 'head': {**pr['head'], 'sha': '0'*40}, - 'state': 'open', - }, - 'sender': sender, - }) - # fetch & set up actual head - for st in gh.statuses(pr['head']['sha']): - controllers.handle_status(self.env, st) - # fetch and apply comments - counter = itertools.count() - items = [ # use counter so `comment` and `review` don't get hit during sort - (comment['created_at'], next(counter), False, comment) - for comment in gh.comments(number) - ] + [ - (review['submitted_at'], next(counter), True, review) - for review in gh.reviews(number) - ] - items.sort() - for _, _, is_review, item in items: - if is_review: - controllers.handle_review(self.env, { - 'action': 'submitted', - 'review': item, - 'pull_request': pr, - 'repository': {'full_name': self.name}, - 'sender': sender, - }) - else: - controllers.handle_comment(self.env, { - 'action': 'created', - 'issue': issue, - 'comment': item, - 'repository': {'full_name': self.name}, - 'sender': sender, - }) - # sync to real head - controllers.handle_pr(self.env, { - 'action': 'synchronize', - 'pull_request': pr, - 'sender': sender, - }) - if pr['state'] == 'closed': - # don't go through controller because try_closing does weird things - # for safety / race condition reasons which ends up committing - # and breaks everything - self.env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', pr['base']['repo']['full_name']), - ('number', '=', number), - ]).state = 'closed' - - def having_branch(self, branch): - branches = self.env['runbot_merge.branch'].search - return self.filtered(lambda r: branch in branches(ast.literal_eval(r.branch_filter))) - - def _remap_label(self, label): - for line in filter(None, (self.substitutions or '').splitlines()): - sep = line[0] - _, pattern, repl, flags = line.split(sep) - label = re.sub( - pattern, repl, label, - count=0 if 'g' in flags else 1, - flags=(re.MULTILINE if 'm' in flags.lower() else 0) - | (re.IGNORECASE if 'i' in flags.lower() else 0) - ) - return label - -class Branch(models.Model): - _name = _description = 'runbot_merge.branch' - _order = 'sequence, name' - - name = fields.Char(required=True) - project_id = fields.Many2one('runbot_merge.project', required=True) - - active_staging_id = fields.Many2one( - 'runbot_merge.stagings', compute='_compute_active_staging', store=True, - help="Currently running staging for the branch." 
- ) - staging_ids = fields.One2many('runbot_merge.stagings', 'target') - split_ids = fields.One2many('runbot_merge.split', 'target') - - prs = fields.One2many('runbot_merge.pull_requests', 'target', domain=[ - ('state', '!=', 'closed'), - ('state', '!=', 'merged'), - ]) - - active = fields.Boolean(default=True) - sequence = fields.Integer() - - def _auto_init(self): - res = super(Branch, self)._auto_init() - tools.create_unique_index( - self._cr, 'runbot_merge_unique_branch_per_repo', - self._table, ['name', 'project_id']) - return res - - @api.depends('active') - def _compute_display_name(self): - super()._compute_display_name() - for b in self.filtered(lambda b: not b.active): - b.display_name += ' (inactive)' - - def write(self, vals): - super().write(vals) - if vals.get('active') is False: - self.env['runbot_merge.pull_requests.feedback'].create([{ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'close': True, - 'message': f'{pr.ping()}the target branch {pr.target.name!r} has been disabled, closing this PR.', - } for pr in self.prs]) - return True - - @api.depends('staging_ids.active') - def _compute_active_staging(self): - for b in self: - b.active_staging_id = b.with_context(active_test=True).staging_ids - - def _ready(self): - self.env.cr.execute(""" - SELECT - min(pr.priority) as priority, - array_agg(pr.id) AS match - FROM runbot_merge_pull_requests pr - WHERE pr.target = any(%s) - -- exclude terminal states (so there's no issue when - -- deleting branches & reusing labels) - AND pr.state != 'merged' - AND pr.state != 'closed' - GROUP BY - pr.target, - CASE - WHEN pr.label SIMILAR TO '%%:patch-[[:digit:]]+' - THEN pr.id::text - ELSE pr.label - END - HAVING - bool_or(pr.state = 'ready') or bool_or(pr.priority = 0) - ORDER BY min(pr.priority), min(pr.id) - """, [self.ids]) - browse = self.env['runbot_merge.pull_requests'].browse - return [(p, browse(ids)) for p, ids in self.env.cr.fetchall()] - - def _stageable(self): - return [ - (p, prs) - for p, prs in self._ready() - if not any(prs.mapped('blocked')) - ] - - def try_staging(self): - """ Tries to create a staging if the current branch does not already - have one. Returns None if the branch already has a staging or there - is nothing to stage, the newly created staging otherwise. - """ - logger = _logger.getChild('cron') - - logger.info( - "Checking %s (%s) for staging: %s, skip? %s", - self, self.name, - self.active_staging_id, - bool(self.active_staging_id) - ) - if self.active_staging_id: - return - - rows = self._stageable() - priority = rows[0][0] if rows else -1 - if priority == 0 or priority == 1: - # p=0 take precedence over all else - # p=1 allows merging a fix inside / ahead of a split (e.g. 
branch - # is broken or widespread false positive) without having to cancel - # the existing staging - batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)] - elif self.split_ids: - split_ids = self.split_ids[0] - logger.info("Found split of PRs %s, re-staging", split_ids.mapped('batch_ids.prs')) - batched_prs = [batch.prs for batch in split_ids.batch_ids] - split_ids.unlink() - else: # p=2 - batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)] - - if not batched_prs: - return - - Batch = self.env['runbot_merge.batch'] - staged = Batch - original_heads = {} - meta = {repo: {} for repo in self.project_id.repo_ids.having_branch(self)} - for repo, it in meta.items(): - gh = it['gh'] = repo.github() - it['head'] = original_heads[repo] = gh.head(self.name) - # create tmp staging branch - gh.set_ref('tmp.{}'.format(self.name), it['head']) - - batch_limit = self.project_id.batch_limit - first = True - for batch in batched_prs: - if len(staged) >= batch_limit: - break - try: - staged |= Batch.stage(meta, batch) - except exceptions.MergeError as e: - pr = e.args[0] - _logger.exception("Failed to merge %s into staging branch", pr.display_name) - if first or isinstance(e, exceptions.Unmergeable): - if len(e.args) > 1 and e.args[1]: - reason = e.args[1] - else: - reason = e.__context__ - # if the reason is a json document, assume it's a github - # error and try to extract the error message to give it to - # the user - with contextlib.suppress(Exception): - reason = json.loads(str(reason))['message'].lower() - - pr.state = 'error' - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': f'{pr.ping()}unable to stage: {reason}', - }) - else: - first = False - - if not staged: - return - - heads = {} - for repo, it in meta.items(): - tree = it['gh'].commit(it['head'])['tree'] - # ensures staging branches are unique and always - # rebuilt - r = base64.b64encode(os.urandom(12)).decode('ascii') - trailer = '' - if heads: - trailer = '\n'.join( - 'Runbot-dependency: %s:%s' % (repo, h) - for repo, h in heads.items() - if not repo.endswith('^') - ) - dummy_head = {'sha': it['head']} - if it['head'] == original_heads[repo]: - # if the repo has not been updated by the staging, create a - # dummy commit to force rebuild - dummy_head = it['gh']('post', 'git/commits', json={ - 'message': '''force rebuild - -uniquifier: %s -For-Commit-Id: %s -%s''' % (r, it['head'], trailer), - 'tree': tree['sha'], - 'parents': [it['head']], - }).json() - - # $repo is the head to check, $repo^ is the head to merge (they - # might be the same) - heads[repo.name + '^'] = it['head'] - heads[repo.name] = dummy_head['sha'] - self.env.cr.execute( - "INSERT INTO runbot_merge_commit (sha, to_check, statuses) " - "VALUES (%s, true, '{}') " - "ON CONFLICT (sha) DO UPDATE SET to_check=true", - [dummy_head['sha']] - ) - - # create actual staging object - st = self.env['runbot_merge.stagings'].create({ - 'target': self.id, - 'batch_ids': [(4, batch.id, 0) for batch in staged], - 'heads': json.dumps(heads) - }) - # create staging branch from tmp - token = self.project_id.github_token - for r in self.project_id.repo_ids.having_branch(self): - it = meta[r] - staging_head = heads[r.name] - _logger.info( - "%s: create staging for %s:%s at %s", - self.project_id.name, r.name, self.name, - staging_head - ) - refname = 'staging.{}'.format(self.name) - it['gh'].set_ref(refname, staging_head) - # asserts that the new 
head is visible through the api - head = it['gh'].head(refname) - assert head == staging_head,\ - "[api] updated %s:%s to %s but found %s" % ( - r.name, refname, - staging_head, head, - ) - - i = itertools.count() - @utils.backoff(delays=WAIT_FOR_VISIBILITY, exc=TimeoutError) - def wait_for_visibility(): - if self._check_visibility(r, refname, staging_head, token): - _logger.info( - "[repo] updated %s:%s to %s: ok (at %d/%d)", - r.name, refname, staging_head, - next(i), len(WAIT_FOR_VISIBILITY) - ) - return - _logger.warning( - "[repo] updated %s:%s to %s: failed (at %d/%d)", - r.name, refname, staging_head, - next(i), len(WAIT_FOR_VISIBILITY) - ) - raise TimeoutError("Staged head not updated after %d seconds" % sum(WAIT_FOR_VISIBILITY)) - - logger.info("Created staging %s (%s) to %s", st, ', '.join( - '%s[%s]' % (batch, batch.prs) - for batch in staged - ), st.target.name) - return st - - def _check_visibility(self, repo, branch_name, expected_head, token): - """ Checks the repository actual to see if the new / expected head is - now visible - """ - # v1 protocol provides URL for ref discovery: https://github.com/git/git/blob/6e0cc6776106079ed4efa0cc9abace4107657abf/Documentation/technical/http-protocol.txt#L187 - # for more complete client this is also the capabilities discovery and - # the "entry point" for the service - url = 'https://github.com/{}.git/info/refs?service=git-upload-pack'.format(repo.name) - with requests.get(url, stream=True, auth=(token, '')) as resp: - if not resp.ok: - return False - for head, ref in parse_refs_smart(resp.raw.read): - if ref != ('refs/heads/' + branch_name): - continue - return head == expected_head - return False - -ACL = collections.namedtuple('ACL', 'is_admin is_reviewer is_author') -class PullRequests(models.Model): - _name = _description = 'runbot_merge.pull_requests' - _order = 'number desc' - _rec_name = 'number' - - target = fields.Many2one('runbot_merge.branch', required=True, index=True) - repository = fields.Many2one('runbot_merge.repository', required=True) - # NB: check that target & repo have same project & provide project related? - - state = fields.Selection([ - ('opened', 'Opened'), - ('closed', 'Closed'), - ('validated', 'Validated'), - ('approved', 'Approved'), - ('ready', 'Ready'), - # staged? 
- ('merged', 'Merged'), - ('error', 'Error'), - ], default='opened', index=True) - - number = fields.Integer(required=True, index=True) - author = fields.Many2one('res.partner') - head = fields.Char(required=True) - label = fields.Char( - required=True, index=True, - help="Label of the source branch (owner:branchname), used for " - "cross-repository branch-matching" - ) - message = fields.Text(required=True) - draft = fields.Boolean(default=False, required=True) - squash = fields.Boolean(default=False) - merge_method = fields.Selection([ - ('merge', "merge directly, using the PR as merge commit message"), - ('rebase-merge', "rebase and merge, using the PR as merge commit message"), - ('rebase-ff', "rebase and fast-forward"), - ('squash', "squash"), - ], default=False) - method_warned = fields.Boolean(default=False) - - reviewed_by = fields.Many2one('res.partner') - delegates = fields.Many2many('res.partner', help="Delegate reviewers, not intrinsically reviewers but can review this PR") - priority = fields.Integer(default=2, index=True) - - overrides = fields.Char(required=True, default='{}') - statuses = fields.Text( - compute='_compute_statuses', - help="Copy of the statuses from the HEAD commit, as a Python literal" - ) - statuses_full = fields.Text( - compute='_compute_statuses', - help="Compilation of the full status of the PR (commit statuses + overrides), as JSON" - ) - status = fields.Char(compute='_compute_statuses') - previous_failure = fields.Char(default='{}') - - batch_id = fields.Many2one('runbot_merge.batch', string="Active Batch", compute='_compute_active_batch', store=True) - batch_ids = fields.Many2many('runbot_merge.batch', string="Batches", context={'active_test': False}) - staging_id = fields.Many2one(related='batch_id.staging_id', store=True) - commits_map = fields.Char(help="JSON-encoded mapping of PR commits to actually integrated commits. 
The integration head (either a merge commit or the PR's topmost) is mapped from the 'empty' pr commit (the key is an empty string, because you can't put a null key in json maps).", default='{}') - - link_warned = fields.Boolean( - default=False, help="Whether we've already warned that this (ready)" - " PR is linked to an other non-ready PR" - ) - - blocked = fields.Char( - compute='_compute_is_blocked', - help="PR is not currently stageable for some reason (mostly an issue if status is ready)" - ) - - url = fields.Char(compute='_compute_url') - github_url = fields.Char(compute='_compute_url') - - repo_name = fields.Char(related='repository.name') - message_title = fields.Char(compute='_compute_message_title') - - def ping(self, author=True, reviewer=True): - P = self.env['res.partner'] - s = ' '.join( - f'@{p.github_login}' - for p in (self.author if author else P) | (self.reviewed_by if reviewer else P) - if p - ) - if s: - s += ' ' - return s - - @api.depends('repository.name', 'number') - def _compute_url(self): - base = werkzeug.urls.url_parse(self.env['ir.config_parameter'].sudo().get_param('web.base.url', 'http://localhost:8069')) - gh_base = werkzeug.urls.url_parse('https://github.com') - for pr in self: - path = f'/{werkzeug.urls.url_quote(pr.repository.name)}/pull/{pr.number}' - pr.url = str(base.join(path)) - pr.github_url = str(gh_base.join(path)) - - @api.depends('message') - def _compute_message_title(self): - for pr in self: - pr.message_title = next(iter(pr.message.splitlines()), '') - - @api.depends('repository.name', 'number', 'message') - def _compute_display_name(self): - return super(PullRequests, self)._compute_display_name() - - def name_get(self): - name_template = '%(repo_name)s#%(number)d' - if self.env.context.get('pr_include_title'): - name_template += ' (%(message_title)s)' - return [(p.id, name_template % p) for p in self] - - @api.model - def name_search(self, name='', args=None, operator='ilike', limit=100): - if not name or operator != 'ilike': - return super().name_search(name, args=args, operator=operator, limit=limit) - bits = [[('label', 'ilike', name)]] - if name.isdigit(): - bits.append([('number', '=', name)]) - if re.match(r'\w+#\d+$', name): - repo, num = name.rsplit('#', 1) - bits.append(['&', ('repository.name', 'ilike', repo), ('number', '=', int(num))]) - else: - bits.append([('repository.name', 'ilike', name)]) - domain = expression.OR(bits) - if args: - domain = expression.AND([args, domain]) - return self.search(domain, limit=limit).sudo().name_get() - - @property - def _approved(self): - return self.state in ('approved', 'ready') or any( - p.priority == 0 - for p in (self | self._linked_prs) - ) - - @property - def _ready(self): - return (self.squash or self.merge_method) and self._approved and self.status == 'success' - - @property - def _linked_prs(self): - if re.search(r':patch-\d+', self.label): - return self.browse(()) - if self.state == 'merged': - return self.with_context(active_test=False).batch_ids\ - .filtered(lambda b: b.staging_id.state == 'success')\ - .prs - self - return self.search([ - ('target', '=', self.target.id), - ('label', '=', self.label), - ('state', 'not in', ('merged', 'closed')), - ]) - self - - # missing link to other PRs - @api.depends('priority', 'state', 'squash', 'merge_method', 'batch_id.active', 'label') - def _compute_is_blocked(self): - self.blocked = False - for pr in self: - if pr.state in ('merged', 'closed'): - continue - - linked = pr._linked_prs - # check if PRs are configured (single commit or 
merge method set) - if not (pr.squash or pr.merge_method): - pr.blocked = 'has no merge method' - continue - other_unset = next((p for p in linked if not (p.squash or p.merge_method)), None) - if other_unset: - pr.blocked = "linked PR %s has no merge method" % other_unset.display_name - continue - - # check if any PR in the batch is p=0 and none is in error - if any(p.priority == 0 for p in (pr | linked)): - if pr.state == 'error': - pr.blocked = "in error" - other_error = next((p for p in linked if p.state == 'error'), None) - if other_error: - pr.blocked = "linked pr %s in error" % other_error.display_name - # if none is in error then none is blocked because p=0 - # "unblocks" the entire batch - continue - - if pr.state != 'ready': - pr.blocked = 'not ready' - continue - - unready = next((p for p in linked if p.state != 'ready'), None) - if unready: - pr.blocked = 'linked pr %s is not ready' % unready.display_name - continue - - def _get_overrides(self): - if self: - return json.loads(self.overrides) - return {} - - @api.depends('head', 'repository.status_ids', 'overrides') - def _compute_statuses(self): - Commits = self.env['runbot_merge.commit'] - for pr in self: - c = Commits.search([('sha', '=', pr.head)]) - st = json.loads(c.statuses or '{}') - statuses = {**st, **pr._get_overrides()} - pr.statuses_full = json.dumps(statuses) - if not statuses: - pr.status = pr.statuses = False - continue - - pr.statuses = pprint.pformat(st) - - st = 'success' - for ci in pr.repository.status_ids._for_pr(pr): - v = state_(statuses, ci.context) or 'pending' - if v in ('error', 'failure'): - st = 'failure' - break - if v == 'pending': - st = 'pending' - pr.status = st - - @api.depends('batch_ids.active') - def _compute_active_batch(self): - for r in self: - r.batch_id = r.batch_ids.filtered(lambda b: b.active)[:1] - - def _get_or_schedule(self, repo_name, number, target=None): - repo = self.env['runbot_merge.repository'].search([('name', '=', repo_name)]) - if not repo: - return - - if target and not repo.project_id._has_branch(target): - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': repo.id, - 'pull_request': number, - 'message': "I'm sorry. Branch `{}` is not within my remit.".format(target), - }) - return - - pr = self.search([ - ('repository', '=', repo.id), - ('number', '=', number,) - ]) - if pr: - return pr - - Fetch = self.env['runbot_merge.fetch_job'] - if Fetch.search([('repository', '=', repo.id), ('number', '=', number)]): - return - Fetch.create({ - 'repository': repo.id, - 'number': number, - }) - - def _parse_command(self, commandstring): - for m in re.finditer( - r'(\S+?)(?:([+-])|=(\S*))?(?=\s|$)', - commandstring, - ): - name, flag, param = m.groups() - if name == 'r': - name = 'review' - if flag in ('+', '-'): - yield name, flag == '+' - elif name == 'delegate': - if param: - for p in param.split(','): - yield 'delegate', p.lstrip('#@') - elif name == 'override': - if param: - for p in param.split(','): - yield 'override', p - elif name in ('p', 'priority'): - if param in ('0', '1', '2'): - yield ('priority', int(param)) - elif any(name == k for k, _ in type(self).merge_method.selection): - yield ('method', name) - else: - yield name, param - - def _parse_commands(self, author, comment, login): - """Parses a command string prefixed by Project::github_prefix. 
- - A command string can contain any number of space-separated commands: - - retry - resets a PR in error mode to ready for staging - r(eview)+/- - approves or disapproves a PR (disapproving just cancels an approval) - delegate+/delegate=<users> - adds either PR author or the specified (github) users as - authorised reviewers for this PR. ``<users>`` is a - comma-separated list of github usernames (no @) - p(riority)=2|1|0 - sets the priority to normal (2), pressing (1) or urgent (0). - Lower-priority PRs are selected first and batched together. - rebase+/- - Whether the PR should be rebased-and-merged (the default) or just - merged normally. - """ - assert self, "parsing commands must be executed in an actual PR" - - (login, name) = (author.github_login, author.display_name) if author else (login, 'not in system') - - is_admin, is_reviewer, is_author = self._pr_acl(author) - - commands = [ - ps - for m in self.repository.project_id._find_commands(comment['body'] or '') - for ps in self._parse_command(m) - ] - - if not commands: - _logger.info("found no commands in comment of %s (%s) (%s)", author.github_login, author.display_name, - utils.shorten(comment['body'] or '', 50) - ) - return 'ok' - - Feedback = self.env['runbot_merge.pull_requests.feedback'] - if not (is_author or any(cmd == 'override' for cmd, _ in commands)): - # no point even parsing commands - _logger.info("ignoring comment of %s (%s): no ACL to %s", - login, name, self.display_name) - Feedback.create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': "I'm sorry, @{}. I'm afraid I can't do that.".format(login) - }) - return 'ignored' - - applied, ignored = [], [] - def reformat(command, param): - if param is None: - pstr = '' - elif isinstance(param, bool): - pstr = '+' if param else '-' - elif isinstance(param, list): - pstr = '=' + ','.join(param) - else: - pstr = '={}'.format(param) - - return '%s%s' % (command, pstr) - msgs = [] - for command, param in commands: - ok = False - msg = None - if command == 'retry': - if is_author: - if self.state == 'error': - ok = True - self.state = 'ready' - else: - msg = "retry makes no sense when the PR is not in error." - elif command == 'check': - if is_author: - self.env['runbot_merge.fetch_job'].create({ - 'repository': self.repository.id, - 'number': self.number, - }) - ok = True - elif command == 'review': - if self.draft: - msg = "draft PRs can not be approved." - elif param and is_reviewer: - oldstate = self.state - newstate = RPLUS.get(self.state) - if not author.email: - msg = "I must know your email before you can review PRs. Please contact an administrator." - elif not newstate: - msg = "this PR is already reviewed, reviewing it again is useless." - else: - self.state = newstate - self.reviewed_by = author - ok = True - _logger.debug( - "r+ on %s by %s (%s->%s) status=%s message? 
%s", - self.display_name, author.github_login, - oldstate, newstate or oldstate, - self.status, self.status == 'failure' - ) - if self.status == 'failure': - # the normal infrastructure is for failure and - # prefixes messages with "I'm sorry" - Feedback.create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': "@{} you may want to rebuild or fix this PR as it has failed CI.".format(login), - }) - elif not param and is_author: - newstate = RMINUS.get(self.state) - if self.priority == 0 or newstate: - if newstate: - self.state = newstate - if self.priority == 0: - self.priority = 1 - Feedback.create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': "PR priority reset to 1, as pull requests with priority 0 ignore review state.", - }) - self.unstage("unreviewed (r-) by %s", login) - ok = True - else: - msg = "r- makes no sense in the current PR state." - elif command == 'delegate': - if is_reviewer: - ok = True - Partners = self.env['res.partner'] - if param is True: - delegate = self.author - else: - delegate = Partners.search([('github_login', '=', param)]) or Partners.create({ - 'name': param, - 'github_login': param, - }) - delegate.write({'delegate_reviewer': [(4, self.id, 0)]}) - elif command == 'priority': - if is_admin: - ok = True - self.priority = param - if param == 0: - self.target.active_staging_id.cancel( - "P=0 on %s by %s, unstaging target %s", - self.display_name, - author.github_login, self.target.name, - ) - elif command == 'method': - if is_reviewer: - if param == 'squash' and not self.squash: - msg = "squash can only be used with a single commit at this time." - else: - self.merge_method = param - ok = True - explanation = next(label for value, label in type(self).merge_method.selection if value == param) - Feedback.create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message':"Merge method set to %s." % explanation - }) - elif command == 'override': - overridable = author.override_rights\ - .filtered(lambda r: not r.repository_id or (r.repository_id == self.repository))\ - .mapped('context') - if param in overridable: - self.overrides = json.dumps({ - **json.loads(self.overrides), - param: { - 'state': 'success', - 'target_url': comment['html_url'], - 'description': f"Overridden by @{author.github_login}", - }, - }) - c = self.env['runbot_merge.commit'].search([('sha', '=', self.head)]) - if c: - c.to_check = True - else: - c.create({'sha': self.head, 'statuses': '{}'}) - ok = True - else: - msg = "you are not allowed to override this status." 
- else: - # ignore unknown commands - continue - - _logger.info( - "%s %s(%s) on %s by %s (%s)", - "applied" if ok else "ignored", - command, param, self.display_name, - author.github_login, author.display_name, - ) - if ok: - applied.append(reformat(command, param)) - else: - ignored.append(reformat(command, param)) - msgs.append(msg or "you can't {}.".format(reformat(command, param))) - - if msgs: - joiner = ' ' if len(msgs) == 1 else '\n- ' - msgs.insert(0, "I'm sorry, @{}:".format(login)) - Feedback.create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': joiner.join(msgs), - }) - - msg = [] - if applied: - msg.append('applied ' + ' '.join(applied)) - if ignored: - ignoredstr = ' '.join(ignored) - msg.append('ignored ' + ignoredstr) - return '\n'.join(msg) - - def _pr_acl(self, user): - if not self: - return ACL(False, False, False) - - is_admin = self.env['res.partner.review'].search_count([ - ('partner_id', '=', user.id), - ('repository_id', '=', self.repository.id), - ('review', '=', True) if self.author != user else ('self_review', '=', True), - ]) == 1 - is_reviewer = is_admin or self in user.delegate_reviewer - # TODO: should delegate reviewers be able to retry PRs? - is_author = is_reviewer or self.author == user - return ACL(is_admin, is_reviewer, is_author) - - def _validate(self, statuses): - # could have two PRs (e.g. one open and one closed) at least - # temporarily on the same head, or on the same head with different - # targets - failed = self.browse(()) - for pr in self: - required = pr.repository.status_ids._for_pr(pr).mapped('context') - sts = {**statuses, **pr._get_overrides()} - - success = True - for ci in required: - st = state_(sts, ci) or 'pending' - if st == 'success': - continue - - success = False - if st in ('error', 'failure'): - failed |= pr - pr._notify_ci_new_failure(ci, to_status(sts.get(ci.strip(), 'pending'))) - if success: - oldstate = pr.state - if oldstate == 'opened': - pr.state = 'validated' - elif oldstate == 'approved': - pr.state = 'ready' - return failed - - def _notify_ci_new_failure(self, ci, st): - prev = json.loads(self.previous_failure) - if prev.get('state'): # old-style previous-failure - prev = {ci: prev} - if not any(self._statuses_equivalent(st, v) for v in prev.values()): - prev[ci] = st - self.previous_failure = json.dumps(prev) - self._notify_ci_failed(ci) - - def _notify_merged(self, gh, payload): - deployment = gh('POST', 'deployments', json={ - 'ref': self.head, 'environment': 'merge', - 'description': "Merge %s into %s" % (self.display_name, self.target.name), - 'task': 'merge', - 'auto_merge': False, - 'required_contexts': [], - }).json() - gh('POST', 'deployments/{}/statuses'.format(deployment['id']), json={ - 'state': 'success', - 'target_url': 'https://github.com/{}/commit/{}'.format( - self.repository.name, - payload['sha'], - ), - 'description': "Merged %s in %s at %s" % ( - self.display_name, self.target.name, payload['sha'] - ) - }) - - def _statuses_equivalent(self, a, b): - """ Check if two statuses are *equivalent* meaning the description field - is ignored (check only state and target_url). 
This is because the - description seems to vary even if the rest does not, and generates - unnecessary notififcations as a result - """ - return a.get('state') == b.get('state') \ - and a.get('target_url') == b.get('target_url') - - def _notify_ci_failed(self, ci): - # only report an issue of the PR is already approved (r+'d) - if self.state == 'approved': - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': self.repository.id, - 'pull_request': self.number, - 'message': "%s%r failed on this reviewed PR." % (self.ping(), ci), - }) - - def _auto_init(self): - super(PullRequests, self)._auto_init() - # incorrect index: unique(number, target, repository). - tools.drop_index(self._cr, 'runbot_merge_unique_pr_per_target', self._table) - # correct index: - tools.create_unique_index( - self._cr, 'runbot_merge_unique_pr_per_repo', self._table, ['repository', 'number']) - self._cr.execute("CREATE INDEX IF NOT EXISTS runbot_merge_pr_head " - "ON runbot_merge_pull_requests " - "USING hash (head)") - - @property - def _tagstate(self): - if self.state == 'ready' and self.staging_id.heads: - return 'staged' - return self.state - - @api.model - def create(self, vals): - pr = super().create(vals) - c = self.env['runbot_merge.commit'].search([('sha', '=', pr.head)]) - pr._validate(json.loads(c.statuses or '{}')) - - if pr.state not in ('closed', 'merged'): - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': f"[Pull request status dashboard]({pr.url}).", - }) - return pr - - def _from_gh(self, description, author=None, branch=None, repo=None): - if repo is None: - repo = self.env['runbot_merge.repository'].search([ - ('name', '=', description['base']['repo']['full_name']), - ]) - if branch is None: - branch = self.env['runbot_merge.branch'].with_context(active_test=False).search([ - ('name', '=', description['base']['ref']), - ('project_id', '=', repo.project_id.id), - ]) - if author is None: - author = self.env['res.partner'].search([ - ('github_login', '=', description['user']['login']), - ], limit=1) - - message = description['title'].strip() - body = description['body'] and description['body'].strip() - if body: - message += '\n\n' + body - return self.env['runbot_merge.pull_requests'].create({ - 'state': 'opened' if description['state'] == 'open' else 'closed', - 'number': description['number'], - 'label': repo._remap_label(description['head']['label']), - 'author': author.id, - 'target': branch.id, - 'repository': repo.id, - 'head': description['head']['sha'], - 'squash': description['commits'] == 1, - 'message': message, - 'draft': description['draft'], - }) - - def write(self, vals): - if vals.get('squash'): - vals['merge_method'] = False - prev = None - if 'target' in vals or 'message' in vals: - prev = { - pr.id: {'target': pr.target, 'message': pr.message} - for pr in self - } - - w = super().write(vals) - - newhead = vals.get('head') - if newhead: - c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)]) - self._validate(json.loads(c.statuses or '{}')) - - if prev: - for pr in self: - old_target = prev[pr.id]['target'] - if pr.target != old_target: - pr.unstage( - "target (base) branch was changed from %r to %r", - old_target.display_name, pr.target.display_name, - ) - old_message = prev[pr.id]['message'] - if pr.merge_method in ('merge', 'rebase-merge') and pr.message != old_message: - pr.unstage("merge message updated") - return w - - def _check_linked_prs_statuses(self, 
commit=False): - """ Looks for linked PRs where at least one of the PRs is in a ready - state and the others are not, notifies the other PRs. - - :param bool commit: whether to commit the tnx after each comment - """ - # similar to Branch.try_staging's query as it's a subset of that - # other query's behaviour - self.env.cr.execute(""" - SELECT - array_agg(pr.id) AS match - FROM runbot_merge_pull_requests pr - WHERE - -- exclude terminal states (so there's no issue when - -- deleting branches & reusing labels) - pr.state != 'merged' - AND pr.state != 'closed' - GROUP BY - pr.target, - CASE - WHEN pr.label SIMILAR TO '%%:patch-[[:digit:]]+' - THEN pr.id::text - ELSE pr.label - END - HAVING - -- one of the batch's PRs should be ready & not marked - bool_or(pr.state = 'ready' AND NOT pr.link_warned) - -- one of the others should be unready - AND bool_or(pr.state != 'ready') - -- but ignore batches with one of the prs at p0 - AND bool_and(pr.priority != 0) - """) - for [ids] in self.env.cr.fetchall(): - prs = self.browse(ids) - ready = prs.filtered(lambda p: p.state == 'ready') - unready = (prs - ready).sorted(key=lambda p: (p.repository.name, p.number)) - - for r in ready: - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': r.repository.id, - 'pull_request': r.number, - 'message': "{}linked pull request(s) {} not ready. Linked PRs are not staged until all of them are ready.".format( - r.ping(), - ', '.join(map('{0.display_name}'.format, unready)) - ) - }) - r.link_warned = True - if commit: - self.env.cr.commit() - - # send feedback for multi-commit PRs without a merge_method (which - # we've not warned yet) - methods = ''.join( - '* `%s` to %s\n' % pair - for pair in type(self).merge_method.selection - if pair[0] != 'squash' - ) - for r in self.search([ - ('state', '=', 'ready'), - ('squash', '=', False), - ('merge_method', '=', False), - ('method_warned', '=', False), - ]): - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': r.repository.id, - 'pull_request': r.number, - 'message': "%sbecause this PR has multiple commits, I need to know how to merge it:\n\n%s" % ( - r.ping(), - methods, - ) - }) - r.method_warned = True - if commit: - self.env.cr.commit() - - def _parse_commit_message(self, message): - """ Parses a commit message to split out the pseudo-headers (which - should be at the end) from the body, and serialises back with a - predefined pseudo-headers ordering. 
- """ - return Message.from_message(message) - - def _is_mentioned(self, message, *, full_reference=False): - """Returns whether ``self`` is mentioned in ``message``` - - :param str | PullRequest message: - :param bool full_reference: whether the repository name must be present - :rtype: bool - """ - if full_reference: - pattern = fr'\b{re.escape(self.display_name)}\b' - else: - repository = self.repository.name # .replace('/', '\\/') - pattern = fr'( |\b{repository})#{self.number}\b' - return bool(re.search(pattern, message if isinstance(message, str) else message.message)) - - def _build_merge_message(self, message, related_prs=()): - # handle co-authored commits (https://help.github.com/articles/creating-a-commit-with-multiple-authors/) - m = self._parse_commit_message(message) - if not self._is_mentioned(message): - m.body += '\n\ncloses {pr.display_name}'.format(pr=self) - - for r in related_prs: - if not r._is_mentioned(message, full_reference=True): - m.headers.add('Related', r.display_name) - - if self.reviewed_by: - m.headers.add('signed-off-by', self.reviewed_by.formatted_email) - - return m - - def _add_self_references(self, commits): - """Adds a footer reference to ``self`` to all ``commits`` if they don't - already refer to the PR. - """ - for c in (c['commit'] for c in commits): - if not self._is_mentioned(c['message']): - m = self._parse_commit_message(c['message']) - m.headers.pop('Part-Of', None) - m.headers.add('Part-Of', self.display_name) - c['message'] = str(m) - - def _stage(self, gh, target, related_prs=()): - # nb: pr_commits is oldest to newest so pr.head is pr_commits[-1] - _, prdict = gh.pr(self.number) - commits = prdict['commits'] - method = self.merge_method or ('rebase-ff' if commits == 1 else None) - if commits > 50 and method.startswith('rebase'): - raise exceptions.Unmergeable(self, "Rebasing 50 commits is too much.") - if commits > 250: - raise exceptions.Unmergeable( - self, "Merging PRs of 250 or more commits is not supported " - "(https://developer.github.com/v3/pulls/#list-commits-on-a-pull-request)" - ) - pr_commits = gh.commits(self.number) - for c in pr_commits: - if not (c['commit']['author']['email'] and c['commit']['committer']['email']): - raise exceptions.Unmergeable( - self, - f"All commits must have author and committer email, " - f"missing email on {c['sha']} indicates the authorship is " - f"most likely incorrect." - ) - pr_head = pr_commits[-1]['sha'] - if pr_head != self.head: - raise exceptions.Mismatch(self.head, pr_head, commits == 1) - - if self.reviewed_by and self.reviewed_by.name == self.reviewed_by.github_login: - # XXX: find other trigger(s) to sync github name? 
- gh_name = gh.user(self.reviewed_by.github_login)['name'] - if gh_name: - self.reviewed_by.name = gh_name - - # update pr message in case an update was missed - msg = f'{prdict["title"]}\n\n{prdict.get("body") or ""}'.strip() - if self.message != msg: - self.message = msg - - # NOTE: lost merge v merge/copy distinction (head being - # a merge commit reused instead of being re-merged) - return method, getattr(self, '_stage_' + method.replace('-', '_'))( - gh, target, pr_commits, related_prs=related_prs) - - def _stage_squash(self, gh, target, commits, related_prs=()): - assert len(commits) == 1, "can only squash a single commit" - msg = self._build_merge_message(self, related_prs=related_prs) - commits[0]['commit']['message'] = str(msg) - head, mapping = gh.rebase(self.number, target, commits=commits) - self.commits_map = json.dumps({**mapping, '': head}) - return head - - def _stage_rebase_ff(self, gh, target, commits, related_prs=()): - # updates head commit with PR number (if necessary) then rebases - # on top of target - msg = self._build_merge_message(commits[-1]['commit']['message'], related_prs=related_prs) - commits[-1]['commit']['message'] = str(msg) - self._add_self_references(commits[:-1]) - head, mapping = gh.rebase(self.number, target, commits=commits) - self.commits_map = json.dumps({**mapping, '': head}) - return head - - def _stage_rebase_merge(self, gh, target, commits, related_prs=()): - self._add_self_references(commits) - h, mapping = gh.rebase(self.number, target, reset=True, commits=commits) - msg = self._build_merge_message(self, related_prs=related_prs) - merge_head = gh.merge(h, target, str(msg))['sha'] - self.commits_map = json.dumps({**mapping, '': merge_head}) - return merge_head - - def _stage_merge(self, gh, target, commits, related_prs=()): - pr_head = commits[-1] # oldest to newest - base_commit = None - head_parents = {p['sha'] for p in pr_head['parents']} - if len(head_parents) > 1: - # look for parent(s?) 
of pr_head not in PR, means it's - # from target (so we merged target in pr) - merge = head_parents - {c['sha'] for c in commits} - external_parents = len(merge) - if external_parents > 1: - raise exceptions.Unmergeable( - "The PR head can only have one parent from the base branch " - "(not part of the PR itself), found %d: %s" % ( - external_parents, - ', '.join(merge) - )) - if external_parents == 1: - [base_commit] = merge - - commits_map = {c['sha']: c['sha'] for c in commits} - if base_commit: - # replicate pr_head with base_commit replaced by - # the current head - original_head = gh.head(target) - merge_tree = gh.merge(pr_head['sha'], target, 'temp merge')['tree']['sha'] - new_parents = [original_head] + list(head_parents - {base_commit}) - msg = self._build_merge_message(pr_head['commit']['message'], related_prs=related_prs) - copy = gh('post', 'git/commits', json={ - 'message': str(msg), - 'tree': merge_tree, - 'author': pr_head['commit']['author'], - 'committer': pr_head['commit']['committer'], - 'parents': new_parents, - }).json() - gh.set_ref(target, copy['sha']) - # merge commit *and old PR head* map to the pr head replica - commits_map[''] = commits_map[pr_head['sha']] = copy['sha'] - self.commits_map = json.dumps(commits_map) - return copy['sha'] - else: - # otherwise do a regular merge - msg = self._build_merge_message(self) - merge_head = gh.merge(self.head, target, str(msg))['sha'] - # and the merge commit is the normal merge head - commits_map[''] = merge_head - self.commits_map = json.dumps(commits_map) - return merge_head - - def unstage(self, reason, *args): - """ If the PR is staged, cancel the staging. If the PR is split and - waiting, remove it from the split (possibly delete the split entirely) - """ - split_batches = self.with_context(active_test=False).mapped('batch_ids').filtered('split_id') - if len(split_batches) > 1: - _logger.warning("Found a PR linked with more than one split batch: %s (%s)", self, split_batches) - for b in split_batches: - if len(b.split_id.batch_ids) == 1: - # only the batch of this PR -> delete split - b.split_id.unlink() - else: - # else remove this batch from the split - b.split_id = False - - self.staging_id.cancel('%s ' + reason, self.display_name, *args) - - def _try_closing(self, by): - # ignore if the PR is already being updated in a separate transaction - # (most likely being merged?) - self.env.cr.execute(''' - SELECT id, state FROM runbot_merge_pull_requests - WHERE id = %s AND state != 'merged' - FOR UPDATE SKIP LOCKED; - ''', [self.id]) - if not self.env.cr.fetchone(): - return False - - self.env.cr.execute(''' - UPDATE runbot_merge_pull_requests - SET state = 'closed' - WHERE id = %s - ''', [self.id]) - self.env.cr.commit() - self.modified(['state']) - self.unstage("closed by %s", by) - return True - -# state changes on reviews -RPLUS = { - 'opened': 'approved', - 'validated': 'ready', -} -RMINUS = { - 'approved': 'opened', - 'ready': 'validated', - 'error': 'validated', -} - -_TAGS = { - False: set(), - 'opened': {'seen 🙂'}, -} -_TAGS['validated'] = _TAGS['opened'] | {'CI 🤖'} -_TAGS['approved'] = _TAGS['opened'] | {'r+ 👌'} -_TAGS['ready'] = _TAGS['validated'] | _TAGS['approved'] -_TAGS['staged'] = _TAGS['ready'] | {'merging 👷'} -_TAGS['merged'] = _TAGS['ready'] | {'merged 🎉'} -_TAGS['error'] = _TAGS['opened'] | {'error 🙅'} -_TAGS['closed'] = _TAGS['opened'] | {'closed 💔'} -ALL_TAGS = set.union(*_TAGS.values()) - -class Tagging(models.Model): - """ - Queue of tag changes to make on PRs. 
- - Several PR state changes are driven by webhooks, webhooks should return - quickly, performing calls to the Github API would *probably* get in the - way of that. Instead, queue tagging changes into this table whose - execution can be cron-driven. - """ - _name = _description = 'runbot_merge.pull_requests.tagging' - - repository = fields.Many2one('runbot_merge.repository', required=True) - # store the PR number (not id) as we need a Tagging for PR objects - # being deleted (retargeted to non-managed branches) - pull_request = fields.Integer() - - tags_remove = fields.Char(required=True, default='[]') - tags_add = fields.Char(required=True, default='[]') - - def create(self, values): - if values.pop('state_from', None): - values['tags_remove'] = ALL_TAGS - if 'state_to' in values: - values['tags_add'] = _TAGS[values.pop('state_to')] - if not isinstance(values.get('tags_remove', ''), str): - values['tags_remove'] = json.dumps(list(values['tags_remove'])) - if not isinstance(values.get('tags_add', ''), str): - values['tags_add'] = json.dumps(list(values['tags_add'])) - return super().create(values) - - def _send(self): - # noinspection SqlResolve - self.env.cr.execute(""" - SELECT - t.repository as repo_id, - t.pull_request as pr_number, - array_agg(t.id) as ids, - array_agg(t.tags_remove::json) as to_remove, - array_agg(t.tags_add::json) as to_add - FROM runbot_merge_pull_requests_tagging t - GROUP BY t.repository, t.pull_request - """) - Repos = self.env['runbot_merge.repository'] - ghs = {} - to_remove = [] - for repo_id, pr, ids, remove, add in self.env.cr.fetchall(): - repo = Repos.browse(repo_id) - - gh = ghs.get(repo) - if not gh: - gh = ghs[repo] = repo.github() - - # fold all grouped PRs' - tags_remove, tags_add = set(), set() - for minus, plus in zip(remove, add): - tags_remove.update(minus) - # need to remove minuses from to_add in case we get e.g. 
- # -foo +bar; -bar +baz, if we don't remove the minus, we'll end - # up with -foo +bar +baz instead of -foo +baz - tags_add.difference_update(minus) - tags_add.update(plus) - - try: - gh.change_tags(pr, tags_remove, tags_add) - except Exception: - _logger.exception( - "Error while trying to change the tags of %s#%s from %s to %s", - repo.name, pr, remove, add, - ) - else: - to_remove.extend(ids) - self.browse(to_remove).unlink() - -class Feedback(models.Model): - """ Queue of feedback comments to send to PR users - """ - _name = _description = 'runbot_merge.pull_requests.feedback' - - repository = fields.Many2one('runbot_merge.repository', required=True) - # store the PR number (not id) as we may want to send feedback to PR - # objects on non-handled branches - pull_request = fields.Integer() - message = fields.Char() - close = fields.Boolean() - token_field = fields.Selection( - [('github_token', "Mergebot")], - default='github_token', - string="Bot User", - help="Token field (from repo's project) to use to post messages" - ) - - def _send(self): - ghs = {} - to_remove = [] - for f in self.search([]): - repo = f.repository - gh = ghs.get((repo, f.token_field)) - if not gh: - gh = ghs[(repo, f.token_field)] = repo.github(f.token_field) - - try: - message = f.message - with contextlib.suppress(json.JSONDecodeError): - data = json.loads(message or '') - message = data.get('message') - - if data.get('base'): - gh('PATCH', f'pulls/{f.pull_request}', json={'base': data['base']}) - - if f.close: - pr_to_notify = self.env['runbot_merge.pull_requests'].search([ - ('repository', '=', repo.id), - ('number', '=', f.pull_request), - ]) - if pr_to_notify: - pr_to_notify._notify_merged(gh, data) - - if f.close: - gh.close(f.pull_request) - - if message: - gh.comment(f.pull_request, message) - except Exception: - _logger.exception( - "Error while trying to %s %s#%s (%s)", - 'close' if f.close else 'send a comment to', - repo.name, f.pull_request, - utils.shorten(f.message, 200) - ) - else: - to_remove.append(f.id) - self.browse(to_remove).unlink() - -class Commit(models.Model): - """Represents a commit onto which statuses might be posted, - independent of everything else as commits can be created by - statuses only, by PR pushes, by branch updates, ... 
- """ - _name = _description = 'runbot_merge.commit' - - sha = fields.Char(required=True) - statuses = fields.Char(help="json-encoded mapping of status contexts to states", default="{}") - to_check = fields.Boolean(default=False) - - def create(self, values): - values['to_check'] = True - r = super(Commit, self).create(values) - return r - - def write(self, values): - values.setdefault('to_check', True) - r = super(Commit, self).write(values) - return r - - def _notify(self): - Stagings = self.env['runbot_merge.stagings'] - PRs = self.env['runbot_merge.pull_requests'] - # chances are low that we'll have more than one commit - for c in self.search([('to_check', '=', True)]): - try: - c.to_check = False - st = json.loads(c.statuses) - pr = PRs.search([('head', '=', c.sha)]) - if pr: - pr._validate(st) - - stagings = Stagings.search([('heads', 'ilike', c.sha)]).filtered( - lambda s, h=c.sha: any( - head == h - for repo, head in json.loads(s.heads).items() - if not repo.endswith('^') - ) - ) - if stagings: - stagings._validate() - except Exception: - _logger.exception("Failed to apply commit %s (%s)", c, c.sha) - self.env.cr.rollback() - else: - self.env.cr.commit() - - _sql_constraints = [ - ('unique_sha', 'unique (sha)', 'no duplicated commit'), - ] - - def _auto_init(self): - res = super(Commit, self)._auto_init() - self._cr.execute(""" - CREATE INDEX IF NOT EXISTS runbot_merge_unique_statuses - ON runbot_merge_commit - USING hash (sha) - """) - self._cr.execute(""" - CREATE INDEX IF NOT EXISTS runbot_merge_to_process - ON runbot_merge_commit ((1)) WHERE to_check - """) - return res - -class Stagings(models.Model): - _name = _description = 'runbot_merge.stagings' - - target = fields.Many2one('runbot_merge.branch', required=True) - - batch_ids = fields.One2many( - 'runbot_merge.batch', 'staging_id', - context={'active_test': False}, - ) - state = fields.Selection([ - ('success', 'Success'), - ('failure', 'Failure'), - ('pending', 'Pending'), - ('cancelled', "Cancelled"), - ('ff_failed', "Fast forward failed") - ], default='pending') - active = fields.Boolean(default=True) - - staged_at = fields.Datetime(default=fields.Datetime.now) - timeout_limit = fields.Datetime(store=True, compute='_compute_timeout_limit') - reason = fields.Text("Reason for final state (if any)") - - # seems simpler than adding yet another indirection through a model - heads = fields.Char(required=True, help="JSON-encoded map of heads, one per repo in the project") - head_ids = fields.Many2many('runbot_merge.commit', compute='_compute_statuses') - - statuses = fields.Binary(compute='_compute_statuses') - - def name_get(self): - return [ - (staging.id, "%d (%s, %s%s)" % ( - staging.id, - staging.target.name, - staging.state, - (', ' + staging.reason) if staging.reason else '', - )) - for staging in self - ] - - @api.depends('heads') - def _compute_statuses(self): - """ Fetches statuses associated with the various heads, returned as - (repo, context, state, url) - """ - Commits = self.env['runbot_merge.commit'] - for st in self: - heads = { - head: repo for repo, head in json.loads(st.heads).items() - if not repo.endswith('^') - } - commits = st.head_ids = Commits.search([('sha', 'in', list(heads.keys()))]) - st.statuses = [ - ( - heads[commit.sha], - context, - status.get('state') or 'pending', - status.get('target_url') or '' - ) - for commit in commits - for context, st in json.loads(commit.statuses).items() - for status in [to_status(st)] - ] - - # only depend on staged_at as it should not get modified, but we might - 
# update the CI timeout after the staging have been created and we - # *do not* want to update the staging timeouts in that case - @api.depends('staged_at') - def _compute_timeout_limit(self): - for st in self: - st.timeout_limit = fields.Datetime.to_string( - fields.Datetime.from_string(st.staged_at) - + datetime.timedelta(minutes=st.target.project_id.ci_timeout) - ) - - def _validate(self): - Commits = self.env['runbot_merge.commit'] - for s in self: - if s.state != 'pending': - continue - - repos = { - repo.name: repo - for repo in self.env['runbot_merge.repository'].search([]) - .having_branch(s.target) - } - # maps commits to the statuses they need - required_statuses = [ - (head, repos[repo].status_ids._for_staging(s).mapped('context')) - for repo, head in json.loads(s.heads).items() - if not repo.endswith('^') - ] - # maps commits to their statuses - cmap = { - c.sha: json.loads(c.statuses) - for c in Commits.search([('sha', 'in', [h for h, _ in required_statuses])]) - } - - update_timeout_limit = False - st = 'success' - for head, reqs in required_statuses: - statuses = cmap.get(head) or {} - for v in map(lambda n: state_(statuses, n), reqs): - if st == 'failure' or v in ('error', 'failure'): - st = 'failure' - elif v is None: - st = 'pending' - elif v == 'pending': - st = 'pending' - update_timeout_limit = True - else: - assert v == 'success' - - vals = {'state': st} - if update_timeout_limit: - vals['timeout_limit'] = fields.Datetime.to_string(datetime.datetime.now() + datetime.timedelta(minutes=s.target.project_id.ci_timeout)) - _logger.debug("%s got pending status, bumping timeout to %s (%s)", self, vals['timeout_limit'], cmap) - s.write(vals) - - def action_cancel(self): - self.cancel("explicitly cancelled by %s", self.env.user.display_name) - return { 'type': 'ir.actions.act_window_close' } - - def cancel(self, reason, *args): - self = self.filtered('active') - if not self: - return - - _logger.info("Cancelling staging %s: " + reason, self, *args) - self.mapped('batch_ids').write({'active': False}) - self.write({ - 'active': False, - 'state': 'cancelled', - 'reason': reason % args, - }) - - def fail(self, message, prs=None): - _logger.info("Staging %s failed: %s", self, message) - prs = prs or self.batch_ids.prs - prs.write({'state': 'error'}) - for pr in prs: - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': "%sstaging failed: %s" % (pr.ping(), message), - }) - - self.batch_ids.write({'active': False}) - self.write({ - 'active': False, - 'state': 'failure', - 'reason': message, - }) - - def try_splitting(self): - batches = len(self.batch_ids) - if batches > 1: - midpoint = batches // 2 - h, t = self.batch_ids[:midpoint], self.batch_ids[midpoint:] - # NB: batches remain attached to their original staging - sh = self.env['runbot_merge.split'].create({ - 'target': self.target.id, - 'batch_ids': [(4, batch.id, 0) for batch in h], - }) - st = self.env['runbot_merge.split'].create({ - 'target': self.target.id, - 'batch_ids': [(4, batch.id, 0) for batch in t], - }) - _logger.info("Split %s to %s (%s) and %s (%s)", - self, h, sh, t, st) - self.batch_ids.write({'active': False}) - self.write({ - 'active': False, - 'state': 'failure', - 'reason': self.reason if self.state == 'failure' else 'timed out' - }) - return True - - # single batch => the staging is an unredeemable failure - if self.state != 'failure': - # timed out, just mark all PRs (wheee) - self.fail('timed out (>{} 
minutes)'.format(self.target.project_id.ci_timeout)) - return False - - # try inferring which PR failed and only mark that one - for repo, head in json.loads(self.heads).items(): - if repo.endswith('^'): - continue - - required_statuses = set( - self.env['runbot_merge.repository'] - .search([('name', '=', repo)]) - .status_ids - ._for_staging(self) - .mapped('context')) - - commit = self.env['runbot_merge.commit'].search([('sha', '=', head)]) - statuses = json.loads(commit.statuses or '{}') - reason = next(( - ctx for ctx, result in statuses.items() - if ctx in required_statuses - if to_status(result).get('state') in ('error', 'failure') - ), None) - if not reason: - continue - - pr = next(( - pr for pr in self.batch_ids.prs - if pr.repository.name == repo - ), None) - - status = to_status(statuses[reason]) - viewmore = '' - if status.get('target_url'): - viewmore = ' (view more at %(target_url)s)' % status - if pr: - self.fail("%s%s" % (reason, viewmore), pr) - else: - self.fail('%s on %s%s' % (reason, head, viewmore)) - return False - - # the staging failed but we don't have a specific culprit, fail - # everything - self.fail("unknown reason") - - return False - - def check_status(self): - """ - Checks the status of an active staging: - * merges it if successful - * splits it if failed (or timed out) and more than 1 batch - * marks the PRs as failed otherwise - * ignores if pending (or cancelled or ff_failed but those should also - be disabled) - """ - logger = _logger.getChild('cron') - if not self.active: - logger.info("Staging %s is not active, ignoring status check", self) - return - - logger.info("Checking active staging %s (state=%s)", self, self.state) - project = self.target.project_id - if self.state == 'success': - gh = {repo.name: repo.github() for repo in project.repo_ids.having_branch(self.target)} - staging_heads = json.loads(self.heads) - self.env.cr.execute(''' - SELECT 1 FROM runbot_merge_pull_requests - WHERE id in %s - FOR UPDATE - ''', [tuple(self.mapped('batch_ids.prs.id'))]) - try: - self._safety_dance(gh, staging_heads) - except exceptions.FastForwardError as e: - logger.warning( - "Could not fast-forward successful staging on %s:%s", - e.args[0], self.target.name, - exc_info=True - ) - self.write({ - 'state': 'ff_failed', - 'reason': str(e.__cause__ or e.__context__ or e) - }) - else: - prs = self.mapped('batch_ids.prs') - logger.info( - "%s FF successful, marking %s as merged", - self, prs - ) - prs.write({'state': 'merged'}) - - pseudobranch = None - if self.target == project.branch_ids[:1]: - pseudobranch = project._next_freeze() - - for pr in prs: - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': json.dumps({ - 'sha': json.loads(pr.commits_map)[''], - }), - 'close': True, - }) - if pseudobranch: - self.env['runbot_merge.pull_requests.tagging'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'tags_add': json.dumps([pseudobranch]), - }) - finally: - self.batch_ids.write({'active': False}) - self.write({'active': False}) - elif self.state == 'failure' or self.is_timed_out(): - self.try_splitting() - - def is_timed_out(self): - return fields.Datetime.from_string(self.timeout_limit) < datetime.datetime.now() - - def _safety_dance(self, gh, staging_heads): - """ Reverting updates doesn't work if the branches are protected - (because a revert is basically a force push). So we can update - REPO_A, then fail to update REPO_B for some reason, and we're hosed. 
- - To try and make this issue less likely, do the safety dance: - - * First, perform a dry run using the tmp branches (which can be - force-pushed and sacrificed), that way if somebody pushed directly - to REPO_B during the staging we catch it. If we're really unlucky - they could still push after the dry run but... - * An other issue then is that the github call sometimes fails for no - noticeable reason (e.g. network failure or whatnot), if it fails - on REPO_B when REPO_A has already been updated things get pretty - bad. In that case, wait a bit and retry for now. A more complex - strategy (including disabling the branch entirely until somebody - has looked at and fixed the issue) might be necessary. - - :returns: the last repo it tried to update (probably the one on which - it failed, if it failed) - """ - # FIXME: would make sense for FFE to be richer, and contain the repo name - repo_name = None - tmp_target = 'tmp.' + self.target.name - # first force-push the current targets to all tmps - for repo_name in staging_heads.keys(): - if repo_name.endswith('^'): - continue - g = gh[repo_name] - g.set_ref(tmp_target, g.head(self.target.name)) - # then attempt to FF the tmp to the staging - for repo_name, head in staging_heads.items(): - if repo_name.endswith('^'): - continue - gh[repo_name].fast_forward(tmp_target, staging_heads.get(repo_name + '^') or head) - # there is still a race condition here, but it's way - # lower than "the entire staging duration"... - first = True - for repo_name, head in staging_heads.items(): - if repo_name.endswith('^'): - continue - - for pause in [0.1, 0.3, 0.5, 0.9, 0]: # last one must be 0/falsy of we lose the exception - try: - # if the staging has a $repo^ head, merge that, - # otherwise merge the regular (CI'd) head - gh[repo_name].fast_forward( - self.target.name, - staging_heads.get(repo_name + '^') or head - ) - except exceptions.FastForwardError: - # The GH API regularly fails us. If the failure does not - # occur on the first repository, retry a few times with a - # little pause. - if not first and pause: - time.sleep(pause) - continue - raise - else: - break - first = False - return repo_name - -class Split(models.Model): - _name = _description = 'runbot_merge.split' - - target = fields.Many2one('runbot_merge.branch', required=True) - batch_ids = fields.One2many('runbot_merge.batch', 'split_id', context={'active_test': False}) - -class Batch(models.Model): - """ A batch is a "horizontal" grouping of *codependent* PRs: PRs with - the same label & target but for different repositories. These are - assumed to be part of the same "change" smeared over multiple - repositories e.g. change an API in repo1, this breaks use of that API - in repo2 which now needs to be updated. 
- """ - _name = _description = 'runbot_merge.batch' - - target = fields.Many2one('runbot_merge.branch', required=True) - staging_id = fields.Many2one('runbot_merge.stagings') - split_id = fields.Many2one('runbot_merge.split') - - prs = fields.Many2many('runbot_merge.pull_requests') - - active = fields.Boolean(default=True) - - @api.constrains('target', 'prs') - def _check_prs(self): - for batch in self: - repos = self.env['runbot_merge.repository'] - for pr in batch.prs: - if pr.target != batch.target: - raise ValidationError("A batch and its PRs must have the same branch, got %s and %s" % (batch.target, pr.target)) - if pr.repository in repos: - raise ValidationError("All prs of a batch must have different target repositories, got a duplicate %s on %s" % (pr.repository, pr)) - repos |= pr.repository - - def stage(self, meta, prs): - """ - Updates meta[*][head] on success - - :return: () or Batch object (if all prs successfully staged) - """ - new_heads = {} - for pr in prs: - gh = meta[pr.repository]['gh'] - - _logger.info( - "Staging pr %s for target %s; method=%s", - pr.display_name, pr.target.name, - pr.merge_method or (pr.squash and 'single') or None - ) - - target = 'tmp.{}'.format(pr.target.name) - original_head = gh.head(target) - try: - try: - method, new_heads[pr] = pr._stage(gh, target, related_prs=(prs - pr)) - _logger.info( - "Staged pr %s to %s by %s: %s -> %s", - pr.display_name, pr.target.name, method, - original_head, new_heads[pr] - ) - except Exception: - # reset the head which failed, as rebase() may have partially - # updated it (despite later steps failing) - gh.set_ref(target, original_head) - # then reset every previous update - for to_revert in new_heads.keys(): - it = meta[to_revert.repository] - it['gh'].set_ref('tmp.{}'.format(to_revert.target.name), it['head']) - raise - except github.MergeError: - raise exceptions.MergeError(pr) - except exceptions.Mismatch as e: - old_head, new_head, to_squash = e.args - pr.write({ - 'state': 'opened', - 'squash': to_squash, - 'head': new_head, - }) - _logger.warning( - "head mismatch on %s: had %s but found %s", - pr.display_name, old_head, new_head - ) - self.env['runbot_merge.pull_requests.feedback'].create({ - 'repository': pr.repository.id, - 'pull_request': pr.number, - 'message': "%swe apparently missed an update to this PR " - "and tried to stage it in a state which " - "might not have been approved. PR has been " - "updated to %s, please check and approve or " - "re-approve." 
% (pr.ping(), new_head) - }) - return self.env['runbot_merge.batch'] - - # update meta to new heads - for pr, head in new_heads.items(): - meta[pr.repository]['head'] = head - return self.create({ - 'target': prs[0].target.id, - 'prs': [(4, pr.id, 0) for pr in prs], - }) - -class FetchJob(models.Model): - _name = _description = 'runbot_merge.fetch_job' - - active = fields.Boolean(default=True) - repository = fields.Many2one('runbot_merge.repository', required=True) - number = fields.Integer(required=True) - - def _check(self, commit=False): - """ - :param bool commit: commit after each fetch has been executed - """ - while True: - f = self.search([], limit=1) - if not f: - return - - self.env.cr.execute("SAVEPOINT runbot_merge_before_fetch") - try: - f.repository._load_pr(f.number) - except Exception: - self.env.cr.execute("ROLLBACK TO SAVEPOINT runbot_merge_before_fetch") - _logger.exception("Failed to load pr %s, skipping it", f.number) - finally: - self.env.cr.execute("RELEASE SAVEPOINT runbot_merge_before_fetch") - - f.active = False - if commit: - self.env.cr.commit() - -# The commit (and PR) statuses was originally a map of ``{context:state}`` -# however it turns out to clarify error messages it'd be useful to have -# a bit more information e.g. a link to the CI's build info on failure and -# all that. So the db-stored statuses are now becoming a map of -# ``{ context: {state, target_url, description } }``. The issue here is -# there's already statuses stored in the db so we need to handle both -# formats, hence these utility functions) -def state_(statuses, name): - """ Fetches the status state """ - name = name.strip() - v = statuses.get(name) - if isinstance(v, dict): - return v.get('state') - return v -def to_status(v): - """ Converts old-style status values (just a state string) to new-style - (``{state, target_url, description}``) - - :type v: str | dict - :rtype: dict - """ - if isinstance(v, dict): - return v - return {'state': v, 'target_url': None, 'description': None} - -refline = re.compile(rb'([\da-f]{40}) ([^\0\n]+)(\0.*)?\n?$') -ZERO_REF = b'0'*40 -def parse_refs_smart(read): - """ yields pkt-line data (bytes), or None for flush lines """ - def read_line(): - length = int(read(4), 16) - if length == 0: - return None - return read(length - 4) - - header = read_line() - assert header.rstrip() == b'# service=git-upload-pack', header - assert read_line() is None, "failed to find first flush line" - # read lines until second delimiter - for line in iter(read_line, None): - if line.startswith(ZERO_REF): - break # empty list (no refs) - m = refline.match(line) - yield m[1].decode(), m[2].decode() - -BREAK = re.compile(r''' - ^ - [ ]{0,3} # 0-3 spaces of indentation - # followed by a sequence of three or more matching -, _, or * characters, - # each followed optionally by any number of spaces or tabs - # so needs to start with a _, - or *, then have at least 2 more such - # interspersed with any number of spaces or tabs - ([*_-]) - ([ \t]*\1){2,} - [ \t]* - $ -''', flags=re.VERBOSE) -SETEX_UNDERLINE = re.compile(r''' - ^ - [ ]{0,3} # no more than 3 spaces indentation - [-=]+ # a sequence of = characters or a sequence of - characters - [ ]* # any number of trailing spaces - $ - # we don't care about "a line containing a single -" because we want to - # disambiguate SETEX headings from thematic breaks, and thematic breaks have - # 3+ -. Doesn't look like GH interprets `- - -` as a line so yay... 
-''', flags=re.VERBOSE) -HEADER = re.compile('^([A-Za-z-]+): (.*)$') -class Message: - @classmethod - def from_message(cls, msg): - in_headers = True - maybe_setex = None - # creating from PR message -> remove content following break - msg, handle_break = (msg, False) if isinstance(msg, str) else (msg.message, True) - headers = [] - body = [] - # don't process the title (first line) of the commit message - msg = msg.splitlines() - for line in reversed(msg[1:]): - if maybe_setex: - # NOTE: actually slightly more complicated: it's a SETEX heading - # only if preceding line(s) can be interpreted as a - # paragraph so e.g. a title followed by a line of dashes - # would indeed be a break, but this should be good enough - # for now, if we need more we'll need a full-blown - # markdown parser probably - if line: # actually a SETEX title -> add underline to body then process current - body.append(maybe_setex) - else: # actually break, remove body then process current - body = [] - maybe_setex = None - - if not line: - if not in_headers and body and body[-1]: - body.append(line) - continue - - if handle_break and BREAK.match(line): - if SETEX_UNDERLINE.match(line): - maybe_setex = line - else: - body = [] - continue - - h = HEADER.match(line) - if h: - # c-a-b = special case from an existing test, not sure if actually useful? - if in_headers or h.group(1).lower() == 'co-authored-by': - headers.append(h.groups()) - continue - - body.append(line) - in_headers = False - - # if there are non-title body lines, add a separation after the title - if body and body[-1]: - body.append('') - body.append(msg[0]) - return cls('\n'.join(reversed(body)), Headers(reversed(headers))) - - def __init__(self, body, headers=None): - self.body = body - self.headers = headers or Headers() - - def __setattr__(self, name, value): - # make sure stored body is always stripped - if name == 'body': - value = value and value.strip() - super().__setattr__(name, value) - - def __str__(self): - if not self.headers: - return self.body + '\n' - - with io.StringIO(self.body) as msg: - msg.write(self.body) - msg.write('\n\n') - # https://git.wiki.kernel.org/index.php/CommitMessageConventions - # seems to mostly use capitalised names (rather than title-cased) - keys = list(OrderedSet(k.capitalize() for k in self.headers.keys())) - # c-a-b must be at the very end otherwise github doesn't see it - keys.sort(key=lambda k: k == 'Co-authored-by') - for k in keys: - for v in self.headers.getlist(k): - msg.write(k) - msg.write(': ') - msg.write(v) - msg.write('\n') - - return msg.getvalue() - - def sub(self, pattern, repl, *, flags): - """ Performs in-place replacements on the body - """ - self.body = re.sub(pattern, repl, self.body, flags=flags) diff --git a/runbot_merge/models/res_partner.py b/runbot_merge/models/res_partner.py deleted file mode 100644 index d627b2f4..00000000 --- a/runbot_merge/models/res_partner.py +++ /dev/null @@ -1,117 +0,0 @@ -import random -from email.utils import parseaddr - -from odoo import fields, models, tools, api - -from .. 
import github - -class CIText(fields.Char): - type = 'char' - column_type = ('citext', 'citext') - column_cast_from = ('varchar', 'text') - -class Partner(models.Model): - _inherit = 'res.partner' - - email = fields.Char(index=True) - github_login = CIText() - delegate_reviewer = fields.Many2many('runbot_merge.pull_requests') - formatted_email = fields.Char(string="commit email", compute='_rfc5322_formatted') - review_rights = fields.One2many('res.partner.review', 'partner_id') - override_rights = fields.Many2many('res.partner.override') - - def _auto_init(self): - res = super(Partner, self)._auto_init() - tools.create_unique_index( - self._cr, 'runbot_merge_unique_gh_login', self._table, ['github_login']) - return res - - @api.depends('name', 'email', 'github_login') - def _rfc5322_formatted(self): - for partner in self: - if partner.email: - email = parseaddr(partner.email)[1] - elif partner.github_login: - email = '%s@users.noreply.github.com' % partner.github_login - else: - email = '' - partner.formatted_email = '%s <%s>' % (partner.name, email) - - def fetch_github_email(self): - # this requires a token in order to fetch the email field, otherwise - # it's just not returned, select a random project to fetch - gh = github.GH(random.choice(self.env['runbot_merge.project'].search([])).github_token, None) - for p in self.filtered(lambda p: p.github_login and p.email is False): - p.email = gh.user(p.github_login)['email'] or False - return False - -class PartnerMerge(models.TransientModel): - _inherit = 'base.partner.merge.automatic.wizard' - - @api.model - def _update_values(self, src_partners, dst_partner): - # sift down through src partners, removing all github_login and keeping - # the last one - new_login = None - for p in src_partners: - new_login = p.github_login or new_login - if new_login: - src_partners.write({'github_login': False}) - if new_login and not dst_partner.github_login: - dst_partner.github_login = new_login - super()._update_values(src_partners, dst_partner) - -class ReviewRights(models.Model): - _name = 'res.partner.review' - _description = "mapping of review rights between partners and repos" - - partner_id = fields.Many2one('res.partner', required=True, ondelete='cascade') - repository_id = fields.Many2one('runbot_merge.repository', required=True) - review = fields.Boolean(default=False) - self_review = fields.Boolean(default=False) - - def _auto_init(self): - res = super()._auto_init() - tools.create_unique_index(self._cr, 'runbot_merge_review_m2m', self._table, ['partner_id', 'repository_id']) - return res - - def name_get(self): - return [ - (r.id, '%s: %s' % (r.repository_id.name, ', '.join(filter(None, [ - r.review and "reviewer", - r.self_review and "self-reviewer" - ])))) - for r in self - ] - - @api.model - def name_search(self, name='', args=None, operator='ilike', limit=100): - return self.search((args or []) + [('repository_id.name', operator, name)], limit=limit).name_get() - -class OverrideRights(models.Model): - _name = 'res.partner.override' - _description = 'lints which the partner can override' - - partner_ids = fields.Many2many('res.partner') - repository_id = fields.Many2one('runbot_merge.repository') - context = fields.Char(required=True) - - def init(self): - super().init() - tools.create_unique_index( - self.env.cr, 'res_partner_override_unique', self._table, - ['context', 'coalesce(repository_id, 0)'] - ) - - @api.model - def name_search(self, name='', args=None, operator='ilike', limit=100): - return self.search((args or []) + [ - '|', 
('context', operator, name), - ('repository_id.name', operator, name) - ], limit=limit).name_get() - - def name_get(self): - return [ - (r.id, f'{r.repository_id.name}: {r.context}' if r.repository_id else r.context) - for r in self - ] diff --git a/runbot_merge/security/ir.model.access.csv b/runbot_merge/security/ir.model.access.csv deleted file mode 100644 index 0de5d476..00000000 --- a/runbot_merge/security/ir.model.access.csv +++ /dev/null @@ -1,25 +0,0 @@ -id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink -access_runbot_merge_project_admin,Admin access to project,model_runbot_merge_project,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_project_freeze,Admin access to freeze wizard,model_runbot_merge_project_freeze,runbot_merge.group_admin,1,1,0,0 -access_runbot_merge_project_freeze_prs,Admin access to freeze wizard release prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1 -access_runbot_merge_project_freeze_bumps,Admin access to freeze wizard bump prs,model_runbot_merge_project_freeze_bumps,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_repository_admin,Admin access to repo,model_runbot_merge_repository,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_repository_status_admin,Admin access to repo statuses,model_runbot_merge_repository_status,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_branch_admin,Admin access to branches,model_runbot_merge_branch,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_pull_requests_admin,Admin access to PR,model_runbot_merge_pull_requests,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_pull_requests_tagging_admin,Admin access to tagging,model_runbot_merge_pull_requests_tagging,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_commit_admin,Admin access to commits,model_runbot_merge_commit,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_stagings_admin,Admin access to stagings,model_runbot_merge_stagings,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_split_admin,Admin access to splits,model_runbot_merge_split,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_batch_admin,Admin access to batches,model_runbot_merge_batch,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_fetch_job_admin,Admin access to fetch jobs,model_runbot_merge_fetch_job,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_pull_requests_feedback_admin,Admin access to feedback,model_runbot_merge_pull_requests_feedback,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_review_rights,Admin access to review permissions,model_res_partner_review,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_review_override,Admin access to override permissions,model_res_partner_override,runbot_merge.group_admin,1,1,1,1 -access_runbot_merge_project,User access to project,model_runbot_merge_project,base.group_user,1,0,0,0 -access_runbot_merge_repository,User access to repo,model_runbot_merge_repository,base.group_user,1,0,0,0 -access_runbot_merge_branch,User access to branches,model_runbot_merge_branch,base.group_user,1,0,0,0 -access_runbot_merge_pull_requests,User access to PR,model_runbot_merge_pull_requests,base.group_user,1,0,0,0 -access_runbot_merge_pull_requests_feedback,Users have no reason to access feedback,model_runbot_merge_pull_requests_feedback,,0,0,0,0 -access_runbot_merge_review_rights_2,Users can see partners,model_res_partner_review,base.group_user,1,0,0,0 -access_runbot_merge_review_override_2,Users can see partners,model_res_partner_override,base.group_user,1,0,0,0 diff --git 
a/runbot_merge/security/security.xml b/runbot_merge/security/security.xml deleted file mode 100644 index 62e1f323..00000000 --- a/runbot_merge/security/security.xml +++ /dev/null @@ -1,8 +0,0 @@ -<odoo> - <record model="res.groups" id="group_admin"> - <field name="name">Mergebot Administrator</field> - </record> - <record model="res.groups" id="base.group_system"> - <field name="implied_ids" eval="[(4, ref('runbot_merge.group_admin'))]"/> - </record> -</odoo> diff --git a/runbot_merge/static/project_freeze/index.js b/runbot_merge/static/project_freeze/index.js deleted file mode 100644 index a0568962..00000000 --- a/runbot_merge/static/project_freeze/index.js +++ /dev/null @@ -1,62 +0,0 @@ -odoo.define('runbot_merge.index', function (require) { -"use strict"; -const FormController = require('web.FormController'); -const FormView = require('web.FormView'); -const viewRegistry = require('web.view_registry'); - -/** - * Attept at a "smart" controller for the freeze wizard: keeps triggering - * onchange() on the form in order to try and update the error information, as - * some of the "errors" are not under direct operator control. Hopefully this - * allows the operator to just keep the wizard open and wait until the error - * messages disappear so they can proceed. - */ -const FreezeController = FormController.extend({ - async _checkState() { - const record = this.model.get(this.handle) - const requiredPrIds = record.data.required_pr_ids; - - // we're inside the form's mutex, so can use `_applyChange` directly - const changed = await this.model._applyChange(this.handle, { - required_pr_ids: { - operation: 'REPLACE_WITH', - ids: requiredPrIds.res_ids, - } - }); - // not sure why we need to wait for the round *after* the error update - // notification, but even debouncing the rest of the method is not - // sufficient (so it's not just a problem of being behind the mutex, - // there's something wonky going on) - if (!this._updateNext) { - this._updateNext = changed.includes('errors'); - return; - } - - this._updateNext = false; - for(const p of requiredPrIds.data) { - this.renderer.updateState(p.id, {fieldNames: ['state_color']}); - } - this.renderer.updateState(record, {fieldNames: ['errors', 'required_pr_ids']}); - }, - /** - * @override - */ - async start(...args) { - const checker = async () => { - if (this.isDestroyed()) { return; } - await this.model.mutex.exec(() => this._checkState()); - setTimeout(checker, 1000); - }; - const started = await this._super(...args); - const _ = checker(); - return started; - }, -}); - -viewRegistry.add('freeze_wizard', FormView.extend({ - config: Object.assign({}, FormView.prototype.config, { - Controller: FreezeController, - }) -})); -}); - diff --git a/runbot_merge/static/scss/runbot_merge.scss b/runbot_merge/static/scss/runbot_merge.scss deleted file mode 100644 index c00bf61f..00000000 --- a/runbot_merge/static/scss/runbot_merge.scss +++ /dev/null @@ -1,101 +0,0 @@ -// FIX: bs4 shit-heap colors and styles -body { - font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; - color: #666666; -} -h1, h2, h3, h4, h5, h6{ - color: inherit; - margin-top: 0.66em; - margin-bottom: 0.33em; -} -h5 { font-size: 1em; } -.bg-success, .bg-info, .bg-warning, .bg-danger, .bg-gray-lighter { - color: inherit; -} -.dropdown-item, .dropdown-menu, .dropdown-menu a { - color: inherit; -} -.bg-success { - background-color: #dff0d8 !important; -} -.bg-unmerged { - background-color: #dcefe8 !important -} -.bg-info { - background-color: #d9edf7 !important; -} -.bg-warning { - 
background-color: #fcf8e3 !important; -} -.bg-danger { - background-color: #f2dede !important; -} -.list-inline { - margin-bottom: 10px; -} -.list-inline > li { - padding: 0 5px; - margin-right: 0; -} - -// mergebot layouting -.stagings { - display: flex; - align-items: stretch; - - > li { - flex: 1; - // prevent content-based autosizing otherwise that's flex' starting point - width: 0; - - padding: 0.1em 0.1em 0.1em 0.5em; - - &:not(:last-child) { - border-right: 1px solid lightgray; - } - } - .batch { - // cut off branch names if they can't be line-wrapped and would break the - // layout, works with flex to force all columns to be at the same size - overflow: hidden; - text-overflow: ellipsis; - - &:not(:last-child) { - border-bottom: 1px solid lightgray; - } - } - - .batch a:not(:last-of-type) a:after { - content: ","; - } -} -.pr-listing > * { display: inline-block; } -.pr-awaiting { opacity: 0.8; } -.pr-blocked { opacity: 0.6; } -.pr-failed { opacity: 0.9; } - -ul.todo { - list-style-type: '☐ '; - > li.ok { - //@extend .alert-success; - list-style-type: '☑ '; - } - > li.fail { - @extend .alert-danger; - list-style-type: '☒ '; - } -} - -dl.runbot-merge-fields { - @extend .row; - > dt { - @extend .col-sm-2; - } - > dd { - @extend .col-sm-10; - } -} - -.staging-statuses { - cursor: wait; -} diff --git a/runbot_merge/tests/README.rst b/runbot_merge/tests/README.rst deleted file mode 100644 index 0671b9be..00000000 --- a/runbot_merge/tests/README.rst +++ /dev/null @@ -1,47 +0,0 @@ -Execute this test suite using pytest. - -The default mode is to run tests locally using a mock github.com. - -See the docstring of remote.py for instructions to run against github "actual" -(including remote-specific options) and the end of this file for a sample. - -Shared properties running tests, regardless of the github implementation: - -* test should be run from the root of the runbot repository providing the - name of this module aka ``pytest runbot_merge`` or - ``python -mpytest runbot_merge`` -* a database name to use must be provided using ``--db``, the database should - not exist beforehand -* the addons path must be specified using ``--addons-path``, both "runbot" and - the standard addons (odoo/addons) must be provided explicitly - -See pytest's documentation for other options, I would recommend ``-rXs``, -``-v`` and ``--showlocals``. - -When running "remote" tests as they take a very long time (hours) ``-x`` -(aka ``--maxfail=1``) and ``--ff`` (run previously failed first) is also -recommended unless e.g. you run the tests overnight. - -``pytest.ini`` sample ---------------------- - -.. 
code:: ini - - [github] - owner = test-org - token = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - - [role_reviewer] - name = Dick Bong - user = loginb - token = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - - [role_self_reviewer] - name = Fanny Chmelar - user = loginc - token = cccccccccccccccccccccccccccccccccccccccc - - [role_other] - name = Harry Baals - user = logind - token = dddddddddddddddddddddddddddddddddddddddd diff --git a/runbot_merge/tests/conftest.py b/runbot_merge/tests/conftest.py deleted file mode 100644 index 75cad7a8..00000000 --- a/runbot_merge/tests/conftest.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -import requests - -@pytest.fixture() -def module(): - return 'runbot_merge' - -@pytest.fixture -def page(port): - s = requests.Session() - def get(url): - r = s.get('http://localhost:{}{}'.format(port, url)) - r.raise_for_status() - return r.content - return get - -@pytest.fixture -def default_crons(): - return [ - # env['runbot_merge.project']._check_fetch() - 'runbot_merge.fetch_prs_cron', - # env['runbot_merge.commit']._notify() - 'runbot_merge.process_updated_commits', - # env['runbot_merge.project']._check_stagings() - 'runbot_merge.merge_cron', - # env['runbot_merge.project']._create_stagings() - 'runbot_merge.staging_cron', - # env['runbot_merge.pull_requests']._check_linked_prs_statuses() - 'runbot_merge.check_linked_prs_status', - # env['runbot_merge.pull_requests.feedback']._send() - 'runbot_merge.feedback_cron', - ] - -@pytest.fixture -def project(env, config): - return env['runbot_merge.project'].create({ - 'name': 'odoo', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'branch_ids': [(0, 0, {'name': 'master'})], - }) diff --git a/runbot_merge/tests/test_basic.py b/runbot_merge/tests/test_basic.py deleted file mode 100644 index 5bf13012..00000000 --- a/runbot_merge/tests/test_basic.py +++ /dev/null @@ -1,3694 +0,0 @@ -import datetime -import itertools -import json -import textwrap -import time -from unittest import mock - -import pytest -from lxml import html, etree - -import odoo -from utils import _simple_init, seen, re_matches, get_partner, Commit, pr_page, to_pr, part_of - - -@pytest.fixture -def repo(env, project, make_repo, users, setreviewers): - r = make_repo('repo') - project.write({'repo_ids': [(0, 0, { - 'name': r.name, - 'group_id': False, - 'required_statuses': 'legal/cla,ci/runbot' - })]}) - setreviewers(*project.repo_ids) - return r - -def test_trivial_flow(env, repo, page, users, config): - # create base branch - with repo: - [m] = repo.make_commits(None, Commit("initial", tree={'a': 'some content'}), ref='heads/master') - - # create PR with 2 commits - _, c1 = repo.make_commits( - m, - Commit('replace file contents', tree={'a': 'some other content'}), - Commit('add file', tree={'b': 'a second file'}), - ref='heads/other' - ) - pr = repo.make_pr(title="gibberish", body="blahblah", target='master', head='other') - - pr_id = to_pr(env, pr) - assert pr_id.state == 'opened' - env.run_crons() - assert pr.comments == [seen(env, pr, users)] - - pr_dashboard = pr_page(page, pr) - s = pr_dashboard.cssselect('.alert-info > ul > li') - assert [it.get('class') for it in s] == ['fail', 'fail', ''],\ - "merge method unset, review missing, no CI" - assert dict(zip( - [e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dt')], - [e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dd')], - )) == { - 'label': f"{config['github']['owner']}:other", - 'head': c1, - 'target': 'master', - } - - 
with repo: - repo.post_status(c1, 'success', 'legal/cla') - # rewrite status payload in old-style to ensure it does not break - c = env['runbot_merge.commit'].search([('sha', '=', c1)]) - c.statuses = json.dumps({k: v['state'] for k, v in json.loads(c.statuses).items()}) - - with repo: - repo.post_status(c1, 'success', 'ci/runbot') - - env.run_crons() - assert pr_id.state == 'validated' - - s = pr_page(page, pr).cssselect('.alert-info > ul > li') - assert [it.get('class') for it in s] == ['fail', 'fail', 'ok'],\ - "merge method unset, review missing, CI" - statuses = [ - (l.find('a').text.split(':')[0], l.get('class').strip()) - for l in s[2].cssselect('ul li') - ] - assert statuses == [('legal/cla', 'ok'), ('ci/runbot', 'ok')] - - with repo: - pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - assert pr_id.state == 'ready' - - # can't check labels here as running the cron will stage it - - env.run_crons() - assert pr_id.staging_id - assert pr_page(page, pr).cssselect('.alert-primary') - - with repo: - # get head of staging branch - staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'ci/runbot', target_url='http://foo.com/pog') - repo.post_status(staging_head.id, 'success', 'legal/cla') - # the should not block the merge because it's not part of the requirements - repo.post_status(staging_head.id, 'failure', 'ci/lint', target_url='http://ignored.com/whocares') - # need to store this because after the crons have run the staging will - # have succeeded and been disabled - st = pr_id.staging_id - env.run_crons() - - assert {tuple(t) for t in st.statuses} == { - (repo.name, 'legal/cla', 'success', ''), - (repo.name, 'ci/runbot', 'success', 'http://foo.com/pog'), - (repo.name, 'ci/lint', 'failure', 'http://ignored.com/whocares'), - } - - p = html.fromstring(page('/runbot_merge')) - s = p.cssselect('.staging div.dropdown li') - assert len(s) == 2 - assert s[1].get('class') == 'bg-success' - assert s[1][0].text.strip() == '{}: ci/runbot'.format(repo.name) - - assert st.state == 'success' - assert pr_id.state == 'merged' - assert pr_page(page, pr).cssselect('.alert-success') - - master = repo.commit('heads/master') - # with default-rebase, only one parent is "known" - assert master.parents[0] == m - assert repo.read_tree(master) == { - 'a': 'some other content', - 'b': 'a second file', - } - assert master.message == "gibberish\n\nblahblah\n\ncloses {repo.name}#1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - -class TestCommitMessage: - def test_commit_simple(self, env, repo, users, config): - """ verify 'closes ...' 
is correctly added in the commit message - """ - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.message == "simple commit message\n\ncloses {repo.name}#1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - - def test_commit_existing(self, env, repo, users, config): - """ verify do not duplicate 'closes' instruction - """ - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes #1', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - # closes #1 is already present, should not modify message - assert master.message == "simple commit message that closes #1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(reviewer=get_partner(env, users['reviewer'])) - - def test_commit_other(self, env, repo, users, config): - """ verify do not duplicate 'closes' instruction - """ - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes odoo/enterprise#1', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - # closes on another repositoy, should modify the commit message - assert master.message == "simple commit message that closes odoo/enterprise#1\n\ncloses {repo.name}#1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - - def test_commit_wrong_number(self, env, repo, users, config): - """ verify do not match on a wrong number - """ - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes #11', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - 
prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - # closes on another repositoy, should modify the commit message - assert master.message == "simple commit message that closes #11\n\ncloses {repo.name}#1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - - def test_commit_delegate(self, env, repo, users, config): - """ verify 'signed-off-by ...' is correctly added in the commit message for delegated review - """ - env['res.partner'].create({ - 'name': users['other'], - 'github_login': users['other'], - 'email': users['other'] + '@example.org' - }) - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen delegate=%s' % users['other'], config["role_reviewer"]["token"]) - prx.post_comment('hansen r+', config['role_other']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.message == "simple commit message\n\ncloses {repo.name}#1"\ - "\n\nSigned-off-by: {reviewer.formatted_email}"\ - .format(repo=repo, reviewer=get_partner(env, users['other'])) - - def test_commit_coauthored(self, env, repo, users, config): - """ verify 'closes ...' and 'Signed-off-by' are added before co-authored-by tags. 
- - Also checks that all co-authored-by are moved at the end of the - message - """ - with repo: - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, '''simple commit message - - -Co-authored-by: Bob <bob@example.com> - -Fixes a thing''', None, tree={'f': 'm2'}) - - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.message == """simple commit message - -Fixes a thing - -closes {repo.name}#1 - -Signed-off-by: {reviewer.formatted_email} -Co-authored-by: Bob <bob@example.com>""".format( - repo=repo, - reviewer=get_partner(env, users['reviewer']) - ) - -class TestWebhookSecurity: - def test_no_secret(self, env, project, repo): - """ Test 1: didn't add a secret to the repo, should be ignored - """ - project.secret = "a secret" - - with repo: - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) - - assert not env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr0.number), - ]) - - def test_wrong_secret(self, env, project, repo): - project.secret = "a secret" - with repo: - repo.set_secret("wrong secret") - - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) - - assert not env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr0.number), - ]) - - def test_correct_secret(self, env, project, repo): - project.secret = "a secret" - with repo: - repo.set_secret("a secret") - - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) - - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr0.number), - ]) - -def test_staging_ongoing(env, repo, config): - with repo: - # create base branch - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - # create PR - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) - pr1 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c1) - repo.post_status(c1, 'success', 'legal/cla') - repo.post_status(c1, 'success', 'ci/runbot') - pr1.post_comment("hansen r+ rebase-merge", config['role_reviewer']['token']) - env.run_crons() - pr1 = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', 
'=', 1) - ]) - assert pr1.staging_id - - with repo: - # create second PR and make ready for staging - c2 = repo.make_commit(m, 'other', None, tree={'a': 'some content', 'c': 'ccc'}) - c3 = repo.make_commit(c2, 'other', None, tree={'a': 'some content', 'c': 'ccc', 'd': 'ddd'}) - pr2 = repo.make_pr(title='gibberish', body='blahblah', target='master', head=c3) - repo.post_status(c3, 'success', 'legal/cla') - repo.post_status(c3, 'success', 'ci/runbot') - pr2.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - p_2 = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr2.number) - ]) - assert p_2.state == 'ready', "PR2 should not have been staged since there is a pending staging for master" - - staging_head = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging_head.id, 'success', 'ci/runbot') - repo.post_status(staging_head.id, 'success', 'legal/cla') - env.run_crons() - assert pr1.state == 'merged' - assert p_2.staging_id - - staging_head = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging_head.id, 'success', 'ci/runbot') - repo.post_status(staging_head.id, 'success', 'legal/cla') - env.run_crons() - assert p_2.state == 'merged' - -def test_staging_concurrent(env, repo, config): - """ test staging to different targets, should be picked up together """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/1.0', m) - repo.make_ref('heads/2.0', m) - - env['runbot_merge.project'].search([]).write({ - 'branch_ids': [(0, 0, {'name': '1.0'}), (0, 0, {'name': '2.0'})], - }) - - with repo: - c10 = repo.make_commit(m, 'AAA', None, tree={'m': 'm', 'a': 'a'}) - c11 = repo.make_commit(c10, 'BBB', None, tree={'m': 'm', 'a': 'a', 'b': 'b'}) - pr1 = repo.make_pr(title='t1', body='b1', target='1.0', head=c11) - repo.post_status(pr1.head, 'success', 'ci/runbot') - repo.post_status(pr1.head, 'success', 'legal/cla') - pr1.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - - c20 = repo.make_commit(m, 'CCC', None, tree={'m': 'm', 'c': 'c'}) - c21 = repo.make_commit(c20, 'DDD', None, tree={'m': 'm', 'c': 'c', 'd': 'd'}) - pr2 = repo.make_pr(title='t2', body='b2', target='2.0', head=c21) - repo.post_status(pr2.head, 'success', 'ci/runbot') - repo.post_status(pr2.head, 'success', 'legal/cla') - pr2.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - - pr1 = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr1.number) - ]) - assert pr1.staging_id - pr2 = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr2.number) - ]) - assert pr2.staging_id - -def test_staging_conflict_first(env, repo, users, config, page): - """ If the first batch of a staging triggers a conflict, the PR should be - marked as in error - """ - with repo: - m1 = repo.make_commit(None, 'initial', None, tree={'f': 'm1'}) - m2 = repo.make_commit(m1, 'second', None, tree={'f': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m1, 'other second', None, tree={'f': 'c1'}) - c2 = repo.make_commit(c1, 'third', None, tree={'f': 'c2'}) - pr = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(pr.head, 'success', 'ci/runbot') - repo.post_status(pr.head, 'success', 'legal/cla') - pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - 
env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.state == 'error' - assert pr.comments == [ - (users['reviewer'], 'hansen r+ rebase-merge'), - seen(env, pr, users), - (users['user'], 'Merge method set to rebase and merge, using the PR as merge commit message.'), - (users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users), - ] - - dangerbox = pr_page(page, pr).cssselect('.alert-danger span') - assert dangerbox - assert dangerbox[0].text.strip() == 'Unable to stage PR' - -def test_staging_conflict_second(env, repo, users, config): - """ If the non-first batch of a staging triggers a conflict, the PR should - just be skipped: it might be a conflict with an other PR which could fail - the staging - """ - with repo: - [m] = repo.make_commits(None, Commit('initial', tree={'a': '1'}), ref='heads/master') - - with repo: - repo.make_commits(m, Commit('first pr', tree={'a': '2'}), ref='heads/pr0') - pr0 = repo.make_pr(target='master', head='pr0') - repo.post_status(pr0.head, 'success', 'ci/runbot') - repo.post_status(pr0.head, 'success', 'legal/cla') - pr0.post_comment('hansen r+', config['role_reviewer']['token']) - - with repo: - repo.make_commits(m, Commit('second pr', tree={'a': '3'}), ref='heads/pr1') - pr1 = repo.make_pr(target='master', head='pr1') - repo.post_status(pr1.head, 'success', 'ci/runbot') - repo.post_status(pr1.head, 'success', 'legal/cla') - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr0_id = to_pr(env, pr0) - pr1_id = to_pr(env, pr1) - assert pr0_id.staging_id, "pr0 should have been staged" - assert not pr1_id.staging_id, "pr1 should not have been staged (due to conflict)" - assert pr1_id.state == 'ready', "pr1 should not be in error yet" - - # merge the staging, this should try to stage pr1, fail, and put it in error - # as it now conflicts with the master proper - with repo: - repo.post_status('staging.master', 'success', 'ci/runbot') - repo.post_status('staging.master', 'success', 'legal/cla') - env.run_crons() - - assert pr1_id.state == 'error', "now pr1 should be in error" - - -def test_staging_ci_timeout(env, repo, config, page): - """If a staging timeouts (~ delay since staged greater than - configured)... requeue? 
- """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'f': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'f': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'f': 'c2'}) - pr = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(pr.head, 'success', 'ci/runbot') - repo.post_status(pr.head, 'success', 'legal/cla') - pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.staging_id - timeout = env['runbot_merge.project'].search([]).ci_timeout - - pr_id.staging_id.staged_at = odoo.fields.Datetime.to_string(datetime.datetime.now() - datetime.timedelta(minutes=2*timeout)) - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - assert pr_id.state == 'error', "timeout should fail the PR" - - dangerbox = pr_page(page, pr).cssselect('.alert-danger span') - assert dangerbox - assert dangerbox[0].text == 'timed out (>60 minutes)' - -def test_timeout_bump_on_pending(env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'f': '0'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'c', None, tree={'f': '1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - st = env['runbot_merge.stagings'].search([]) - old_timeout = odoo.fields.Datetime.to_string(datetime.datetime.now() - datetime.timedelta(days=15)) - st.timeout_limit = old_timeout - with repo: - repo.post_status(repo.commit('heads/staging.master').id, 'pending', 'ci/runbot') - env.run_crons('runbot_merge.process_updated_commits') - assert st.timeout_limit > old_timeout - -def test_staging_ci_failure_single(env, repo, users, config, page): - """ on failure of single-PR staging, mark & notify failure - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - pr = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(pr.head, 'success', 'ci/runbot') - repo.post_status(pr.head, 'success', 'legal/cla') - pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - pr_id = to_pr(env, pr) - assert pr_id.staging_id - - staging_head = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging_head.id, 'failure', 'a/b') - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') # stable genius - env.run_crons() - assert pr_id.state == 'error' - - assert pr.comments == [ - (users['reviewer'], 'hansen r+ rebase-merge'), - seen(env, pr, users), - (users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."), - (users['user'], '@%(user)s @%(reviewer)s staging failed: ci/runbot' % users) - ] - - dangerbox = pr_page(page, pr).cssselect('.alert-danger span') - assert dangerbox - assert dangerbox[0].text == 'ci/runbot' - -def test_ff_failure(env, repo, config, page): - """ target updated while the PR is being staged => redo staging """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = 
repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - st = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - assert st - - with repo: - m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'}) - assert repo.commit('heads/master').id == m2 - - # report staging success & run cron to merge - staging = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging.id, 'success', 'legal/cla') - repo.post_status(staging.id, 'success', 'ci/runbot') - env.run_crons() - - assert st.reason == 'update is not a fast forward' - # check that it's added as title on the staging - doc = html.fromstring(page('/runbot_merge')) - _new, prev = doc.cssselect('li.staging') - - assert 'bg-gray-lighter' in prev.classes, "ff failure is ~ cancelling" - assert prev.get('title') == re_matches('fast forward failed \(update is not a fast forward\)') - - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id, "merge should not have succeeded" - assert repo.commit('heads/staging.master').id != staging.id,\ - "PR should be staged to a new commit" - -def test_ff_failure_batch(env, repo, users, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - a1 = repo.make_commit(m, 'a1', None, tree={'m': 'm', 'a': '1'}) - a2 = repo.make_commit(a1, 'a2', None, tree={'m': 'm', 'a': '2'}) - repo.make_ref('heads/A', a2) - A = repo.make_pr(title='A', body=None, target='master', head='A') - repo.post_status(A.head, 'success', 'legal/cla') - repo.post_status(A.head, 'success', 'ci/runbot') - A.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - - b1 = repo.make_commit(m, 'b1', None, tree={'m': 'm', 'b': '1'}) - b2 = repo.make_commit(b1, 'b2', None, tree={'m': 'm', 'b': '2'}) - repo.make_ref('heads/B', b2) - B = repo.make_pr(title='B', body=None, target='master', head='B') - repo.post_status(B.head, 'success', 'legal/cla') - repo.post_status(B.head, 'success', 'ci/runbot') - B.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - - c1 = repo.make_commit(m, 'c1', None, tree={'m': 'm', 'c': '1'}) - c2 = repo.make_commit(c1, 'c2', None, tree={'m': 'm', 'c': '2'}) - repo.make_ref('heads/C', c2) - C = repo.make_pr(title='C', body=None, target='master', head='C') - repo.post_status(C.head, 'success', 'legal/cla') - repo.post_status(C.head, 'success', 'ci/runbot') - C.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - - pr_a = to_pr(env, A) - pr_b = to_pr(env, B) - pr_c = to_pr(env, C) - - messages = [ - c['commit']['message'] - for c in repo.log('heads/staging.master') - ] - assert part_of('a2', pr_a) in messages - assert part_of('b2', pr_b) in messages - assert part_of('c2', pr_c) in messages - - # block FF - with repo: - repo.make_commit('heads/master', 'NO!', None, tree={'m': 'm2'}) - - old_staging = repo.commit('heads/staging.master') - # confirm staging - with repo: - repo.post_status('heads/staging.master', 'success', 'legal/cla') - 
repo.post_status('heads/staging.master', 'success', 'ci/runbot') - env.run_crons() - new_staging = repo.commit('heads/staging.master') - - assert new_staging.id != old_staging.id - - # confirm again - with repo: - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - env.run_crons() - messages = { - c['commit']['message'] - for c in repo.log('heads/master') - } - reviewer = get_partner(env, users["reviewer"]).formatted_email - assert messages == { - 'initial', 'NO!', - part_of('a1', pr_a), part_of('a2', pr_a), f'A\n\ncloses {pr_a.display_name}\n\nSigned-off-by: {reviewer}', - part_of('b1', pr_b), part_of('b2', pr_b), f'B\n\ncloses {pr_b.display_name}\n\nSigned-off-by: {reviewer}', - part_of('c1', pr_c), part_of('c2', pr_c), f'C\n\ncloses {pr_c.display_name}\n\nSigned-off-by: {reviewer}', - } - -class TestPREdition: - def test_edit(self, env, repo, config): - """ Editing PR: - - * title (-> message) - * body (-> message) - * base.ref (-> target) - """ - branch_1 = env['runbot_merge.branch'].create({ - 'name': '1.0', - 'project_id': env['runbot_merge.project'].search([]).id, - }) - - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - repo.make_ref('heads/1.0', m) - repo.make_ref('heads/2.0', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen rebase-ff r+', config['role_reviewer']['token']) - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'ready' - st = pr.staging_id - assert st - assert pr.message == 'title\n\nbody' - with repo: prx.title = "title 2" - assert pr.message == 'title 2\n\nbody' - with repo: prx.body = None - assert pr.message == "title 2" - assert pr.staging_id, \ - "message edition does not affect staging of rebased PRs" - with repo: prx.base = '1.0' - assert pr.target == branch_1 - assert not pr.staging_id, "updated the base of a staged PR should have unstaged it" - assert st.reason == f"{pr.display_name} target (base) branch was changed from 'master' to '1.0'" - - with repo: prx.base = '2.0' - assert not pr.exists() - env.run_crons() - - with repo: prx.base = '1.0' - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).target == branch_1 - - def test_retarget_update_commits(self, env, repo): - """ Retargeting a PR should update its commits count - """ - branch_1 = env['runbot_merge.branch'].create({ - 'name': '1.0', - 'project_id': env['runbot_merge.project'].search([]).id, - }) - master = env['runbot_merge.branch'].search([('name', '=', 'master')]) - - with repo: - # master is 1 commit ahead of 1.0 - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/1.0', m) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm2'}) - repo.make_ref('heads/master', m2) - - # the PR builds on master, but is errorneously targeted to 1.0 - c = repo.make_commit(m2, 'first', None, tree={'m': 'm3'}) - prx = repo.make_pr(title='title', body='body', target='1.0', head=c) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - 
('number', '=', prx.number) - ]) - assert not pr.squash - - with repo: - prx.base = 'master' - assert pr.target == master - assert pr.squash - - with repo: - prx.base = '1.0' - assert pr.target == branch_1 - assert not pr.squash - - # check if things also work right when modifying the PR then - # retargeting (don't see why not but...) - with repo: - c2 = repo.make_commit(m2, 'xxx', None, tree={'m': 'm4'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert not pr.squash - with repo: - prx.base = 'master' - assert pr.squash - - @pytest.mark.xfail(reason="github doesn't allow retargeting closed PRs", strict=True) - def test_retarget_closed(self, env, repo): - branch_1 = env['runbot_merge.branch'].create({ - 'name': '1.0', - 'project_id': env['runbot_merge.project'].search([]).id, - }) - - with repo: - # master is 1 commit ahead of 1.0 - [m] = repo.make_commits(None, repo.Commit('initial', tree={'1': '1'}), ref='heads/1.0') - repo.make_commits(m, repo.Commit('second', tree={'m': 'm'}), ref='heads/master') - - [c] = repo.make_commits(m, repo.Commit('first', tree={'m': 'm3'}), ref='heads/abranch') - prx = repo.make_pr(title='title', body='body', target='1.0', head=c) - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.target == branch_1 - with repo: - prx.close() - with repo: - prx.base = 'master' - -def test_close_staged(env, repo, config, page): - """ - When closing a staged PR, cancel the staging - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - env.run_crons() - assert pr.state == 'ready' - assert pr.staging_id - - with repo: - prx.close() - env.run_crons() - - assert not pr.staging_id - assert not env['runbot_merge.stagings'].search([]) - assert pr.state == 'closed' - assert pr_page(page, prx).cssselect('.alert-light') - -def test_forward_port(env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - head = m - for i in range(110): - head = repo.make_commit(head, 'c_%03d' % i, None, tree={'m': 'm', 'f': str(i)}) - # not sure why we wanted to wait here - - with repo: - pr = repo.make_pr(title='PR', body=None, target='master', head=head) - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - st = repo.commit('staging.master') - - with repo: - repo.post_status(st.id, 'success', 'legal/cla') - repo.post_status(st.id, 'success', 'ci/runbot') - env.run_crons() - - h = repo.commit('master') - assert st.id == h.id - assert set(h.parents) == {m, pr.head} - commits = {c['sha'] for c in repo.log('master')} - assert len(commits) == 112 - -@pytest.mark.skip("Needs to find a way to make set_ref fail on *second* call.") -def test_rebase_failure(env, repo, users, config): - """ It looks like gh.rebase() can fail in the final ref-setting after - the merging & commits creation 
has been performed. At this point, the - staging will fail (yay) but the target branch (tmp) would not get reset, - leading to the next PR being staged *on top* of the one being staged - right there, and pretty much integrating it, leading to very, very - strange results if the entire thing passes staging. - - Seen: https://github.com/odoo/odoo/pull/27835#issuecomment-430505429 - PR 27835 was merged to tmp at df0ae6c00e085dbaabcfec821208c9ace2f4b02d - then the set_ref failed, following which PR 27840 is merged to tmp at - 819b5414c27a92031a9ce3f159a8f466a4fd698c note that the first (left) - parent is the merge commit from PR 27835. The set_ref of PR 27840 - succeeded resulting in PR 27835 being integrated into the squashing of - 27840 (without any renaming or anything, just the content), following - which PR 27835 was merged and squashed as a "no-content" commit. - - Problem: I need to make try_staging > stage > rebase > set_ref fail - but only the first time, and not the set_ref in try_staging itself, and - that call is performed *in a subprocess* when running <remote> tests. - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - commit_a = repo.make_commit(m, 'A', None, tree={'m': 'm', 'a': 'a'}) - repo.make_ref('heads/a', commit_a) - pr_a = repo.make_pr(title='A', body=None, target='master', head='a') - repo.post_status(pr_a.head, 'success', 'ci/runbot') - repo.post_status(pr_a.head, 'success', 'legal/cla') - pr_a.post_comment('hansen r+', config['role_reviewer']['token']) - - commit_b = repo.make_commit(m, 'B', None, tree={'m': 'm', 'b': 'b'}) - repo.make_ref('heads/b', commit_b) - pr_b = repo.make_pr(title='B', body=None, target='master', head='b') - repo.post_status(pr_b.head, 'success', 'ci/runbot') - repo.post_status(pr_b.head, 'success', 'legal/cla') - pr_b.post_comment('hansen r+', config['role_reviewer']['token']) - - from odoo.addons.runbot_merge.github import GH - original = GH.set_ref - counter = itertools.count(start=1) - def wrapper(*args): - assert next(counter) != 2, "make it seem like updating the branch post-rebase fails" - return original(*args) - - env['runbot_merge.commit']._notify() - with mock.patch.object(GH, 'set_ref', autospec=True, side_effect=wrapper): - env['runbot_merge.project']._check_progress() - - env['runbot_merge.pull_requests.feedback']._send() - - assert pr_a.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_a, users), - (users['user'], re_matches(r'^Unable to stage PR')), - ] - assert pr_b.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_b, users), - ] - assert repo.read_tree(repo.commit('heads/staging.master')) == { - 'm': 'm', - 'b': 'b', - } - -def test_ci_failure_after_review(env, repo, users, config): - """ If a PR is r+'d but the CI ends up failing afterwards, ping the user - so they're aware. This is useful for the more "fire and forget" approach - especially small / simple PRs where you assume they're going to pass and - just r+ immediately. 
- """ - with repo: - prx = _simple_init(repo) - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - for ctx, url in [ - ('ci/runbot', 'https://a'), - ('ci/runbot', 'https://a'), - ('legal/cla', 'https://b'), - ('foo/bar', 'https://c'), - ('ci/runbot', 'https://a'), - ('legal/cla', 'https://d'), # url changes so different from the previous - ]: - with repo: - repo.post_status(prx.head, 'failure', ctx, target_url=url) - env.run_crons() - - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, prx, users), - (users['user'], "@{user} @{reviewer} 'ci/runbot' failed on this reviewed PR.".format_map(users)), - (users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)), - (users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)), - ] - -def test_reopen_merged_pr(env, repo, config, users): - """ Reopening a *merged* PR should cause us to immediately close it again, - and insult whoever did it - """ - with repo: - [m] = repo.make_commits( - None, - repo.Commit('initial', tree={'0': '0'}), - ref = 'heads/master' - ) - - [c] = repo.make_commits( - m, repo.Commit('second', tree={'0': '1'}), - ref='heads/abranch' - ) - prx = repo.make_pr(target='master', head='abranch') - repo.post_status(c, 'success', 'legal/cla') - repo.post_status(c, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'success', 'ci/runbot') - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert prx.state == 'closed' - assert pr.state == 'merged' - - repo.add_collaborator(users['other'], config['role_other']['token']) - with repo: - prx.open(config['role_other']['token']) - env.run_crons() - assert prx.state == 'closed' - assert pr.state == 'merged' - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, prx, users), - (users['user'], "@%s ya silly goose you can't reopen a merged PR." 
% users['other']) - ] - -class TestNoRequiredStatus: - def test_basic(self, env, repo, config): - """ check that mergebot can work on a repo with no CI at all - """ - env['runbot_merge.repository'].search([('name', '=', repo.name)]).status_ids = False - with repo: - m = repo.make_commit(None, 'initial', None, tree={'0': '0'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'0': '1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'ready' - st = pr.staging_id - assert st - env.run_crons() - assert st.state == 'success' - assert pr.state == 'merged' - - def test_updated(self, env, repo, config): - env['runbot_merge.repository'].search([('name', '=', repo.name)]).status_ids = False - with repo: - m = repo.make_commit(None, 'initial', None, tree={'0': '0'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'0': '1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'validated' - - # normal push - with repo: - repo.make_commits(c, repo.Commit('second', tree={'0': '2'}), ref=prx.ref) - env.run_crons() - assert pr.state == 'validated' - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'ready' - - # force push - with repo: - repo.make_commits(m, repo.Commit('xxx', tree={'0': 'm'}), ref=prx.ref) - env.run_crons() - assert pr.state == 'validated' - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'ready' - -class TestRetry: - @pytest.mark.xfail(reason="This may not be a good idea as it could lead to tons of rebuild spam") - def test_auto_retry_push(self, env, repo, config): - prx = _simple_init(repo) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - - staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'error' - - repo.update_ref(prx.ref, repo.make_commit(prx.head, 'third', None, tree={'m': 'c3'}), force=True) - assert pr.state == 'approved' - env['runbot_merge.project']._check_progress() - assert pr.state == 'approved' - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - env.run_crons() - assert pr.state == 'ready' - - staging_head2 = repo.commit('heads/staging.master') - assert staging_head2 != staging_head - repo.post_status(staging_head2.id, 'success', 'legal/cla') - repo.post_status(staging_head2.id, 'success', 'ci/runbot') - env.run_crons() - assert pr.state == 'merged' - - @pytest.mark.parametrize('retrier', ['user', 'other', 'reviewer']) - def test_retry_comment(self, env, repo, 
retrier, users, config): - """ An accepted but failed PR should be re-tried when the author or a - reviewer asks for it - """ - with repo: - prx = _simple_init(repo) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], - config["role_reviewer"]['token']) - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - - staging_head = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'error' - - with repo: - prx.post_comment('hansen retry', config['role_' + retrier]['token']) - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'ready' - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - - staging_head2 = repo.commit('heads/staging.master') - assert staging_head2 != staging_head - with repo: - repo.post_status(staging_head2.id, 'success', 'legal/cla') - repo.post_status(staging_head2.id, 'success', 'ci/runbot') - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'merged' - - def test_retry_again_message(self, env, repo, users, config, page): - """ For a retried PR, the error message on the PR's page should be the - later staging - """ - with repo: - pr = _simple_init(repo) - repo.post_status(pr.head, 'success', 'ci/runbot') - repo.post_status(pr.head, 'success', 'legal/cla') - pr.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], - config["role_reviewer"]['token']) - env.run_crons() - pr_id = to_pr(env, pr) - assert pr_id.staging_id - - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'failure', 'ci/runbot', - target_url='https://example.com/whocares') - env.run_crons() - assert pr_id.state == 'error' - - with repo: - pr.post_comment('hansen retry', config['role_reviewer']['token']) - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'failure', 'ci/runbot', - target_url='https://example.com/ohno') - env.run_crons() - assert pr_id.state == 'error' - - dangerbox = pr_page(page, pr).cssselect('.alert-danger span') - assert dangerbox - assert dangerbox[0].text == 'ci/runbot (view more at https://example.com/ohno)' - - def test_retry_ignored(self, env, repo, users, config): - """ Check feedback in case of ignored retry command on a non-error PR. 
- """ - with repo: - prx = _simple_init(repo) - prx.post_comment('hansen r+', config['role_reviewer']['token']) - prx.post_comment('hansen retry', config['role_reviewer']['token']) - env.run_crons() - - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - (users['reviewer'], 'hansen retry'), - seen(env, prx, users), - (users['user'], "I'm sorry, @{reviewer}: retry makes no sense when the PR is not in error.".format_map(users)), - ] - - @pytest.mark.parametrize('disabler', ['user', 'other', 'reviewer']) - def test_retry_disable(self, env, repo, disabler, users, config): - with repo: - prx = _simple_init(repo) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], - config["role_reviewer"]['token']) - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - - staging_head = repo.commit('heads/staging.master') - with repo: - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'error' - - with repo: - prx.post_comment('hansen r-', config['role_' + disabler]['token']) - assert pr.state == 'validated' - with repo: - repo.make_commit(prx.ref, 'third', None, tree={'m': 'c3'}) - # just in case, apparently in some case the first post_status uses the old head... - with repo: - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - env.run_crons() - assert pr.state == 'validated' - -class TestMergeMethod: - """ - if event['pull_request']['commits'] == 1, "squash" (/rebase); otherwise - regular merge - """ - def test_pr_single_commit(self, repo, env, config): - """ If single commit, default to rebase & FF - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).squash - - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - - staging = repo.commit('heads/staging.master') - assert not repo.is_ancestor(prx.head, of=staging.id),\ - "the pr head should not be an ancestor of the staging branch in a squash merge" - assert repo.read_tree(staging) == { - 'm': 'c1', 'm2': 'm2', - }, "the tree should still be correctly merged" - assert staging.parents == [m2],\ - "dummy commit aside, the previous master's tip should be the sole parent of the staging commit" - - with repo: - repo.post_status(staging.id, 'success', 'legal/cla') - repo.post_status(staging.id, 'success', 'ci/runbot') - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'merged' - 
assert prx.state == 'closed' - assert json.loads(pr.commits_map) == { - c1: staging.id, - '': staging.id, - }, "for a squash, the one PR commit should be mapped to the one rebased commit" - - def test_delegate_method(self, repo, env, users, config): - """Delegates should be able to configure the merge method. - """ - with repo: - m, _ = repo.make_commits( - None, - Commit('initial', tree={'m': 'm'}), - Commit('second', tree={'m2': 'm2'}), - ref="heads/master" - ) - - [c1] = repo.make_commits(m, Commit('first', tree={'m': 'c1'})) - pr = repo.make_pr(target='master', head=c1) - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen delegate+', config['role_reviewer']['token']) - pr.post_comment('hansen merge', config['role_user']['token']) - env.run_crons() - - assert pr.user == users['user'] - assert to_pr(env, pr).merge_method == 'merge' - - def test_pr_update_to_many_commits(self, repo, env): - """ - If a PR starts with 1 commit and a second commit is added, the PR - should be unflagged as squash - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.squash, "a PR with a single commit should be squashed" - - with repo: - repo.make_commit(prx.ref, 'second2', None, tree={'m': 'c2'}) - assert not pr.squash, "a PR with a single commit should not be squashed" - - def test_pr_reset_to_single_commit(self, repo, env): - """ - If a PR starts at >1 commits and is reset back to 1, the PR should be - re-flagged as squash - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second2', None, tree={'m': 'c2'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c2) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - pr.merge_method = 'rebase-merge' - assert not pr.squash, "a PR with a single commit should not be squashed" - - with repo: - repo.update_ref( - prx.ref, - repo.make_commit(m, 'fixup', None, tree={'m': 'c2'}), - force=True - ) - assert pr.squash, "a PR with a single commit should be squashed" - assert not pr.merge_method, \ - "resetting a PR to a single commit should remove the merge method" - - def test_pr_no_method(self, repo, env, users, config): - """ a multi-repo PR should not be staged by default, should also get - feedback indicating a merge method is necessary - """ - with repo: - _, m1, _ = repo.make_commits( - None, - Commit('M0', tree={'m': '0'}), - Commit('M1', tree={'m': '1'}), - Commit('M2', tree={'m': '2'}), - ref='heads/master' - ) - - _, b1 = repo.make_commits( - m1, - Commit('B0', tree={'b': '0'}), - Commit('B1', tree={'b': '1'}), - ) - prx = repo.make_pr(title='title', body='body', target='master', head=b1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - 
env.run_crons() - - assert not to_pr(env, prx).staging_id - - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, prx, users), - (users['user'], """@{user} @{reviewer} because this PR has multiple \ -commits, I need to know how to merge it: - -* `merge` to merge directly, using the PR as merge commit message -* `rebase-merge` to rebase and merge, using the PR as merge commit message -* `rebase-ff` to rebase and fast-forward -""".format_map(users)), - ] - - def test_pr_method_no_review(self, repo, env, users, config): - """ Configuring the method should be independent from the review - """ - with repo: - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) - - b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) - b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=b1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - with repo: - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - - prx.post_comment('hansen rebase-merge', config['role_reviewer']['token']) - assert pr.merge_method == 'rebase-merge' - env.run_crons() - - with repo: - prx.post_comment('hansen merge', config['role_reviewer']['token']) - assert pr.merge_method == 'merge' - env.run_crons() - - with repo: - prx.post_comment('hansen rebase-ff', config['role_reviewer']['token']) - assert pr.merge_method == 'rebase-ff' - env.run_crons() - - assert prx.comments == [ - (users['reviewer'], 'hansen rebase-merge'), - seen(env, prx, users), - (users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."), - (users['reviewer'], 'hansen merge'), - (users['user'], "Merge method set to merge directly, using the PR as merge commit message."), - (users['reviewer'], 'hansen rebase-ff'), - (users['user'], "Merge method set to rebase and fast-forward."), - ] - - def test_pr_rebase_merge(self, repo, env, users, config): - """ test result on rebase-merge - - left: PR - right: post-merge result - - +------+ +------+ - | M0 | | M0 | - +--^---+ +--^---+ - | | - | | - +--+---+ +--+---+ - +----> M1 <--+ | M1 <--+ - | +------+ | +------+ | - | | | - | | | - +--+---+ +---+---+ +------+ +---+---+ - | B0 | | M2 | | B0 +------> M2 | - +--^---+ +-------+ +--^---+ +---^---+ - | | | - +--+---+ +--+---+ | - PR | B1 | | B1 | | - +------+ +--^---+ | - | +---+---+ - +----------+ merge | - +-------+ - """ - with repo: - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) - - # test commit ordering issue while at it: github sorts commits on - # author.date instead of doing so topologically which is absolutely - # not what we want - committer = {'name': 'a', 'email': 'a', 'date': '2018-10-08T11:48:43Z'} - author0 = {'name': 'a', 'email': 'a', 'date': '2018-10-01T14:58:38Z'} - author1 = {'name': 'a', 'email': 'a', 'date': '2015-10-01T14:58:38Z'} - b0 = repo.make_commit(m1, 'B0', author=author0, committer=committer, tree={'m': '1', 'b': '0'}) - b1 = repo.make_commit(b0, 'B1', author=author1, committer=committer, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr(title='title', body='body', 
target='master', head=b1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, prx) - # create a dag (msg:str, parents:set) from the log - staging = log_to_node(repo.log('heads/staging.master')) - # then compare to the dag version of the right graph - nm2 = node('M2', node('M1', node('M0'))) - nb1 = node(part_of('B1', pr_id), node(part_of('B0', pr_id), nm2)) - reviewer = get_partner(env, users["reviewer"]).formatted_email - merge_head = ( - f'title\n\nbody\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}', - frozenset([nm2, nb1]) - ) - assert staging == merge_head - st = pr_id.staging_id - assert st - - with repo: prx.title = 'title 2' - assert not pr_id.staging_id, "updating the message of a merge-staged PR should unstage rien" - assert st.reason == f'{pr_id.display_name} merge message updated' - # since we updated the description, the merge_head value is impacted, - # and it's checked again later on - merge_head = ( - merge_head[0].replace('title', 'title 2'), - merge_head[1], - ) - env.run_crons() - assert pr_id.staging_id, "PR should immediately be re-stageable" - - with repo: - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'merged' - - # check that the dummy commit is not in the final master - master = log_to_node(repo.log('heads/master')) - assert master == merge_head - head = repo.commit('heads/master') - final_tree = repo.read_tree(head) - assert final_tree == {'m': '2', 'b': '1'}, "sanity check of final tree" - r1 = repo.commit(head.parents[1]) - r0 = repo.commit(r1.parents[0]) - assert json.loads(pr.commits_map) == { - b0: r0.id, - b1: r1.id, - '': head.id, - } - assert r0.parents == [m2] - - def test_pr_rebase_ff(self, repo, env, users, config): - """ test result on rebase-merge - - left: PR - right: post-merge result - - +------+ +------+ - | M0 | | M0 | - +--^---+ +--^---+ - | | - | | - +--+---+ +--+---+ - +----> M1 <--+ | M1 <--+ - | +------+ | +------+ | - | | | - | | | - +--+---+ +---+---+ +------+ +---+---+ - | B0 | | M2 | | B0 +------> M2 | - +--^---+ +-------+ +--^---+ +---^---+ - | | - +--+---+ +--+---+ - PR | B1 | | B1 | - +------+ +--^---+ - """ - with repo: - _, m1, m2 = repo.make_commits( - None, - Commit('M0', tree={'m': '0'}), - Commit('M1', tree={'m': '1'}), - Commit('M2', tree={'m': '2'}), - ref='heads/master' - ) - - b0, b1 = repo.make_commits( - m1, - Commit('B0', tree={'b': '0'}, author={'name': 'Maarten Tromp', 'email': 'm.tromp@example.nl', 'date': '1651-03-30T12:00:00Z'}), - Commit('B1', tree={'b': '1'}, author={'name': 'Rein Huydecoper', 'email': 'r.huydecoper@example.nl', 'date': '1986-04-17T12:00:00Z'}), - ) - - prx = repo.make_pr(title='title', body='body', target='master', head=b1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, prx) - # create a dag (msg:str, parents:set) from the log - staging = log_to_node(repo.log('heads/staging.master')) - # then compare to the dag version of the right graph - nm2 = node('M2', node('M1', node('M0'))) - reviewer = 
get_partner(env, users["reviewer"]).formatted_email - nb1 = node(f'B1\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}', - node(part_of('B0', pr_id), nm2)) - assert staging == nb1 - - with repo: - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'merged' - - # check that the dummy commit is not in the final master - master = log_to_node(repo.log('heads/master')) - assert master == nb1 - head = repo.commit('heads/master') - final_tree = repo.read_tree(head) - assert final_tree == {'m': '2', 'b': '1'}, "sanity check of final tree" - - m1 = head - m0 = repo.commit(m1.parents[0]) - assert json.loads(pr.commits_map) == { - '': m1.id, # merge commit - b1: m1.id, # second PR's commit - b0: m0.id, # first PR's commit - } - assert m0.parents == [m2], "can't hurt to check the parent of our root commit" - assert m0.author['date'] != m0.committer['date'], "commit date should have been rewritten" - assert m1.author['date'] != m1.committer['date'], "commit date should have been rewritten" - - utcday = datetime.datetime.utcnow().date() - def parse(dt): - return datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") - - # FIXME: actual commit creation could run before the date rollover and - # local datetime.utcnow() after - assert parse(m0.committer['date']).date() == utcday - # FIXME: git date storage is unreliable and non-portable outside of an - # unsigned 31b epoch range so the m0 event may get flung in the - # future (compared to the literal datum), this test unexpectedly - # becoming true if run on the exact wrong day - assert parse(m0.author['date']).date() != utcday - assert parse(m1.committer['date']).date() == utcday - assert parse(m0.author['date']).date() != utcday - - @pytest.mark.skip(reason="what do if the PR contains merge commits???") - def test_pr_contains_merges(self, repo, env): - pass - - def test_pr_force_merge_single_commit(self, repo, env, users, config): - """ should be possible to flag a PR as regular-merged, regardless of - its commits count - - M M<--+ - ^ ^ | - | -> | C0 - + | ^ - C0 + | - gib-+ - """ - with repo: - m = repo.make_commit(None, "M", None, tree={'a': 'a'}) - repo.make_ref('heads/master', m) - - c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) - prx = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - - with repo: - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.parents == [m, prx.head], \ - "master's parents should be the old master & the PR head" - - m = node('M') - c0 = node('C0', m) - reviewer = get_partner(env, users["reviewer"]).formatted_email - expected = node('gibberish\n\nblahblah\n\ncloses {}#{}' - '\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), m, c0) - assert log_to_node(repo.log('heads/master')), expected - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', 
prx.number), - ]) - assert json.loads(pr.commits_map) == { - prx.head: prx.head, - '': master.id - } - - def test_unrebase_emptymessage(self, repo, env, users, config): - """ When merging between master branches (e.g. forward port), the PR - may have only a title - """ - with repo: - m = repo.make_commit(None, "M", None, tree={'a': 'a'}) - repo.make_ref('heads/master', m) - - c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) - prx = repo.make_pr(title="gibberish", body=None, target='master', head=c0) - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - - with repo: - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.parents == [m, prx.head], \ - "master's parents should be the old master & the PR head" - - m = node('M') - c0 = node('C0', m) - reviewer = get_partner(env, users["reviewer"]).formatted_email - expected = node('gibberish\n\ncloses {}#{}' - '\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), m, c0) - assert log_to_node(repo.log('heads/master')), expected - - @pytest.mark.parametrize('separator', [ - '***', '___', '\n---', - '*'*12, '\n----------------', - '- - -', ' ** ** **' - ]) - def test_pr_message_break(self, repo, env, users, config, separator): - """ If the PR message contains a "thematic break", only the part before - should be included in the merge commit's message. - """ - reviewer = get_partner(env, users["reviewer"]).formatted_email - with repo: - root = repo.make_commits(None, Commit("root", tree={'a': 'a'}), ref='heads/master') - - repo.make_commits(root, Commit('C', tree={'a': 'b'}), ref=f'heads/change') - pr = repo.make_pr(title="title", body=f'first\n{separator}\nsecond', - target='master', head=f'change') - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - head = repo.commit('heads/master') - assert head.message == textwrap.dedent(f"""\ - title - - first - - closes {repo.name}#{pr.number} - - Signed-off-by: {reviewer} - """).strip(), "should not contain the content which follows the thematic break" - - def test_pr_message_setex_title(self, repo, env, users, config): - """ should not break on a proper SETEX-style title """ - reviewer = get_partner(env, users["reviewer"]).formatted_email - with repo: - root = repo.make_commits(None, Commit("root", tree={'a': 'a'}), ref='heads/master') - - repo.make_commits(root, Commit('C', tree={'a': 'b'}), ref=f'heads/change') - pr = repo.make_pr(title="title", body="""\ -Title ---- -This is some text - -Title 2 -------- -This is more text -*** -removed -""", - target='master', head=f'change') - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 
'legal/cla') - env.run_crons() - - head = repo.commit('heads/master') - assert head.message == textwrap.dedent(f"""\ - title - - Title - --- - This is some text - - Title 2 - ------- - This is more text - - closes {repo.name}#{pr.number} - - Signed-off-by: {reviewer} - """).strip(), "should not break the SETEX titles" - - def test_rebase_no_edit(self, repo, env, users, config): - """ Only the merge messages should be de-breaked - """ - reviewer = get_partner(env, users["reviewer"]).formatted_email - with repo: - root = repo.make_commits(None, Commit("root", tree={'a': 'a'}), ref='heads/master') - - repo.make_commits(root, Commit('Commit\n\nfirst\n***\nsecond', tree={'a': 'b'}), ref=f'heads/change') - pr = repo.make_pr(title="PR", body=f'first\n***\nsecond', - target='master', head='change') - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - head = repo.commit('heads/master') - assert head.message == textwrap.dedent(f"""\ - Commit - - first - *** - second - - closes {repo.name}#{pr.number} - - Signed-off-by: {reviewer} - """).strip(), "squashed / rebased messages should not be stripped" - - def test_title_no_edit(self, repo, env, users, config): - """The first line of a commit message should not be taken in account for - rewriting, especially as it can be untagged and interpreted as a - pseudo-header - """ - with repo: - repo.make_commits(None, Commit("0", tree={'a': '1'}), ref='heads/master') - repo.make_commits( - 'master', - Commit('Some: thing\n\nis odd', tree={'b': '1'}), - Commit('thing: thong', tree={'b': '2'}), - ref='heads/change') - - pr = repo.make_pr(target='master', head='change') - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen rebase-ff r+', config['role_reviewer']['token']) - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.staging_id # check PR is staged - - - reviewer = get_partner(env, users["reviewer"]).formatted_email - staging_head = repo.commit('staging.master') - assert staging_head.message == f"""\ -thing: thong - -closes {pr_id.display_name} - -Signed-off-by: {reviewer}""" - assert repo.commit(staging_head.parents[0]).message == f"""\ -Some: thing - -is odd - -Part-of: {pr_id.display_name}""" - - def test_pr_mergehead(self, repo, env, config): - """ if the head of the PR is a merge commit and one of the parents is - in the target, replicate the merge commit instead of merging - - rankdir="BT" - M2 -> M1 - C0 -> M1 - C1 -> C0 - C1 -> M2 - - C1 [label = "\\N / MERGE"] - """ - with repo: - m1 = repo.make_commit(None, "M1", None, tree={'a': '0'}) - m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) - repo.make_ref('heads/master', m2) - - c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) - c1 = repo.make_commit([c0, m2], 'C1', None, tree={'a': '1', 'b': '2'}) - prx = repo.make_pr(title="T", body="TT", target='master', head=c1) - env.run_crons() - - with repo: - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - 
repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.parents == [m2, c0] - m1 = node('M1') - expected = node('C1', node('C0', m1), node('M2', m1)) - assert log_to_node(repo.log('heads/master')), expected - - def test_pr_mergehead_nonmember(self, repo, env, users, config): - """ if the head of the PR is a merge commit but none of the parents is - in the target, merge normally - - rankdir="BT" - M2 -> M1 - B0 -> M1 - C0 -> M1 - C1 -> C0 - C1 -> B0 - - MERGE -> M2 - MERGE -> C1 - """ - with repo: - m1 = repo.make_commit(None, "M1", None, tree={'a': '0'}) - m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) - repo.make_ref('heads/master', m2) - - b0 = repo.make_commit(m1, 'B0', None, tree={'a': '0', 'bb': 'bb'}) - - c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) - c1 = repo.make_commit([c0, b0], 'C1', None, tree={'a': '0', 'b': '2', 'bb': 'bb'}) - prx = repo.make_pr(title="T", body="TT", target='master', head=c1) - env.run_crons() - - with repo: - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - master = repo.commit('heads/master') - assert master.parents == [m2, c1] - assert repo.read_tree(master) == {'a': '1', 'b': '2', 'bb': 'bb'} - - m1 = node('M1') - reviewer = get_partner(env, users["reviewer"]).formatted_email - expected = node( - 'T\n\nTT\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), - node('M2', m1), - node('C1', node('C0', m1), node('B0', m1)) - ) - assert log_to_node(repo.log('heads/master')), expected - - def test_squash_merge(self, repo, env, config, users): - with repo: - repo.make_commits(None, Commit('initial', tree={'a': '0'}), ref='heads/master') - - repo.make_commits( - 'master', - Commit('sub', tree={'b': '0'}, committer={'name': 'bob', 'email': 'builder@example.org', 'date': '1999-04-12T08:19:30Z'}), - ref='heads/other' - ) - pr1 = repo.make_pr(title='first pr', target='master', head='other') - repo.post_status('other', 'success', 'legal/cla') - repo.post_status('other', 'success', 'ci/runbot') - - repo.make_commits('master', Commit('x', tree={'x': '0'}), Commit('y', tree={'x': '1'}), ref='heads/other2') - pr2 = repo.make_pr(title='second pr', target='master', head='other2') - repo.post_status('other2', 'success', 'legal/cla') - repo.post_status('other2', 'success', 'ci/runbot') - env.run_crons() - - with repo: # comments sequencing - pr1.post_comment('hansen r+ squash', config['role_reviewer']['token']) - pr2.post_comment('hansen r+ squash', config['role_reviewer']['token']) - env.run_crons() - - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'success', 'ci/runbot') - env.run_crons() - - # PR 1 should have merged properly, the PR message should be the - # message of the merged commit - pr1_id = to_pr(env, pr1) - assert pr1_id.state == 'merged' - assert pr1.comments == [ - seen(env, pr1, users), - (users['reviewer'], 'hansen r+ squash'), - (users['user'], 'Merge method set to squash.') - ] - merged_head = repo.commit('master') - assert merged_head.message == f"""first pr - -closes {pr1_id.display_name} - -Signed-off-by: {get_partner(env, 
users["reviewer"]).formatted_email}\ -""" - assert merged_head.committer['name'] == 'bob' - assert merged_head.committer['email'] == 'builder@example.org' - commit_date = datetime.datetime.strptime(merged_head.committer['date'], '%Y-%m-%dT%H:%M:%SZ') - # using timestamp (and working in seconds) because `pytest.approx` - # silently fails on datetimes (#8395) - assert commit_date.timestamp() == pytest.approx(time.time(), abs=5*60), \ - "the commit date of the merged commit should be about now, despite" \ - " the source commit being >20 years old" - - pr2_id = to_pr(env, pr2) - assert pr2_id.state == 'ready' - assert not pr2_id.merge_method - assert pr2.comments == [ - seen(env, pr2, users), - (users['reviewer'], 'hansen r+ squash'), - (users['user'], f"I'm sorry, @{users['reviewer']}: squash can only be used with a single commit at this time."), - (users['user'], """@{user} @{reviewer} because this PR has multiple commits, I need to know how to merge it: - -* `merge` to merge directly, using the PR as merge commit message -* `rebase-merge` to rebase and merge, using the PR as merge commit message -* `rebase-ff` to rebase and fast-forward -""".format_map(users)) - ] - - @pytest.mark.xfail(reason="removed support for squash- command") - def test_disable_squash_merge(self, repo, env, config): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ squash-', config['role_reviewer']['token']) - assert not env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).squash - - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).staging_id - - staging = repo.commit('heads/staging.master') - assert repo.is_ancestor(prx.head, of=staging.id) - assert staging.parents == [m2, c1] - assert repo.read_tree(staging) == { - 'm': 'c1', 'm2': 'm2', - } - - repo.post_status(staging.id, 'success', 'legal/cla') - repo.post_status(staging.id, 'success', 'ci/runbot') - env.run_crons() - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'merged' - assert prx.state == 'closed' - -class TestPRUpdate(object): - """ Pushing on a PR should update the HEAD except for merged PRs, it - can have additional effect (see individual tests) - """ - def test_update_opened(self, env, repo): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.head == c - # alter & push force PR entirely - with repo: - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - - def test_reopen_update(self, env, repo): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 
'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - with repo: - prx.close() - assert pr.state == 'closed' - assert pr.head == c - - with repo: - prx.open() - assert pr.state == 'opened' - - with repo: - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - - def test_update_validated(self, env, repo): - """ Should reset to opened - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.head == c - assert pr.state == 'validated' - - with repo: - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'opened' - - def test_update_approved(self, env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - prx.post_comment('hansen r+', config['role_reviewer']['token']) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.head == c - assert pr.state == 'approved' - - with repo: - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'opened' - - def test_update_ready(self, env, repo, config): - """ Should reset to opened - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.head == c - assert pr.state == 'ready' - - with repo: - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'opened' - - def test_update_staged(self, env, repo, config): - """ Should cancel the staging & reset PR to opened - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - 
env.run_crons() - assert pr.state == 'ready' - assert pr.staging_id - - with repo: - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'opened' - assert not pr.staging_id - assert not env['runbot_merge.stagings'].search([]) - - def test_split(self, env, repo, config): - """ Should remove the PR from its split, and possibly delete the split - entirely. - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) - repo.make_ref('heads/p1', c) - prx1 = repo.make_pr(title='t1', body='b1', target='master', head='p1') - repo.post_status(prx1.head, 'success', 'legal/cla') - repo.post_status(prx1.head, 'success', 'ci/runbot') - prx1.post_comment('hansen r+', config['role_reviewer']['token']) - - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) - repo.make_ref('heads/p2', c) - prx2 = repo.make_pr(title='t2', body='b2', target='master', head='p2') - repo.post_status(prx2.head, 'success', 'legal/cla') - repo.post_status(prx2.head, 'success', 'ci/runbot') - prx2.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr1, pr2 = env['runbot_merge.pull_requests'].search([], order='number') - assert pr1.number == prx1.number - assert pr2.number == prx2.number - assert pr1.staging_id == pr2.staging_id - s0 = pr1.staging_id - - with repo: - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - env.run_crons() - - assert pr1.staging_id and pr1.staging_id != s0, "pr1 should have been re-staged" - assert not pr2.staging_id, "pr2 should not" - # TODO: remote doesn't currently handle env context so can't mess - # around using active_test=False - assert env['runbot_merge.split'].search([]) - - with repo: - repo.update_ref(prx2.ref, repo.make_commit(c, 'second', None, tree={'m': 'm', '2': '22'}), force=True) - # probably not necessary ATM but... 
- env.run_crons() - - assert pr2.state == 'opened', "state should have been reset" - assert not env['runbot_merge.split'].search([]), "there should be no split left" - - def test_update_error(self, env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - env.run_crons() - assert pr.state == 'ready' - assert pr.staging_id - - h = repo.commit('heads/staging.master').id - with repo: - repo.post_status(h, 'success', 'legal/cla') - repo.post_status(h, 'failure', 'ci/runbot') - env.run_crons() - assert not pr.staging_id - assert pr.state == 'error' - - with repo: - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'opened' - - def test_unknown_pr(self, env, repo): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/1.0', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='1.0', head=c) - assert not env['runbot_merge.pull_requests'].search([('number', '=', prx.number)]) - - env['runbot_merge.project'].search([]).write({ - 'branch_ids': [(0, 0, {'name': '1.0'})] - }) - - with repo: - c2 = repo.make_commit(c, 'second', None, tree={'m': 'c2'}) - repo.update_ref(prx.ref, c2, force=True) - - assert not env['runbot_merge.pull_requests'].search([('number', '=', prx.number)]) - - def test_update_to_ci(self, env, repo): - """ If a PR is updated to a known-valid commit, it should be - validated - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - repo.post_status(c2, 'success', 'legal/cla') - repo.post_status(c2, 'success', 'ci/runbot') - env.run_crons() - - with repo: - prx = repo.make_pr(title='title', body='body', target='master', head=c) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.head == c - assert pr.state == 'opened' - - with repo: - repo.update_ref(prx.ref, c2, force=True) - assert pr.head == c2 - assert pr.state == 'validated' - - def test_update_missed(self, env, repo, config): - """ Sometimes github's webhooks don't trigger properly, a branch's HEAD - does not get updated and we might e.g. attempt to merge a PR despite it - now being unreviewed or failing CI or somesuch. - - This is not a super frequent occurrence, and possibly not the most - problematic issue ever (e.g. if the branch doesn't CI it's not going to - pass staging, though we might still be staging a branch which had been - unreviewed). - - So during the staging process, the heads should be checked, and the PR - will not be staged if the heads don't match (though it'll be reset to - open, rather than put in an error state as technically there's no - failure, we just want to notify users that something went odd with the - mergebot). 
- - TODO: other cases / situations where we want to update the head? - """ - with repo: - repo.make_commits(None, repo.Commit('m', tree={'a': '0'}), ref='heads/master') - - [c] = repo.make_commits( - 'heads/master', repo.Commit('c', tree={'a': '1'}), ref='heads/abranch') - pr = repo.make_pr(target='master', head='abranch') - repo.post_status(pr.head, 'success', 'legal/cla') - repo.post_status(pr.head, 'success', 'ci/runbot') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number), - ]) - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'ready' - - # TODO: find way to somehow skip / ignore the update_ref? - with repo: - # can't push a second commit because then the staging crashes due - # to the PR *actually* having more than 1 commit and thus needing - # a configuration - [c2] = repo.make_commits('heads/master', repo.Commit('c2', tree={'a': '2'})) - repo.post_status(c2, 'success', 'legal/cla') - repo.post_status(c2, 'success', 'ci/runbot') - repo.update_ref(pr.ref, c2, force=True) - - # we missed the update notification so the db should still be at c and - # in a "ready" state - pr_id.write({ - 'head': c, - 'state': 'ready', - }) - - env.run_crons() - - # the PR should not get merged, and should be updated - assert pr_id.state == 'validated' - assert pr_id.head == c2 - - pr_id.write({'head': c, 'state': 'ready'}) - with repo: - pr.post_comment('hansen check') - env.run_crons() - assert pr_id.state == 'validated' - assert pr_id.head == c2 - - def test_update_closed(self, env, repo): - with repo: - [m] = repo.make_commits(None, repo.Commit('initial', tree={'m': 'm'}), ref='heads/master') - - [c] = repo.make_commits(m, repo.Commit('first', tree={'m': 'm3'}), ref='heads/abranch') - prx = repo.make_pr(title='title', body='body', target='master', head=c) - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'opened' - assert pr.head == c - assert pr.squash - - with repo: - prx.close() - - with repo: - c2 = repo.make_commit(c, 'xxx', None, tree={'m': 'm4'}) - repo.update_ref(prx.ref, c2) - - assert pr.state == 'closed' - assert pr.head == c - assert pr.squash - - with repo: - prx.open() - assert pr.state == 'opened' - assert pr.head == c2 - assert not pr.squash - - def test_update_closed_revalidate(self, env, repo): - """ The PR should be validated on opening and reopening in case there's - already a CI+ stored (as the CI might never trigger unless explicitly - re-requested) - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - repo.post_status(c, 'success', 'legal/cla') - repo.post_status(c, 'success', 'ci/runbot') - prx = repo.make_pr(title='title', body='body', target='master', head=c) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'validated', \ - "if a PR is created on a CI'd commit, it should be validated immediately" - - with repo: prx.close() - assert pr.state == 'closed' - - with repo: prx.open() - assert pr.state == 'validated', \ - "if a PR is reopened and had a CI'd head, it should be validated immediately" - - @pytest.mark.xfail(reason="github doesn't allow reopening force-pushed PRs", strict=True) 
- def test_force_update_closed(self, env, repo): - with repo: - [m] = repo.make_commits(None, repo.Commit('initial', tree={'m': 'm'}), ref='heads/master') - - [c] = repo.make_commits(m, repo.Commit('first', tree={'m': 'm3'}), ref='heads/abranch') - prx = repo.make_pr(title='title', body='body', target='master', head=c) - env.run_crons() - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - with repo: - prx.close() - - with repo: - c2 = repo.make_commit(m, 'xxx', None, tree={'m': 'm4'}) - repo.update_ref(prx.ref, c2, force=True) - - with repo: - prx.open() - assert pr.head == c2 - -class TestBatching(object): - def _pr(self, repo, prefix, trees, *, target='master', user, reviewer, - statuses=(('ci/runbot', 'success'), ('legal/cla', 'success')) - ): - """ Helper creating a PR from a series of commits on a base - """ - *_, c = repo.make_commits( - 'heads/{}'.format(target), - *( - repo.Commit('commit_{}_{:02}'.format(prefix, i), tree=t) - for i, t in enumerate(trees) - ), - ref='heads/{}'.format(prefix) - ) - pr = repo.make_pr(title='title {}'.format(prefix), body='body {}'.format(prefix), - target=target, head=prefix, token=user) - - for context, result in statuses: - repo.post_status(c, result, context) - if reviewer: - pr.post_comment( - 'hansen r+%s' % (' rebase-merge' if len(trees) > 1 else ''), - reviewer - ) - return pr - - def test_staging_batch(self, env, repo, users, config): - """ If multiple PRs are ready for the same target at the same point, - they should be staged together - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - env.run_crons() - - pr1 = to_pr(env, pr1) - assert pr1.staging_id - pr2 = to_pr(env, pr2) - assert pr1.staging_id - assert pr2.staging_id - assert pr1.staging_id == pr2.staging_id - - log = list(repo.log('heads/staging.master')) - staging = log_to_node(log) - reviewer = get_partner(env, users["reviewer"]).formatted_email - p1 = node( - 'title PR1\n\nbody PR1\n\ncloses {}\n\nSigned-off-by: {}'.format(pr1.display_name, reviewer), - node('initial'), - node(part_of('commit_PR1_01', pr1), node(part_of('commit_PR1_00', pr1), node('initial'))) - ) - p2 = node( - 'title PR2\n\nbody PR2\n\ncloses {}\n\nSigned-off-by: {}'.format(pr2.display_name, reviewer), - p1, - node(part_of('commit_PR2_01', pr2), node(part_of('commit_PR2_00', pr2), p1)) - ) - assert staging == p2 - - def test_staging_batch_norebase(self, env, repo, users, config): - """ If multiple PRs are ready for the same target at the same point, - they should be staged together - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr1.post_comment('hansen merge', config['role_reviewer']['token']) - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr2.post_comment('hansen merge', config['role_reviewer']['token']) - env.run_crons() - - pr1 = to_pr(env, pr1) - assert pr1.staging_id - assert 
pr1.merge_method == 'merge' - pr2 = to_pr(env, pr2) - assert pr2.merge_method == 'merge' - assert pr1.staging_id - assert pr2.staging_id - assert pr1.staging_id == pr2.staging_id - - log = list(repo.log('staging.master')) - - staging = log_to_node(log) - reviewer = get_partner(env, users["reviewer"]).formatted_email - - p1 = node( - 'title PR1\n\nbody PR1\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr1.number, reviewer), - node('initial'), - node('commit_PR1_01', node('commit_PR1_00', node('initial'))) - ) - p2 = node( - 'title PR2\n\nbody PR2\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr2.number, reviewer), - p1, - node('commit_PR2_01', node('commit_PR2_00', node('initial'))) - ) - assert staging == p2 - - def test_staging_batch_squash(self, env, repo, users, config): - """ If multiple PRs are ready for the same target at the same point, - they should be staged together - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - env.run_crons() - - pr1 = to_pr(env, pr1) - assert pr1.staging_id - pr2 = to_pr(env, pr2) - assert pr1.staging_id - assert pr2.staging_id - assert pr1.staging_id == pr2.staging_id - - log = list(repo.log('heads/staging.master')) - - staging = log_to_node(log) - reviewer = get_partner(env, users["reviewer"]).formatted_email - expected = node('commit_PR2_00\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr2.number, reviewer), - node('commit_PR1_00\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr1.number, reviewer), - node('initial'))) - assert staging == expected - - def test_batching_pressing(self, env, repo, config): - """ "Pressing" PRs should be selected before normal & batched together - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - - pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr11.post_comment('hansen priority=1', config['role_reviewer']['token']) - pr12.post_comment('hansen priority=1', config['role_reviewer']['token']) - - pr21, pr22, pr11, pr12 = prs = [to_pr(env, pr) for pr in [pr21, pr22, pr11, pr12]] - assert pr21.priority == pr22.priority == 2 - assert pr11.priority == pr12.priority == 1 - - env.run_crons() - - assert all(pr.state == 'ready' for pr in prs) - assert not pr21.staging_id - assert not pr22.staging_id - assert pr11.staging_id - assert pr12.staging_id - assert pr11.staging_id == pr12.staging_id - - def test_batching_urgent(self, env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], 
reviewer=config['role_reviewer']['token']) - pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - - pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr11.post_comment('hansen priority=1', config['role_reviewer']['token']) - pr12.post_comment('hansen priority=1', config['role_reviewer']['token']) - - # stage PR1 - env.run_crons() - p_11, p_12, p_21, p_22 = \ - [to_pr(env, pr) for pr in [pr11, pr12, pr21, pr22]] - assert not p_21.staging_id or p_22.staging_id - assert p_11.staging_id and p_12.staging_id - assert p_11.staging_id == p_12.staging_id - staging_1 = p_11.staging_id - - # no statuses run on PR0s - with repo: - pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], user=config['role_user']['token'], reviewer=None, statuses=[]) - pr01.post_comment('hansen priority=0 rebase-merge', config['role_reviewer']['token']) - p_01 = to_pr(env, pr01) - assert p_01.state == 'opened' - assert p_01.priority == 0 - - env.run_crons() - # first staging should be cancelled and PR0 should be staged - # regardless of CI (or lack thereof) - assert not staging_1.active - assert not p_11.staging_id and not p_12.staging_id - assert p_01.staging_id - - def test_batching_urgenter_than_split(self, env, repo, config): - """ p=0 PRs should take priority over split stagings (processing - of a staging having CI-failed and being split into sub-stagings) - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - env.run_crons() - - p_1 = to_pr(env, pr1) - p_2 = to_pr(env, pr2) - st = env['runbot_merge.stagings'].search([]) - - # both prs should be part of the staging - assert st.mapped('batch_ids.prs') == p_1 | p_2 - - # add CI failure - with repo: - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons() - - # should have staged the first half - assert p_1.staging_id.heads - assert not p_2.staging_id.heads - - # during restaging of pr1, create urgent PR - with repo: - pr0 = self._pr(repo, 'urgent', [{'a': 'a', 'b': 'b'}], user=config['role_user']['token'], reviewer=None, statuses=[]) - pr0.post_comment('hansen priority=0', config['role_reviewer']['token']) - env.run_crons() - - # TODO: maybe just deactivate stagings instead of deleting them when canceling? 
- assert not p_1.staging_id - assert to_pr(env, pr0).staging_id - - def test_urgent_failed(self, env, repo, config): - """ Ensure pr[p=0,state=failed] don't get picked up - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - - p_21 = to_pr(env, pr21) - - # no statuses run on PR0s - with repo: - pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], user=config['role_user']['token'], reviewer=None, statuses=[]) - pr01.post_comment('hansen priority=0', config['role_reviewer']['token']) - p_01 = to_pr(env, pr01) - p_01.state = 'error' - - env.run_crons() - assert not p_01.staging_id, "p_01 should not be picked up as it's failed" - assert p_21.staging_id, "p_21 should have been staged" - - @pytest.mark.skip(reason="Maybe nothing to do, the PR is just skipped and put in error?") - def test_batching_merge_failure(self): - pass - - def test_staging_ci_failure_batch(self, env, repo, config): - """ on failure split batch & requeue - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) - - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - env.run_crons() - - st = env['runbot_merge.stagings'].search([]) - # both prs should be part of the staging - assert len(st.mapped('batch_ids.prs')) == 2 - # add CI failure - with repo: - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - - pr1 = env['runbot_merge.pull_requests'].search([('number', '=', pr1.number)]) - pr2 = env['runbot_merge.pull_requests'].search([('number', '=', pr2.number)]) - - env.run_crons() - # should have split the existing batch into two, with one of the - # splits having been immediately restaged - st = env['runbot_merge.stagings'].search([]) - assert len(st) == 1 - assert pr1.staging_id and pr1.staging_id == st - - sp = env['runbot_merge.split'].search([]) - assert len(sp) == 1 - - # This is the failing PR! 
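- # fail CI on the restaged half so pr1 ends up in error; the remaining split (pr2) should then be staged on its own and merge once its statuses pass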
- h = repo.commit('heads/staging.master').id - with repo: - repo.post_status(h, 'failure', 'ci/runbot') - repo.post_status(h, 'success', 'legal/cla') - env.run_crons() - assert pr1.state == 'error' - - assert pr2.staging_id - - h = repo.commit('heads/staging.master').id - with repo: - repo.post_status(h, 'success', 'ci/runbot') - repo.post_status(h, 'success', 'legal/cla') - env.run_crons('runbot_merge.process_updated_commits', 'runbot_merge.merge_cron', 'runbot_merge.staging_cron') - assert pr2.state == 'merged' - -class TestReviewing(object): - def test_reviewer_rights(self, env, repo, users, config): - """Only users with review rights will have their r+ (and other - attributes) taken in account - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_other']['token']) - env.run_crons() - - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'validated' - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'ready' - # second r+ to check warning - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - - env.run_crons() - assert prx.comments == [ - (users['other'], 'hansen r+'), - seen(env, prx, users), - (users['user'], "I'm sorry, @{}. 
I'm afraid I can't do that.".format(users['other'])), - (users['reviewer'], 'hansen r+'), - (users['reviewer'], 'hansen r+'), - (users['user'], "I'm sorry, @{}: this PR is already reviewed, reviewing it again is useless.".format( - users['reviewer'])), - ] - - def test_self_review_fail(self, env, repo, users, config): - """ Normal reviewers can't self-review - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1, token=config['role_reviewer']['token']) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - assert prx.user == users['reviewer'] - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'validated' - - env.run_crons() - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, prx, users), - (users['user'], "I'm sorry, @{}: you can't review+.".format(users['reviewer'])), - ] - - def test_self_review_success(self, env, repo, users, config): - """ Some users are allowed to self-review - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1, token=config['role_self_reviewer']['token']) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', config['role_self_reviewer']['token']) - env.run_crons() - - assert prx.user == users['self_reviewer'] - assert env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]).state == 'ready' - - def test_delegate_review(self, env, repo, users, config): - """Users should be able to delegate review to either the creator of - the PR or an other user without review rights - """ - env['res.partner'].create({ - 'name': users['user'], - 'github_login': users['user'], - 'email': users['user'] + '@example.org', - }) - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen delegate+', config['role_reviewer']['token']) - prx.post_comment('hansen r+', config['role_user']['token']) - env.run_crons() - - assert prx.user == users['user'] - assert to_pr(env, prx).state == 'ready' - - def test_delegate_review_thirdparty(self, env, repo, users, config): - """Users should be able to delegate review to either the creator of - the PR or an other user without review rights - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = 
repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - # flip case to check that github login is case-insensitive - other = ''.join(c.lower() if c.isupper() else c.upper() for c in users['other']) - prx.post_comment('hansen delegate=%s' % other, config['role_reviewer']['token']) - env.run_crons() - env['res.partner'].search([('github_login', '=', other)]).email = f'{other}@example.org' - - with repo: - # check this is ignored - prx.post_comment('hansen r+', config['role_user']['token']) - assert prx.user == users['user'] - prx_id = to_pr(env, prx) - assert prx_id.state == 'validated' - - with repo: - # check this works - prx.post_comment('hansen r+', config['role_other']['token']) - assert prx_id.state == 'ready' - - def test_delegate_prefixes(self, env, repo, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - prx.post_comment('hansen delegate=foo,@bar,#baz', config['role_reviewer']['token']) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - assert {d.github_login for d in pr.delegates} == {'foo', 'bar', 'baz'} - - def test_actual_review(self, env, repo, config): - """ treat github reviews as regular comments - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - with repo: - prx.post_review('COMMENT', "hansen priority=1", config['role_reviewer']['token']) - assert pr.priority == 1 - assert pr.state == 'opened' - - with repo: - prx.post_review('APPROVE', "hansen priority=2", config['role_reviewer']['token']) - assert pr.priority == 2 - assert pr.state == 'opened' - - with repo: - prx.post_review('REQUEST_CHANGES', 'hansen priority=1', config['role_reviewer']['token']) - assert pr.priority == 1 - assert pr.state == 'opened' - - with repo: - prx.post_review('COMMENT', 'hansen r+', config['role_reviewer']['token']) - assert pr.priority == 1 - assert pr.state == 'approved' - - def test_no_email(self, env, repo, users, config, partners): - """A review should be rejected if the reviewer doesn't have an email - configured, otherwise the email address will show up - @users.noreply.github.com which is *weird*. 
- """ - with repo: - [m] = repo.make_commits( - None, - Commit('initial', tree={'m': '1'}), - ref='heads/master' - ) - [c] = repo.make_commits(m, Commit('first', tree={'m': '2'})) - pr = repo.make_pr(target='master', head=c) - env.run_crons() - with repo: - pr.post_comment('hansen delegate+', config['role_reviewer']['token']) - pr.post_comment('hansen r+', config['role_user']['token']) - env.run_crons() - - user_partner = env['res.partner'].search([('github_login', '=', users['user'])]) - assert user_partner.email is False - assert pr.comments == [ - seen(env, pr, users), - (users['reviewer'], 'hansen delegate+'), - (users['user'], 'hansen r+'), - (users['user'], f"I'm sorry, @{users['user']}: I must know your email before you can review PRs. Please contact an administrator."), - ] - user_partner.fetch_github_email() - assert user_partner.email - with repo: - pr.post_comment('hansen r+', config['role_user']['token']) - env.run_crons() - assert to_pr(env, pr).state == 'approved' - - -class TestUnknownPR: - """ Sync PRs initially looked excellent but aside from the v4 API not - being stable yet, it seems to have greatly regressed in performances to - the extent that it's almost impossible to sync odoo/odoo today: trying to - fetch more than 2 PRs per query will fail semi-randomly at one point, so - fetching all 15000 PRs takes hours - - => instead, create PRs on the fly when getting notifications related to - valid but unknown PRs - """ - def test_rplus_unknown(self, repo, env, config, users): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot', target_url="http://example.org/wheee") - env.run_crons() - - # assume an unknown but ready PR: we don't know the PR or its head commit - env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]).unlink() - env['runbot_merge.commit'].search([('sha', '=', prx.head)]).unlink() - - # reviewer reviewers - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - with repo: - prx.post_review('REQUEST_CHANGES', 'hansen r-', config['role_reviewer']['token']) - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - - Fetch = env['runbot_merge.fetch_job'] - fetches = Fetch.search([('repository', '=', repo.name), ('number', '=', prx.number)]) - assert len(fetches) == 1, f"expected one fetch for {prx.number}, found {len(fetches)}" - - env.run_crons('runbot_merge.fetch_prs_cron') - env.run_crons() - assert not Fetch.search([('repository', '=', repo.name), ('number', '=', prx.number)]) - - c = env['runbot_merge.commit'].search([('sha', '=', prx.head)]) - assert json.loads(c.statuses) == { - 'legal/cla': {'state': 'success', 'target_url': None, 'description': None}, - 'ci/runbot': {'state': 'success', 'target_url': 'http://example.org/wheee', 'description': None} - } - assert prx.comments == [ - seen(env, prx, users), - (users['reviewer'], 'hansen r+'), - (users['reviewer'], 'hansen r+'), - (users['user'], "I didn't know about this PR and had to " - "retrieve its information, you may have to " - "re-approve it as I didn't see previous commands."), - seen(env, prx, users), - ] - - pr = 
env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - assert pr.state == 'validated' - - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'ready' - - def test_fetch_closed(self, env, repo, users, config): - """ If an "unknown PR" is fetched while closed, it should be saved as - closed - """ - with repo: - m, _ = repo.make_commits( - None, - Commit('initial', tree={'m': 'm'}), - Commit('second', tree={'m2': 'm2'}), - ref='heads/master') - - [c1] = repo.make_commits(m, Commit('first', tree={'m': 'c1'})) - pr = repo.make_pr(title='title', body='body', target='master', head=c1) - env.run_crons() - with repo: - pr.close() - - # assume an unknown but ready PR: we don't know the PR or its head commit - to_pr(env, pr).unlink() - env['runbot_merge.commit'].search([('sha', '=', pr.head)]).unlink() - - # reviewer reviewers - with repo: - pr.post_comment('hansen r+', config['role_reviewer']['token']) - - Fetch = env['runbot_merge.fetch_job'] - fetches = Fetch.search([('repository', '=', repo.name), ('number', '=', pr.number)]) - assert len(fetches) == 1, f"expected one fetch for {pr.number}, found {len(fetches)}" - - env.run_crons('runbot_merge.fetch_prs_cron') - env.run_crons() - assert not Fetch.search([('repository', '=', repo.name), ('number', '=', pr.number)]) - - assert to_pr(env, pr).state == 'closed' - assert pr.comments == [ - seen(env, pr, users), - (users['reviewer'], 'hansen r+'), - (users['user'], "I didn't know about this PR and had to retrieve " - "its information, you may have to re-approve it " - "as I didn't see previous commands."), - seen(env, pr, users), - ] - - def test_rplus_unmanaged(self, env, repo, users, config): - """ r+ on an unmanaged target should notify about - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/branch', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='branch', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons( - 'runbot_merge.fetch_prs_cron', - 'runbot_merge.feedback_cron', - ) - - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - (users['user'], "This PR targets the un-managed branch %s:branch, it needs to be retargeted before it can be merged." 
% repo.name), - (users['user'], "Branch `branch` is not within my remit, imma just ignore it."), - ] - - def test_rplus_review_unmanaged(self, env, repo, users, config): - """ r+ reviews can take a different path than comments - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/branch', m2) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='branch', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - - prx.post_review('APPROVE', 'hansen r+', config['role_reviewer']['token']) - env.run_crons( - 'runbot_merge.fetch_prs_cron', - 'runbot_merge.feedback_cron', - ) - - # FIXME: either split out reviews in local or merge reviews & comments in remote - assert prx.comments[-1:] == [ - (users['user'], "I'm sorry. Branch `branch` is not within my remit."), - ] - -class TestRecognizeCommands: - @pytest.mark.parametrize('botname', ['hansen', 'Hansen', 'HANSEN', 'HanSen', 'hAnSeN']) - def test_botname_casing(self, repo, env, botname, config): - """ Test that the botname is case-insensitive as people might write - bot names capitalised or titlecased or uppercased or whatever - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'opened' - - with repo: - prx.post_comment('%s r+' % botname, config['role_reviewer']['token']) - assert pr.state == 'approved' - - @pytest.mark.parametrize('indent', ['', '\N{SPACE}', '\N{SPACE}'*4, '\N{TAB}']) - def test_botname_indented(self, repo, env, indent, config): - """ matching botname should ignore leading whitespaces - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'opened' - - with repo: - prx.post_comment('%shansen r+' % indent, config['role_reviewer']['token']) - assert pr.state == 'approved' - - def test_unknown_commands(self, repo, env, config, users): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - pr = repo.make_pr(title='title', body=None, target='master', head=c) - pr.post_comment("hansen do the thing", config['role_reviewer']['token']) - pr.post_comment('hansen @bobby-b r+ :+1:', config['role_reviewer']['token']) - env.run_crons() - - assert pr.comments == [ - (users['reviewer'], "hansen do the thing"), - (users['reviewer'], "hansen @bobby-b r+ :+1:"), - seen(env, pr, users), - ] - -class TestRMinus: - def test_rminus_approved(self, repo, env, config): - """ approved -> r- -> opened - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, 
target='master', head=c) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'opened' - - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'approved' - - with repo: - prx.post_comment('hansen r-', config['role_user']['token']) - assert pr.state == 'opened' - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'approved' - - with repo: - prx.post_comment('hansen r-', config['role_other']['token']) - assert pr.state == 'approved' - - with repo: - prx.post_comment('hansen r-', config['role_reviewer']['token']) - assert pr.state == 'opened' - - def test_rminus_ready(self, repo, env, config): - """ ready -> r- -> validated - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.state == 'validated' - - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'ready' - - with repo: - prx.post_comment('hansen r-', config['role_user']['token']) - assert pr.state == 'validated' - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'ready' - - with repo: - prx.post_comment('hansen r-', config['role_other']['token']) - assert pr.state == 'ready' - - with repo: - prx.post_comment('hansen r-', config['role_reviewer']['token']) - assert pr.state == 'validated' - - def test_rminus_staged(self, repo, env, config): - """ staged -> r- -> validated - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - - # if reviewer unreviews, cancel staging & unreview - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - st = pr.staging_id - assert st - - with repo: - prx.post_comment('hansen r-', config['role_reviewer']['token']) - assert not st.active - assert not pr.staging_id - assert pr.state == 'validated' - - # if author unreviews, cancel staging & unreview - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - st = pr.staging_id - assert st - - with repo: - prx.post_comment('hansen r-', config['role_user']['token']) - assert not st.active - assert not pr.staging_id - assert pr.state == 'validated' - - # if rando unreviews, ignore - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - st = pr.staging_id - assert st - - with repo: - prx.post_comment('hansen r-', config['role_other']['token']) - assert pr.staging_id == st - assert pr.state == 'ready' - - def test_split(self, env, repo, config): - """ Should remove the PR from 
its split, and possibly delete the split - entirely. - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) - repo.make_ref('heads/p1', c) - prx1 = repo.make_pr(title='t1', body='b1', target='master', head='p1') - repo.post_status(prx1.head, 'success', 'legal/cla') - repo.post_status(prx1.head, 'success', 'ci/runbot') - prx1.post_comment('hansen r+', config['role_reviewer']['token']) - - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) - repo.make_ref('heads/p2', c) - prx2 = repo.make_pr(title='t2', body='b2', target='master', head='p2') - repo.post_status(prx2.head, 'success', 'legal/cla') - repo.post_status(prx2.head, 'success', 'ci/runbot') - prx2.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr1, pr2 = env['runbot_merge.pull_requests'].search([], order='number') - assert pr1.number == prx1.number - assert pr2.number == prx2.number - assert pr1.staging_id == pr2.staging_id - s0 = pr1.staging_id - - with repo: - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - env.run_crons() - - assert pr1.staging_id and pr1.staging_id != s0, "pr1 should have been re-staged" - assert not pr2.staging_id, "pr2 should not" - # TODO: remote doesn't currently handle env context so can't mess - # around using active_test=False - assert env['runbot_merge.split'].search([]) - - with repo: - # prx2 was actually a terrible idea! - prx2.post_comment('hansen r-', config['role_reviewer']['token']) - # probably not necessary ATM but... - env.run_crons() - - assert pr2.state == 'validated', "state should have been reset" - assert not env['runbot_merge.split'].search([]), "there should be no split left" - - def test_rminus_p0(self, env, repo, config, users): - """ In and of itself r- doesn't do anything on p=0 since they bypass - approval, so unstage and downgrade to p=1. 
- """ - - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr(title='title', body=None, target='master', head=c) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen p=0', config['role_reviewer']['token']) - env.run_crons() - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number), - ]) - assert pr.priority == 0 - assert pr.staging_id - - with repo: - prx.post_comment('hansen r-', config['role_reviewer']['token']) - env.run_crons() - assert not pr.staging_id, "pr should have been unstaged" - assert pr.priority == 1, "priority should have been downgraded" - assert prx.comments == [ - (users['reviewer'], 'hansen p=0'), - seen(env, prx, users), - (users['reviewer'], 'hansen r-'), - (users['user'], "PR priority reset to 1, as pull requests with priority 0 ignore review state."), - ] - -class TestComments: - def test_address_method(self, repo, env, config): - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen delegate=foo', config['role_reviewer']['token']) - prx.post_comment('@hansen delegate=bar', config['role_reviewer']['token']) - prx.post_comment('#hansen delegate=baz', config['role_reviewer']['token']) - - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - assert {p.github_login for p in pr.delegates} \ - == {'foo', 'bar', 'baz'} - - def test_delete(self, repo, env, config): - """ Comments being deleted should be ignored - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - with repo: - cid = prx.post_comment('hansen r+', config['role_reviewer']['token']) - # unreview by pushing a new commit - repo.update_ref(prx.ref, repo.make_commit(c1, 'second', None, tree={'m': 'c2'}), force=True) - assert pr.state == 'opened' - with repo: - prx.delete_comment(cid, config['role_reviewer']['token']) - # check that PR is still unreviewed - assert pr.state == 'opened' - - def test_edit(self, repo, env, config): - """ Comments being edited should be ignored - """ - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - with repo: - cid = prx.post_comment('hansen r+', config['role_reviewer']['token']) - # unreview by pushing a new commit - repo.update_ref(prx.ref, repo.make_commit(c1, 'second', None, tree={'m': 'c2'}), force=True) - assert pr.state == 'opened' - with repo: - prx.edit_comment(cid, 
'hansen r+ edited', config['role_reviewer']['token']) - # check that PR is still unreviewed - assert pr.state == 'opened' - -class TestFeedback: - def test_ci_approved(self, repo, env, users, config): - """CI failing on an r+'d PR sends feedback""" - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'approved' - - with repo: - repo.post_status(prx.head, 'failure', 'ci/runbot') - env.run_crons() - - assert prx.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, prx, users), - (users['user'], "@%(user)s @%(reviewer)s 'ci/runbot' failed on this reviewed PR." % users) - ] - - def test_review_failed(self, repo, env, users, config): - """r+-ing a PR with failed CI sends feedback""" - with repo: - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr(title='title', body='body', target='master', head=c1) - pr = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', prx.number) - ]) - - with repo: - repo.post_status(prx.head, 'failure', 'ci/runbot') - env.run_crons() - assert pr.state == 'opened' - - with repo: - prx.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr.state == 'approved' - - env.run_crons() - - assert prx.comments == [ - seen(env, prx, users), - (users['reviewer'], 'hansen r+'), - (users['user'], "@%s you may want to rebuild or fix this PR as it has failed CI." % users['reviewer']) - ] - -class TestInfrastructure: - @pytest.mark.skip(reason="Don't want to implement") - def test_protection(self, repo): - """ force-pushing on a protected ref should fail - """ - with repo: - m0 = repo.make_commit(None, 'initial', None, tree={'m': 'm0'}) - m1 = repo.make_commit(m0, 'first', None, tree={'m': 'm1'}) - repo.make_ref('heads/master', m1) - repo.protect('master') - - c1 = repo.make_commit(m0, 'other', None, tree={'m': 'c1'}) - with pytest.raises(AssertionError): - repo.update_ref('heads/master', c1, force=True) - assert repo.get_ref('heads/master') == m1 - -def node(name, *children): - assert type(name) in (str, re_matches) - return name, frozenset(children) -def log_to_node(log): - log = list(log) - nodes = {} - # check that all parents are present - ids = {c['sha'] for c in log} - parents = {p['sha'] for c in log for p in c['parents']} - missing = parents - ids - assert parents, "Didn't find %s in log" % missing - - # github doesn't necessarily log topologically maybe? 
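- # so walk the log oldest-first, materializing a commit only once all of its parents have been built; commits whose parents are still pending are pushed back to the end of the queue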
- todo = list(reversed(log)) - while todo: - c = todo.pop(0) - if all(p['sha'] in nodes for p in c['parents']): - nodes[c['sha']] = (c['commit']['message'], frozenset( - nodes[p['sha']] - for p in c['parents'] - )) - else: - todo.append(c) - - return nodes[log[0]['sha']] - -class TestEmailFormatting: - def test_simple(self, env): - p1 = env['res.partner'].create({ - 'name': 'Bob', - 'email': 'bob@example.com', - }) - assert p1.formatted_email == 'Bob <bob@example.com>' - - def test_noemail(self, env): - p1 = env['res.partner'].create({ - 'name': 'Shultz', - 'github_login': 'Osmose99', - }) - assert p1.formatted_email == 'Shultz <Osmose99@users.noreply.github.com>' diff --git a/runbot_merge/tests/test_by_branch.py b/runbot_merge/tests/test_by_branch.py deleted file mode 100644 index 4122bf82..00000000 --- a/runbot_merge/tests/test_by_branch.py +++ /dev/null @@ -1,180 +0,0 @@ -import pytest - -from utils import Commit - - -@pytest.fixture -def repo(env, project, make_repo, users, setreviewers): - r = make_repo('repo') - project.write({ - 'repo_ids': [(0, 0, { - 'name': r.name, - 'status_ids': [ - (0, 0, {'context': 'ci'}), - # require the lint status on master - (0, 0, { - 'context': 'lint', - 'branch_filter': [('id', '=', project.branch_ids.id)] - }), - (0, 0, {'context': 'pr', 'stagings': False}), - (0, 0, {'context': 'staging', 'prs': False}), - ] - })], - }) - setreviewers(*project.repo_ids) - return r - -def test_status_applies(env, repo, config): - """ If branches are associated with a repo status, only those branch should - require the status on their PRs & stagings - """ - with repo: - m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - - [c] = repo.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') - pr = repo.make_pr(target='master', title="super change", head='change') - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'opened' - - with repo: - repo.post_status(c, 'success', 'ci') - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'opened' - with repo: - repo.post_status(c, 'success', 'pr') - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'opened' - with repo: - repo.post_status(c, 'success', 'lint') - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'validated' - - with repo: - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - st = env['runbot_merge.stagings'].search([]) - assert st.state == 'pending' - with repo: - repo.post_status('staging.master', 'success', 'ci') - env.run_crons('runbot_merge.process_updated_commits') - assert st.state == 'pending' - with repo: - repo.post_status('staging.master', 'success', 'lint') - env.run_crons('runbot_merge.process_updated_commits') - assert st.state == 'pending' - with repo: - repo.post_status('staging.master', 'success', 'staging') - env.run_crons('runbot_merge.process_updated_commits') - assert st.state == 'success' - -def test_status_skipped(env, project, repo, config): - """ Branches not associated with a repo status should not require the status - on their PRs or stagings - """ - # add a second branch for which the lint status doesn't apply - project.write({'branch_ids': [(0, 0, {'name': 'maintenance'})]}) - with repo: - m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/maintenance') - - [c] = repo.make_commits(m, Commit('pr', tree={'a': '2'}), 
ref='heads/change') - pr = repo.make_pr(target='maintenance', title="super change", head='change') - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'opened' - - with repo: - repo.post_status(c, 'success', 'ci') - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'opened' - with repo: - repo.post_status(c, 'success', 'pr') - env.run_crons('runbot_merge.process_updated_commits') - assert pr_id.state == 'validated' - - with repo: - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - st = env['runbot_merge.stagings'].search([]) - assert st.state == 'pending' - with repo: - repo.post_status('staging.maintenance', 'success', 'staging') - env.run_crons('runbot_merge.process_updated_commits') - assert st.state == 'pending' - with repo: - repo.post_status('staging.maintenance', 'success', 'ci') - env.run_crons('runbot_merge.process_updated_commits') - assert st.state == 'success' - -def test_pseudo_version_tag(env, project, make_repo, setreviewers, config): - """ Because the last branch in the sequence is "live", if a PR's merged in - it it's hard to know where it landed in terms of other branches. - - Therefore if a PR is merged in one such branch, tag it using the previous - branch of the sequence: - - * if that ends with a number, increment the number by 1 - * otherwise add 'post-' prefix (I guess) - """ - repo = make_repo('repo') - project.branch_ids.sequence = 1 - project.write({ - 'repo_ids': [(0, 0, {'name': repo.name, 'required_statuses': 'ci'})], - 'branch_ids': [ - (0, 0, {'name': '2.0', 'sequence': 11}), - (0, 0, {'name': '1.0', 'sequence': 21}) - ], - }) - setreviewers(*project.repo_ids) - - with repo: - [m] = repo.make_commits(None, Commit('c1', tree={'a': '1'}), ref='heads/master') - repo.make_ref('heads/1.0', m) - repo.make_ref('heads/2.0', m) - repo.make_ref('heads/bonk', m) - - with repo: - repo.make_commits(m, Commit('pr1', tree={'b': '1'}), ref='heads/change') - pr = repo.make_pr(target='master', head='change') - repo.post_status(pr.ref, 'success', 'ci') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() # should create staging - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'ready' - assert pr_id.staging_id - with repo: - repo.post_status('staging.master', 'success', 'ci') - env.run_crons() # should merge staging - env.run_crons('runbot_merge.labels_cron') # update labels - assert pr_id.state == 'merged' - assert pr.labels >= {'2.1'} - - # now the second branch is non-numeric, therefore the label should just be prefixed by "post-" - project.write({'branch_ids': [(0, 0, {'name': 'bonk', 'sequence': 6})]}) - with repo: - repo.make_commits(m, Commit('pr2', tree={'c': '1'}), ref='heads/change2') - pr = repo.make_pr(target='master', head='change2') - repo.post_status(pr.ref, 'success', 'ci') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() # should create staging - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'ready', pr.comments - assert pr_id.staging_id - with repo: - repo.post_status('staging.master', 'success', 'ci') - env.run_crons() # should merge staging - env.run_crons('runbot_merge.labels_cron') # update labels - assert pr_id.state == 'merged' - 
assert pr.labels >= {'post-bonk'} diff --git a/runbot_merge/tests/test_disabled_branch.py b/runbot_merge/tests/test_disabled_branch.py deleted file mode 100644 index 1d6be201..00000000 --- a/runbot_merge/tests/test_disabled_branch.py +++ /dev/null @@ -1,152 +0,0 @@ -from utils import seen, Commit, pr_page - -def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, config, users, page): - """ PRs to disabled branches are ignored, but what if the PR exists *before* - the branch is disabled? - """ - repo = make_repo('repo') - project.branch_ids.sequence = 0 - project.write({'branch_ids': [ - (0, 0, {'name': 'other', 'sequence': 1}), - (0, 0, {'name': 'other2', 'sequence': 2}), - ]}) - repo_id = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'status'})], - 'group_id': False, - }) - setreviewers(*project.repo_ids) - - with repo: - [m] = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - [ot] = repo.make_commits(m, Commit('other', tree={'b': '1'}), ref='heads/other') - repo.make_commits(m, Commit('other2', tree={'c': '1'}), ref='heads/other2') - - [c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'})) - pr = repo.make_pr(title="title", body='body', target='other', head=c) - repo.post_status(c, 'success', 'status') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository', '=', repo_id.id), - ('number', '=', pr.number), - ]) - branch_id = pr_id.target - assert pr_id.staging_id - staging_id = branch_id.active_staging_id - assert staging_id == pr_id.staging_id - - # disable branch "other" - branch_id.active = False - env.run_crons() - - assert not branch_id.active_staging_id - assert staging_id.state == 'cancelled', \ - "closing the PRs should have canceled the staging" - - p = pr_page(page, pr) - target = dict(zip( - (e.text for e in p.cssselect('dl.runbot-merge-fields dt')), - (p.cssselect('dl.runbot-merge-fields dd')) - ))['target'] - assert target.text_content() == 'other (inactive)' - assert target.get('class') == 'text-muted bg-warning' - - # the PR should have been closed implicitly - assert pr_id.state == 'closed' - assert not pr_id.staging_id - - with repo: - pr.open() - pr.post_comment('hansen r+', config['role_reviewer']['token']) - assert pr_id.state == 'ready', "pr should be reopenable" - env.run_crons() - - assert pr.comments == [ - (users['reviewer'], "hansen r+"), - seen(env, pr, users), - (users['user'], "@%(user)s @%(reviewer)s the target branch 'other' has been disabled, closing this PR." % users), - (users['reviewer'], "hansen r+"), - (users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." 
% repo.name), - ] - - with repo: - [c2] = repo.make_commits(ot, Commit('wheee', tree={'b': '3'})) - repo.update_ref(pr.ref, c2, force=True) - assert pr_id.head == c2, "pr should be aware of its update" - - with repo: - pr.base = 'other2' - repo.post_status(c2, 'success', 'status') - pr.post_comment('hansen rebase-ff r+', config['role_reviewer']['token']) - env.run_crons() - - assert pr_id.state == 'ready' - assert pr_id.target == env['runbot_merge.branch'].search([('name', '=', 'other2')]) - assert pr_id.staging_id - - -def test_new_pr_no_branch(env, project, make_repo, setreviewers, users): - """ A new PR to an *unknown* branch should be ignored and warn - """ - repo = make_repo('repo') - repo_id = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'status'})] - }) - setreviewers(*project.repo_ids) - - with repo: - [m] = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - [ot] = repo.make_commits(m, Commit('other', tree={'b': '1'}), ref='heads/other') - - [c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'})) - pr = repo.make_pr(title="title", body='body', target='other', head=c) - env.run_crons() - - assert not env['runbot_merge.pull_requests'].search([ - ('repository', '=', repo_id.id), - ('number', '=', pr.number), - ]), "the PR should not have been created in the backend" - assert pr.comments == [ - (users['user'], "This PR targets the un-managed branch %s:other, it needs to be retargeted before it can be merged." % repo.name), - ] - -def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users): - """ A new PR to a *disabled* branch should be accepted (rather than ignored) - but should warn - """ - repo = make_repo('repo') - repo_id = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'status'})] - }) - env['runbot_merge.branch'].create({ - 'project_id': project.id, - 'name': 'other', - 'active': False, - }) - setreviewers(*project.repo_ids) - - with repo: - [m] = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - [ot] = repo.make_commits(m, Commit('other', tree={'b': '1'}), ref='heads/other') - - [c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'})) - pr = repo.make_pr(title="title", body='body', target='other', head=c) - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository', '=', repo_id.id), - ('number', '=', pr.number), - ]) - assert pr_id, "the PR should have been created in the backend" - assert pr_id.state == 'opened' - assert pr.comments == [ - (users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." % repo.name), - seen(env, pr, users), - ] diff --git a/runbot_merge/tests/test_multirepo.py b/runbot_merge/tests/test_multirepo.py deleted file mode 100644 index b94a1ec9..00000000 --- a/runbot_merge/tests/test_multirepo.py +++ /dev/null @@ -1,1438 +0,0 @@ -""" The mergebot does not work on a dependency basis, rather all -repositories of a project are co-equal and get (on target and -source branches). 
- -When preparing a staging, we simply want to ensure branch-matched PRs -are staged concurrently in all repos -""" -import json -import time -import xmlrpc.client - -import pytest -import requests -from lxml.etree import XPath - -from utils import seen, get_partner, pr_page, to_pr, Commit - - -@pytest.fixture -def repo_a(project, make_repo, setreviewers): - repo = make_repo('a') - r = project.env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'required_statuses': 'legal/cla,ci/runbot', - 'group_id': False, - }) - setreviewers(r) - return repo - -@pytest.fixture -def repo_b(project, make_repo, setreviewers): - repo = make_repo('b') - r = project.env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'required_statuses': 'legal/cla,ci/runbot', - 'group_id': False, - }) - setreviewers(r) - return repo - -@pytest.fixture -def repo_c(project, make_repo, setreviewers): - repo = make_repo('c') - r = project.env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'required_statuses': 'legal/cla,ci/runbot', - 'group_id': False, - }) - setreviewers(r) - return repo - -def make_pr(repo, prefix, trees, *, target='master', user, - statuses=(('ci/runbot', 'success'), ('legal/cla', 'success')), - reviewer): - """ - :type repo: fake_github.Repo - :type prefix: str - :type trees: list[dict] - :type target: str - :type user: str - :type statuses: list[(str, str)] - :type reviewer: str | None - :rtype: fake_github.PR - """ - *_, c = repo.make_commits( - 'heads/{}'.format(target), - *( - repo.Commit('commit_{}_{:02}'.format(prefix, i), tree=tree) - for i, tree in enumerate(trees) - ), - ref='heads/{}'.format(prefix) - ) - pr = repo.make_pr(title='title {}'.format(prefix), body='body {}'.format(prefix), - target=target, head=prefix, token=user) - for context, result in statuses: - repo.post_status(c, result, context) - if reviewer: - pr.post_comment('hansen r+', reviewer) - return pr - -def make_branch(repo, name, message, tree, protect=True): - c = repo.make_commit(None, message, None, tree=tree) - repo.make_ref('heads/%s' % name, c) - if protect: - repo.protect(name) - return c - -def test_stage_one(env, project, repo_a, repo_b, config): - """ First PR is non-matched from A => should not select PR from B - """ - project.batch_limit = 1 - - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr( - repo_a, 'A', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token']) - - with repo_b: - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr( - repo_b, 'B', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - pra_id = to_pr(env, pr_a) - assert pra_id.state == 'ready' - assert pra_id.staging_id - assert repo_a.commit('staging.master').message.startswith('commit_A_00') - assert repo_b.commit('staging.master').message.startswith('force rebuild') - - prb_id = to_pr(env, pr_b) - assert prb_id.state == 'ready' - assert not prb_id.staging_id - -get_related_pr_labels = XPath('.//*[normalize-space(text()) = "Linked pull requests"]//a/text()') -def test_stage_match(env, project, repo_a, repo_b, config, page): - """ First PR is matched from A, => should select matched PR from B - """ - project.batch_limit = 1 - - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - prx_a = make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - 
user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - with repo_b: - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - prx_b = make_pr(repo_b, 'do-a-thing', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - pr_a = to_pr(env, prx_a) - pr_b = to_pr(env, prx_b) - - # check that related PRs link to one another - assert get_related_pr_labels(pr_page(page, prx_a)) == pr_b.mapped('display_name') - assert get_related_pr_labels(pr_page(page, prx_b)) == pr_a.mapped('display_name') - - env.run_crons() - - assert pr_a.state == 'ready' - assert pr_a.staging_id - assert pr_b.state == 'ready' - assert pr_b.staging_id - # should be part of the same staging - assert pr_a.staging_id == pr_b.staging_id, \ - "branch-matched PRs should be part of the same staging" - - # check that related PRs *still* link to one another during staging - assert get_related_pr_labels(pr_page(page, prx_a)) == [pr_b.display_name] - assert get_related_pr_labels(pr_page(page, prx_b)) == [pr_a.display_name] - with repo_a: - repo_a.post_status('staging.master', 'failure', 'legal/cla') - env.run_crons() - - assert pr_a.state == 'error' - assert pr_b.state == 'ready' - - with repo_a: - prx_a.post_comment('hansen retry', config['role_reviewer']['token']) - env.run_crons() - - assert pr_a.state == pr_b.state == 'ready' - assert pr_a.staging_id and pr_b.staging_id - for repo in [repo_a, repo_b]: - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'success', 'ci/runbot') - env.run_crons() - assert pr_a.state == 'merged' - assert pr_b.state == 'merged' - - assert 'Related: {}'.format(pr_b.display_name) in repo_a.commit('master').message - assert 'Related: {}'.format(pr_a.display_name) in repo_b.commit('master').message - - print(pr_a.batch_ids.read(['staging_id', 'prs'])) - # check that related PRs *still* link to one another after merge - assert get_related_pr_labels(pr_page(page, prx_a)) == [pr_b.display_name] - assert get_related_pr_labels(pr_page(page, prx_b)) == [pr_a.display_name] - -def test_different_targets(env, project, repo_a, repo_b, config): - """ PRs with different targets should not be matched together - """ - project.write({ - 'batch_limit': 1, - 'branch_ids': [(0, 0, {'name': 'other'})] - }) - with repo_a: - make_branch(repo_a, 'master', 'initial', {'master': 'a_0'}) - make_branch(repo_a, 'other', 'initial', {'other': 'a_0'}) - pr_a = make_pr( - repo_a, 'do-a-thing', [{'mater': 'a_1'}], - target='master', - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - with repo_b: - make_branch(repo_b, 'master', 'initial', {'master': 'b_0'}) - make_branch(repo_b, 'other', 'initial', {'other': 'b_0'}) - pr_b = make_pr( - repo_b, 'do-a-thing', [{'other': 'b_1'}], - target='other', - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - statuses=[], - ) - time.sleep(5) - env.run_crons() - - pr_a = to_pr(env, pr_a) - pr_b = to_pr(env, pr_b) - assert pr_a.state == 'ready' - assert not pr_a.blocked - assert pr_a.staging_id - - assert pr_b.blocked - assert pr_b.state == 'approved' - assert not pr_b.staging_id - - for r in [repo_a, repo_b]: - with r: - r.post_status('staging.master', 'success', 'legal/cla') - r.post_status('staging.master', 'success', 'ci/runbot') - env.run_crons() - assert pr_a.state == 'merged' - -def test_stage_different_statuses(env, project, repo_a, repo_b, config): - project.batch_limit = 1 - - 
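- # repo B is given a different required status (foo/bar), so its PR stays unvalidated until that status is posted even though the labels match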
env['runbot_merge.repository'].search([ - ('name', '=', repo_b.name) - ]).write({ - 'required_statuses': 'foo/bar', - }) - - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - repo_a.post_status(pr_a.head, 'success', 'foo/bar') - with repo_b: - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - [c] = repo_b.make_commits( - 'heads/master', - repo_b.Commit('some_commit\n\nSee also %s#%d' % (repo_a.name, pr_a.number), tree={'a': 'b_1'}), - ref='heads/do-a-thing' - ) - pr_b = repo_b.make_pr( - title="title", body="body", target='master', head='do-a-thing', - token=config['role_user']['token']) - repo_b.post_status(c, 'success', 'ci/runbot') - repo_b.post_status(c, 'success', 'legal/cla') - pr_b.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - # since the labels are the same but the statuses on pr_b are not the - # expected ones, pr_a should be blocked on pr_b, which should be approved - # but not validated / ready - pr_a_id = to_pr(env, pr_a) - pr_b_id = to_pr(env, pr_b) - assert pr_a_id.state == 'ready' - assert not pr_a_id.staging_id - assert pr_a_id.blocked - assert pr_b_id.state == 'approved' - assert not pr_b_id.staging_id - - with repo_b: - repo_b.post_status(pr_b.head, 'success', 'foo/bar') - env.run_crons() - - assert pr_a_id.state == pr_b_id.state == 'ready' - assert pr_a_id.staging_id == pr_b_id.staging_id - - # do the actual merge to check for the Related header - for repo in [repo_a, repo_b]: - with repo: - repo.post_status('staging.master', 'success', 'legal/cla') - repo.post_status('staging.master', 'success', 'ci/runbot') - repo.post_status('staging.master', 'success', 'foo/bar') - env.run_crons() - - pr_a_ref = to_pr(env, pr_a).display_name - pr_b_ref = to_pr(env, pr_b).display_name - master_a = repo_a.commit('master') - master_b = repo_b.commit('master') - - assert 'Related: {}'.format(pr_b_ref) in master_a.message,\ - "related should be in PR A's message" - assert 'Related: {}'.format(pr_a_ref) not in master_b.message,\ - "related should not be in PR B's message since the ref' was added explicitly" - assert pr_a_ref in master_b.message, "the ref' should still be there though" - -def test_unmatch_patch(env, project, repo_a, repo_b, config): - """ When editing files via the UI for a project you don't have write - access to, a branch called patch-XXX is automatically created in your - profile to hold the change. - - This means it's possible to create a:patch-1 and b:patch-1 without - intending them to be related in any way, and more likely than the opposite - since there is no user control over the branch names (save by actually - creating/renaming branches afterwards before creating the PR). 
- - -> PRs with a branch name of patch-* should not be label-matched - """ - project.batch_limit = 1 - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr( - repo_a, 'patch-1', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - with repo_b: - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr( - repo_b, 'patch-1', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - pr_a = to_pr(env, pr_a) - pr_b = to_pr(env, pr_b) - assert pr_a.state == 'ready' - assert pr_a.staging_id - assert pr_b.state == 'ready' - assert not pr_b.staging_id, 'patch-* PRs should not be branch-matched' - -def test_sub_match(env, project, repo_a, repo_b, repo_c, config): - """ Branch-matching should work on a subset of repositories - """ - project.batch_limit = 1 - with repo_a: # no pr here - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - with repo_b: - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr( - repo_b, 'do-a-thing', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - with repo_c: - make_branch(repo_c, 'master', 'initial', {'a': 'c_0'}) - pr_c = make_pr( - repo_c, 'do-a-thing', [{'a': 'c_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - pr_b = to_pr(env, pr_b) - pr_c = to_pr(env, pr_c) - assert pr_b.state == 'ready' - assert pr_b.staging_id - assert pr_c.state == 'ready' - assert pr_c.staging_id - # should be part of the same staging - assert pr_c.staging_id == pr_b.staging_id, \ - "branch-matched PRs should be part of the same staging" - - st = pr_b.staging_id - a_staging = repo_a.commit('staging.master') - b_staging = repo_b.commit('staging.master') - c_staging = repo_c.commit('staging.master') - assert json.loads(st.heads) == { - repo_a.name: a_staging.id, - repo_a.name + '^': a_staging.parents[0], - repo_b.name: b_staging.id, - repo_b.name + '^': b_staging.id, - repo_c.name: c_staging.id, - repo_c.name + '^': c_staging.id, - } - -def test_merge_fail(env, project, repo_a, repo_b, users, config): - """ In a matched-branch scenario, if merging in one of the linked repos - fails it should revert the corresponding merges - """ - project.batch_limit = 1 - - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - - # first set of matched PRs - pr1a = make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - pr1b = make_pr( - repo_b, 'do-a-thing', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - # add a conflicting commit to B so the staging fails - repo_b.make_commit('heads/master', 'cn', None, tree={'a': 'cn'}) - - # and a second set of PRs which should get staged while the first set - # fails - pr2a = make_pr( - repo_a, 'do-b-thing', [{'b': 'ok'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - pr2b = make_pr( - repo_b, 'do-b-thing', [{'b': 'ok'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - s2 = to_pr(env, pr2a) | to_pr(env, pr2b) - st = env['runbot_merge.stagings'].search([]) - assert set(st.batch_ids.prs.ids) == set(s2.ids) - - failed = to_pr(env, pr1b) - assert 
failed.state == 'error' - assert pr1b.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr1b, users), - (users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users), - ] - other = to_pr(env, pr1a) - reviewer = get_partner(env, users["reviewer"]).formatted_email - assert not other.staging_id - assert [ - c['commit']['message'] - for c in repo_a.log('heads/staging.master') - ] == [ - """commit_do-b-thing_00 - -closes %s - -Related: %s -Signed-off-by: %s""" % (s2[0].display_name, s2[1].display_name, reviewer), - 'initial' - ], "dummy commit + squash-merged PR commit + root commit" - -def test_ff_fail(env, project, repo_a, repo_b, config): - """ In a matched-branch scenario, if fast-forwarding one of the repos fails, - the entire thing should be rolled back - """ - project.batch_limit = 1 - - with repo_a, repo_b: - root_a = make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - make_pr( - repo_b, 'do-a-thing', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - # add second commit blocking FF - with repo_b: - cn = repo_b.make_commit('heads/master', 'second', None, tree={'a': 'b_0', 'b': 'other'}) - assert repo_b.commit('heads/master').id == cn - - with repo_a, repo_b: - repo_a.post_status('heads/staging.master', 'success', 'ci/runbot') - repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - env.run_crons('runbot_merge.merge_cron', 'runbot_merge.staging_cron') - assert repo_b.commit('heads/master').id == cn,\ - "B should still be at the conflicting commit" - assert repo_a.commit('heads/master').id == root_a,\ - "FF A should have been rolled back when B failed" - - # should be re-staged - st = env['runbot_merge.stagings'].search([]) - assert len(st) == 1 - assert len(st.batch_ids.prs) == 2 - -class TestCompanionsNotReady: - def test_one_pair(self, env, project, repo_a, repo_b, config, users): - """ If the companion of a ready branch-matched PR is not ready, - they should not get staged - """ - project.batch_limit = 1 - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - # pr_a is born ready - p_a = make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - p_b = make_pr( - repo_b, 'do-a-thing', [{'a': 'b_1'}], - user=config['role_user']['token'], - reviewer=None, - ) - - pr_a = to_pr(env, p_a) - pr_b = to_pr(env, p_b) - assert pr_a.label == pr_b.label == '{}:do-a-thing'.format(config['github']['owner']) - - env.run_crons() - - assert pr_a.state == 'ready' - assert pr_b.state == 'validated' - assert not pr_b.staging_id - assert not pr_a.staging_id, \ - "pr_a should not have been staged as companion is not ready" - - assert p_a.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, p_a, users), - (users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready."
% ( - users['user'], - users['reviewer'], - pr_b.display_name, - )), - ] - # ensure the message is only sent once per PR - env.run_crons('runbot_merge.check_linked_prs_status') - assert p_a.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, p_a, users), - (users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready." % ( - users['user'], - users['reviewer'], - pr_b.display_name, - )), - ] - assert p_b.comments == [seen(env, p_b, users)] - - def test_two_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config): - """ In a 3-batch, if two of the PRs are not ready both should be - linked by the first one - """ - project.batch_limit = 1 - with repo_a, repo_b, repo_c: - make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) - pr_a = make_pr( - repo_a, 'a-thing', [{'f': 'a1'}], - user=config['role_user']['token'], - reviewer=None, - ) - - make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) - pr_b = make_pr( - repo_b, 'a-thing', [{'f': 'b1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) - pr_c = make_pr( - repo_c, 'a-thing', [{'f': 'c1'}], - user=config['role_user']['token'], - reviewer=None, - ) - env.run_crons() - - assert pr_a.comments == [seen(env, pr_a, users)] - assert pr_b.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_b, users), - (users['user'], "@%s @%s linked pull request(s) %s#%d, %s#%d not ready. Linked PRs are not staged until all of them are ready." % ( - users['user'], users['reviewer'], - repo_a.name, pr_a.number, - repo_c.name, pr_c.number - )) - ] - assert pr_c.comments == [seen(env, pr_c, users)] - - def test_one_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config): - """ In a 3-batch, if one PR is not ready it should be linked on the - other two - """ - project.batch_limit = 1 - with repo_a, repo_b, repo_c: - make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) - pr_a = make_pr( - repo_a, 'a-thing', [{'f': 'a1'}], - user=config['role_user']['token'], - reviewer=None, - ) - - make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) - pr_b = make_pr( - repo_b, 'a-thing', [{'f': 'b1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) - pr_c = make_pr( - repo_c, 'a-thing', [{'f': 'c1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - env.run_crons() - - assert pr_a.comments == [seen(env, pr_a, users)] - assert pr_b.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_b, users), - (users['user'], "@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % ( - users['user'], users['reviewer'], - repo_a.name, pr_a.number - )) - ] - assert pr_c.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_c, users), - (users['user'], - "@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." 
% ( - users['user'], users['reviewer'], - repo_a.name, pr_a.number - )) - ] - -def test_other_failed(env, project, repo_a, repo_b, users, config): - """ In a non-matched-branch scenario, if the companion staging (copy of - targets) fails when built with the PR, it should provide a non-useless - message - """ - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - # pr_a is born ready - pr_a = make_pr( - repo_a, 'do-a-thing', [{'a': 'a_1'}], - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'], - ) - - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - env.run_crons() - - pr = to_pr(env, pr_a) - assert pr.staging_id - - with repo_a, repo_b: - repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_a.post_status('heads/staging.master', 'success', 'ci/runbot', target_url="http://example.org/a") - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot', target_url="http://example.org/b") - env.run_crons() - - sth = repo_b.commit('heads/staging.master').id - assert not pr.staging_id - assert pr.state == 'error' - assert pr_a.comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr_a, users), - (users['user'], '@%s @%s staging failed: ci/runbot on %s (view more at http://example.org/b)' % ( - users['user'], users['reviewer'], - sth - )) - ] - -class TestMultiBatches: - def test_batching(self, env, project, repo_a, repo_b, config): - """ If multiple batches (label groups) are ready they should get batched - together (within the limits of the project's batch limit) - """ - project.batch_limit = 3 - - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) - make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) - - prs = [( - a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']), - b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']), - ) - for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) - ] - env.run_crons() - prs = [ - (a and to_pr(env, a), b and to_pr(env, b)) - for (a, b) in prs - ] - - st = env['runbot_merge.stagings'].search([]) - assert st - assert len(st.batch_ids) == 3,\ - "Should have batched the first <batch_limit> batches" - assert st.mapped('batch_ids.prs') == ( - prs[0][0] | prs[0][1] - | prs[1][1] - | prs[2][0] | prs[2][1] - ) - - assert not prs[3][0].staging_id - assert not prs[3][1].staging_id - assert not prs[4][0].staging_id - - def test_batching_split(self, env, repo_a, repo_b, config): - """ If a staging fails, it should get split properly across repos - """ - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) - make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) - - prs = [( - a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']), - b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']), - ) - for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) - ] - env.run_crons() - prs = [ - (a and to_pr(env, a), b and to_pr(env, b)) - for (a, b) in prs - ] - - st0 = env['runbot_merge.stagings'].search([]) - assert
len(st0.batch_ids) == 5 - assert len(st0.mapped('batch_ids.prs')) == 8 - - # mark b.staging as failed -> should create two splits with (0, 1) - # and (2, 3, 4) and stage the first one - with repo_b: - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot') - env.run_crons() - - assert not st0.active - - # at this point we have a re-staged split and an unstaged split - st = env['runbot_merge.stagings'].search([]) - sp = env['runbot_merge.split'].search([]) - assert st - assert sp - - assert len(st.batch_ids) == 2 - assert st.mapped('batch_ids.prs') == \ - prs[0][0] | prs[0][1] | prs[1][1] - - assert len(sp.batch_ids) == 3 - assert sp.mapped('batch_ids.prs') == \ - prs[2][0] | prs[2][1] | prs[3][0] | prs[3][1] | prs[4][0] - -def test_urgent(env, repo_a, repo_b, config): - """ Either PR of a co-dependent pair being p=0 leads to the entire pair - being prioritized - """ - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - - pr_a = make_pr(repo_a, 'batch', [{'a1': 'a'}, {'a2': 'a'}], user=config['role_user']['token'], reviewer=None, statuses=[]) - pr_b = make_pr(repo_b, 'batch', [{'b1': 'b'}, {'b2': 'b'}], user=config['role_user']['token'], reviewer=None, statuses=[]) - pr_c = make_pr(repo_a, 'C', [{'c1': 'c', 'c2': 'c'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - - pr_a.post_comment('hansen rebase-merge', config['role_reviewer']['token']) - pr_b.post_comment('hansen rebase-merge p=0', config['role_reviewer']['token']) - env.run_crons() - # should have batched pr_a and pr_b despite neither being reviewed or - # approved - p_a, p_b = to_pr(env, pr_a), to_pr(env, pr_b) - p_c = to_pr(env, pr_c) - assert p_a.batch_id and p_b.batch_id and p_a.batch_id == p_b.batch_id,\ - "a and b should have been recognised as co-dependent" - assert not p_c.staging_id - -class TestBlocked: - def test_merge_method(self, env, repo_a, config): - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - - pr = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - env.run_crons() - - p = to_pr(env, pr) - assert p.state == 'ready' - assert p.blocked - - with repo_a: pr.post_comment('hansen rebase-merge', config['role_reviewer']['token']) - assert not p.blocked - - def test_linked_closed(self, env, repo_a, repo_b, config): - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - - pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'], statuses=[]) - env.run_crons() - - p = to_pr(env, pr) - assert p.blocked - with repo_b: b.close() - # FIXME: find a way for PR.blocked to depend on linked PR somehow so this isn't needed - p.invalidate_cache(['blocked'], [p.id]) - assert not p.blocked - - def test_linked_merged(self, env, repo_a, repo_b, config): - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - - b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - env.run_crons() # stage b and c - - with repo_a, repo_b: - 
repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_a.post_status('heads/staging.master', 'success', 'ci/runbot') - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') - env.run_crons() # merge b and c - assert to_pr(env, b).state == 'merged' - - with repo_a: - pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - env.run_crons() # merge b and c - - p = to_pr(env, pr) - assert not p.blocked - - def test_linked_unready(self, env, repo_a, repo_b, config): - """ Create a PR A linked to a non-ready PR B, - * A is blocked by default - * A is not blocked if A.p=0 - * A is not blocked if B.p=0 - """ - with repo_a, repo_b: - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - - a = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'], statuses=[]) - env.run_crons() - - pr_a = to_pr(env, a) - assert pr_a.blocked - - with repo_a: a.post_comment('hansen p=0', config['role_reviewer']['token']) - assert not pr_a.blocked - - with repo_a: a.post_comment('hansen p=2', config['role_reviewer']['token']) - assert pr_a.blocked - - with repo_b: b.post_comment('hansen p=0', config['role_reviewer']['token']) - assert not pr_a.blocked - -def test_different_branches(env, project, repo_a, repo_b, config): - project.write({ - 'branch_ids': [(0, 0, {'name': 'dev'})] - }) - # repo_b only works with master - env['runbot_merge.repository'].search([('name', '=', repo_b.name)])\ - .branch_filter = '[("name", "=", "master")]' - with repo_a, repo_b: - make_branch(repo_a, 'dev', 'initial', {'a': '0'}) - make_branch(repo_a, 'master', 'initial', {'b': '0'}) - make_branch(repo_b, 'master', 'initial', {'b': '0'}) - - pr_a = make_pr( - repo_a, 'xxx', [{'a': '1'}], - target='dev', - user=config['role_user']['token'], - reviewer=config['role_reviewer']['token'] - ) - env.run_crons() - - with repo_a: - pr_a.post_comment('hansen r+', config['role_reviewer']['token']) - repo_a.post_status('heads/staging.dev', 'success', 'legal/cla') - repo_a.post_status('heads/staging.dev', 'success', 'ci/runbot') - env.run_crons() - - assert to_pr(env, pr_a).state == 'merged' - -def test_remove_acl(env, partners, repo_a, repo_b, repo_c): - """ Check that our way of deprovisioning works correctly - """ - r = partners['self_reviewer'] - assert r.mapped('review_rights.repository_id.name') == [repo_a.name, repo_b.name, repo_c.name] - r.write({'review_rights': [(5, 0, 0)]}) - assert r.mapped('review_rights.repository_id') == env['runbot_merge.repository'] - -class TestSubstitutions: - def test_substitution_patterns(self, env, port): - p = env['runbot_merge.project'].create({ - 'name': 'proj', - 'github_token': 'wheeee', - 'repo_ids': [(0, 0, {'name': 'xxx/xxx'})], - 'branch_ids': [(0, 0, {'name': 'master'})] - }) - r = p.repo_ids - # replacement pattern, pr label, stored label - cases = [ - ('/^foo:/foo-dev:/', 'foo:bar', 'foo-dev:bar'), - ('/^foo:/foo-dev:/', 'foox:bar', 'foox:bar'), - ('/^foo:/foo-dev:/i', 'FOO:bar', 'foo-dev:bar'), - ('/o/x/g', 'foo:bar', 'fxx:bar'), - ('@foo:@bar:@', 'foo:bar', 'bar:bar'), - ('/foo:/bar:/\n/bar:/baz:/', 'foo:bar', 'baz:bar'), - ] - for pr_number, (pattern, original, target) in enumerate(cases, start=1): - 
r.substitutions = pattern - requests.post( - 'http://localhost:{}/runbot_merge/hooks'.format(port), - headers={'X-Github-Event': 'pull_request'}, - json={ - 'action': 'opened', - 'repository': { - 'full_name': r.name, - }, - 'pull_request': { - 'state': 'open', - 'draft': False, - 'user': {'login': 'bob'}, - 'base': { - 'repo': {'full_name': r.name}, - 'ref': p.branch_ids.name, - }, - 'number': pr_number, - 'title': "a pr", - 'body': None, - 'commits': 1, - 'head': { - 'label': original, - 'sha': format(pr_number, 'x')*40, - } - }, - 'sender': {'login': 'pytest'} - } - ) - pr = env['runbot_merge.pull_requests'].search([ - ('repository', '=', r.id), - ('number', '=', pr_number) - ]) - assert pr.label == target - - - def test_substitutions_staging(self, env, repo_a, repo_b, config): - """ Different repos from the same project may have different policies for - sourcing PRs. So allow for remapping labels on input in order to match. - """ - repo_b_id = env['runbot_merge.repository'].search([ - ('name', '=', repo_b.name) - ]) - # in repo b, replace owner part by repo_a's owner - repo_b_id.substitutions = r"/.+:/%s:/" % repo_a.owner - - with repo_a: - make_branch(repo_a, 'master', 'initial', {'a': '0'}) - with repo_b: - make_branch(repo_b, 'master', 'initial', {'b': '0'}) - - # policy is that repo_a PRs are created in the same repo while repo_b PRs - # are created in personal forks - with repo_a: - repo_a.make_commits('master', repo_a.Commit('bop', tree={'a': '1'}), ref='heads/abranch') - pra = repo_a.make_pr(target='master', head='abranch') - b_fork = repo_b.fork() - with b_fork, repo_b: - b_fork.make_commits('master', b_fork.Commit('pob', tree={'b': '1'}), ref='heads/abranch') - prb = repo_b.make_pr( - title="a pr", - target='master', head='%s:abranch' % b_fork.owner - ) - - pra_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo_a.name), - ('number', '=', pra.number) - ]) - prb_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo_b.name), - ('number', '=', prb.number) - ]) - assert pra_id.label.endswith(':abranch') - assert prb_id.label.endswith(':abranch') - - with repo_a, repo_b: - repo_a.post_status(pra.head, 'success', 'legal/cla') - repo_a.post_status(pra.head, 'success', 'ci/runbot') - pra.post_comment('hansen r+', config['role_reviewer']['token']) - - repo_b.post_status(prb.head, 'success', 'legal/cla') - repo_b.post_status(prb.head, 'success', 'ci/runbot') - prb.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - assert pra_id.staging_id, 'PR A should be staged' - assert prb_id.staging_id, "PR B should be staged" - assert pra_id.staging_id == prb_id.staging_id, "both prs should be staged together" - assert pra_id.batch_id == prb_id.batch_id, "both prs should be part of the same batch" - -def test_multi_project(env, make_repo, setreviewers, users, config, - tunnel): - """ There should be no linking of PRs across projects, even if there is some - structural overlap between the two. - - Here we have two projects on different forks, then a user creates a PR from - a third fork (or one of the forks should not matter) to *both*. - - The two PRs should be independent. 
- """ - Projects = env['runbot_merge.project'] - gh_token = config['github']['token'] - - r1 = make_repo("repo_a") - with r1: - r1.make_commits( - None, Commit('root', tree={'a': 'a'}), - ref='heads/default') - r1_dev = r1.fork() - p1 = Projects.create({ - 'name': 'Project 1', - 'github_token': gh_token, - 'github_prefix': 'hansen', - 'repo_ids': [(0, 0, { - 'name': r1.name, - 'group_id': False, - 'required_statuses': 'a', - })], - 'branch_ids': [(0, 0, {'name': 'default'})], - }) - setreviewers(*p1.repo_ids) - - r2 = make_repo('repo_b') - with r2: - r2.make_commits( - None, Commit('root', tree={'b': 'a'}), - ref='heads/default' - ) - r2_dev = r2.fork() - p2 = Projects.create({ - 'name': "Project 2", - 'github_token': gh_token, - 'github_prefix': 'hansen', - 'repo_ids': [(0, 0, { - 'name': r2.name, - 'group_id': False, - 'required_statuses': 'a', - })], - 'branch_ids': [(0, 0, {'name': 'default'})], - }) - setreviewers(*p2.repo_ids) - - assert r1_dev.owner == r2_dev.owner - - with r1, r1_dev: - r1_dev.make_commits('default', Commit('new', tree={'a': 'b'}), ref='heads/other') - - # create, validate, and approve pr1 - pr1 = r1.make_pr(title='pr 1', target='default', head=r1_dev.owner + ':other') - r1.post_status(pr1.head, 'success', 'a') - pr1.post_comment('hansen r+', config['role_reviewer']['token']) - - with r2, r2_dev: - r2_dev.make_commits('default', Commit('new', tree={'b': 'b'}), ref='heads/other') - - # create second PR with the same label *in a different project*, don't - # approve it - pr2 = r2.make_pr(title='pr 2', target='default', head=r2_dev.owner + ':other') - r2.post_status(pr2.head, 'success', 'a') - env.run_crons() - - pr1_id = to_pr(env, pr1) - pr2_id = to_pr(env, pr2) - - print( - pr1.repo.name, pr1.number, pr1_id.display_name, pr1_id.label, - '\n', - pr2.repo.name, pr2.number, pr2_id.display_name, pr2_id.label, - flush=True, - ) - - assert pr1_id.state == 'ready' and not pr1_id.blocked - assert pr2_id.state == 'validated' - - assert pr1_id.staging_id - assert not pr2_id.staging_id - - assert pr1.comments == [ - (users['reviewer'], 'hansen r+'), - (users['user'], f'[Pull request status dashboard]({pr1_id.url}).'), - ] - assert pr2.comments == [ - (users['user'], f'[Pull request status dashboard]({pr2_id.url}).'), - ] - -def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config): - """ Tests the freeze wizard feature (aside from the UI): - - * have a project with 3 repos, and two branches (1.0 and master) each - * have 2 PRs required for the freeze - * prep 3 freeze PRs - * prep 1 bump PR - * trigger the freeze wizard - * trigger it again (check that the same object is returned, there should - only be one freeze per project at a time) - * configure the freeze - * check that it doesn't go through - * merge required PRs - * check that freeze goes through - * check that reminder is shown - * check that new branches are created w/ correct parent & commit info - """ - project.freeze_reminder = "Don't forget to like and subscribe" - - # have a project with 3 repos, and two branches (1.0 and master) - project.branch_ids = [ - (1, project.branch_ids.id, {'sequence': 1}), - (0, 0, {'name': '1.0', 'sequence': 2}), - ] - - [ - (master_head_a, master_head_b, master_head_c), - (pr_required_a, _, pr_required_c), - (pr_rel_a, pr_rel_b, pr_rel_c), - pr_bump_a, - pr_other - ] = setup_mess(repo_a, repo_b, repo_c) - env.run_crons() # process the PRs - - release_prs = { - repo_a.name: to_pr(env, pr_rel_a), - repo_b.name: to_pr(env, pr_rel_b), - repo_c.name: to_pr(env, 
pr_rel_c), - } - pr_bump_id = to_pr(env, pr_bump_a) - # trigger the ~~tree~~ freeze wizard - w = project.action_prepare_freeze() - w2 = project.action_prepare_freeze() - assert w == w2, "each project should only have one freeze wizard active at a time" - assert w['res_model'] == 'runbot_merge.project.freeze' - - w_id = env[w['res_model']].browse([w['res_id']]) - assert w_id.branch_name == '1.1', "check that the forking incremented the minor by 1" - assert len(w_id.release_pr_ids) == len(project.repo_ids), \ - "should ask for as many release PRs as we have repositories" - - # configure required PRs - w_id.required_pr_ids = (to_pr(env, pr_required_a) | to_pr(env, pr_required_c)).ids - # configure releases - for r in w_id.release_pr_ids: - r.pr_id = release_prs[r.repository_id.name].id - w_id.release_pr_ids[-1].pr_id = to_pr(env, pr_other).id - # configure bump - assert not w_id.bump_pr_ids, "there is no bump pr by default" - w_id.write({'bump_pr_ids': [ - (0, 0, {'repository_id': pr_bump_id.repository.id, 'pr_id': pr_bump_id.id}) - ]}) - r = w_id.action_freeze() - assert r == w, "the freeze is not ready so the wizard should redirect to itself" - owner = repo_c.owner - assert w_id.errors == f"""\ -* All release PRs must have the same label, found '{owner}:release-1.1, {owner}:whocares'. -* 2 required PRs not ready.""" - w_id.release_pr_ids[-1].pr_id = release_prs[repo_c.name].id - - with repo_a: - pr_required_a.post_comment('hansen r+', config['role_reviewer']['token']) - repo_a.post_status('apr', 'success', 'ci/runbot') - repo_a.post_status('apr', 'success', 'legal/cla') - with repo_c: - pr_required_c.post_comment('hansen r+', config['role_reviewer']['token']) - repo_c.post_status('cpr', 'success', 'ci/runbot') - repo_c.post_status('cpr', 'success', 'legal/cla') - env.run_crons() - - for repo in [repo_a, repo_b, repo_c]: - with repo: - repo.post_status('staging.master', 'success', 'ci/runbot') - repo.post_status('staging.master', 'success', 'legal/cla') - env.run_crons() - - assert to_pr(env, pr_required_a).state == 'merged' - assert to_pr(env, pr_required_c).state == 'merged' - - assert not w_id.errors - - # assume the wizard is closed, re-open it - w = project.action_prepare_freeze() - assert w['res_model'] == 'runbot_merge.project.freeze' - assert w['res_id'] == w_id.id, "check that we're still getting the old wizard" - w_id = env[w['res_model']].browse([w['res_id']]) - assert w_id.exists() - - # actually perform the freeze - r = w_id.action_freeze() - # check that the wizard was deleted - assert not w_id.exists() - # check that the wizard pops out a reminder dialog (kinda) - assert r['res_model'] == 'runbot_merge.project' - assert r['res_id'] == project.id - - # stuff that's done directly - for pr_id in release_prs.values(): - assert pr_id.state == 'merged' - assert pr_bump_id.state == 'merged' - - # stuff that's behind a cron - env.run_crons() - - assert pr_rel_a.state == "closed" - assert pr_rel_a.base['ref'] == '1.1' - assert pr_rel_b.state == "closed" - assert pr_rel_b.base['ref'] == '1.1' - assert pr_rel_c.state == "closed" - assert pr_rel_c.base['ref'] == '1.1' - for pr_id in release_prs.values(): - assert pr_id.target.name == '1.1' - - assert pr_bump_a.state == 'closed' - assert pr_bump_a.base['ref'] == 'master' - assert pr_bump_id.target.name == 'master' - - m_a = repo_a.commit('master') - assert m_a.message.startswith('Bump A') - assert repo_a.read_tree(m_a) == { - 'f': '1', # from master - 'g': 'x', # from required PR (merged into master before forking) - 'version': '1.2-alpha',
# from bump PR - } - - c_a = repo_a.commit('1.1') - assert c_a.message.startswith('Release 1.1 (A)') - assert repo_a.read_tree(c_a) == { - 'f': '1', # from master - 'g': 'x', # from required pr - 'version': '1.1', # from release commit - } - c_a_parent = repo_a.commit(c_a.parents[0]) - assert c_a_parent.message.startswith('super important file') - assert c_a_parent.parents[0] == master_head_a - - c_b = repo_b.commit('1.1') - assert c_b.message.startswith('Release 1.1 (B)') - assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''} - assert c_b.parents[0] == master_head_b - - c_c = repo_c.commit('1.1') - assert c_c.message.startswith('Release 1.1 (C)') - assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''} - assert repo_c.commit(c_c.parents[0]).parents[0] == master_head_c - - -def setup_mess(repo_a, repo_b, repo_c): - master_heads = [] - for r in [repo_a, repo_b, repo_c]: - with r: - [root, _] = r.make_commits( - None, - Commit('base', tree={'version': '', 'f': '0'}), - Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None), - ref='heads/1.0' - ) - master_heads.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master')) - # have 2 PRs required for the freeze - with repo_a: - repo_a.make_commits(master_heads[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr') - pr_required_a = repo_a.make_pr(target='master', head='apr') - with repo_c: - repo_c.make_commits(master_heads[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr') - pr_required_c = repo_c.make_pr(target='master', head='cpr') - # have 3 release PRs, only the first one updates the tree (version file) - with repo_a: - repo_a.make_commits( - master_heads[0], - Commit('Release 1.1 (A)', tree={'version': '1.1'}), - ref='heads/release-1.1' - ) - pr_rel_a = repo_a.make_pr(target='master', head='release-1.1') - with repo_b: - repo_b.make_commits( - master_heads[1], - Commit('Release 1.1 (B)', tree=None), - ref='heads/release-1.1' - ) - pr_rel_b = repo_b.make_pr(target='master', head='release-1.1') - with repo_c: - repo_c.make_commits(master_heads[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares') - pr_other = repo_c.make_pr(target='master', head='whocares') - repo_c.make_commits( - master_heads[2], - Commit('Release 1.1 (C)', tree=None), - ref='heads/release-1.1' - ) - pr_rel_c = repo_c.make_pr(target='master', head='release-1.1') - # have one bump PR on repo A - with repo_a: - repo_a.make_commits( - master_heads[0], - Commit("Bump A", tree={'version': '1.2-alpha'}), - ref='heads/bump-1.1', - ) - pr_bump_a = repo_a.make_pr(target='master', head='bump-1.1') - return master_heads, (pr_required_a, None, pr_required_c), (pr_rel_a, pr_rel_b, pr_rel_c), pr_bump_a, pr_other - -def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config): - """It should be possible to only freeze a subset of a project when e.g. one - of the repositories is managed differently than the rest and has - non-synchronous releases. - - - it should be possible to mark repositories as non-frozen (just opted out - of the entire thing), in which case no freeze PRs should be asked of them - - it should be possible to remove repositories from the freeze wizard - - repositories which are not in the freeze wizard should just not be frozen - - To do things correctly that should probably match with the branch filters - and stuff, but that's a configuration concern.
- """ - # have a project with 3 repos, and two branches (1.0 and master) - project.branch_ids = [ - (1, project.branch_ids.id, {'sequence': 1}), - (0, 0, {'name': '1.0', 'sequence': 2}), - ] - - masters = [] - for r in [repo_a, repo_b, repo_c]: - with r: - [root, _] = r.make_commits( - None, - Commit('base', tree={'version': '', 'f': '0'}), - Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None), - ref='heads/1.0' - ) - masters.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master')) - - with repo_a: - repo_a.make_commits( - masters[0], - Commit('Release 1.1', tree={'version': '1.1'}), - ref='heads/release-1.1' - ) - pr_rel_a = repo_a.make_pr(target='master', head='release-1.1') - - # the third repository we opt out of freezing - project.repo_ids.filtered(lambda r: r.name == repo_c.name).freeze = False - env.run_crons() # process the PRs - - # open the freeze wizard - w = project.action_prepare_freeze() - w_id = env[w['res_model']].browse([w['res_id']]) - # check that there are only rels for repos A and B - assert w_id.mapped('release_pr_ids.repository_id.name') == [repo_a.name, repo_b.name] - # remove B from the set - b_id = w_id.release_pr_ids.filtered(lambda r: r.repository_id.name == repo_b.name) - w_id.write({'release_pr_ids': [(3, b_id.id, 0)]}) - assert len(w_id.release_pr_ids) == 1 - # set lone release PR - w_id.release_pr_ids.pr_id = to_pr(env, pr_rel_a).id - assert not w_id.errors - - w_id.action_freeze() - assert not w_id.exists() - - assert repo_a.commit('1.1'), "should have created branch in repo A" - try: - repo_b.commit('1.1') - pytest.fail("should *not* have created branch in repo B") - except AssertionError: - ... - try: - repo_c.commit('1.1') - pytest.fail("should *not* have created branch in repo C") - except AssertionError: - ... - # can't stage because we (wilfully) don't have branches 1.1 in repos B and C - -@pytest.mark.skip("env's session is not thread-safe sadface") -def test_race_conditions(): - """need the ability to dup the env in order to send concurrent requests to - the inner odoo - - - try to run the action_freeze during a cron (merge or staging), should - error (recover and return nice message?) - - somehow get ahead of the action and update master's commit between moment - where it is fetched and moment where the bump pr is fast-forwarded, - there's actually a bit of time thanks to the rate limiting (fetch of base, - update of tmp to base, rebase of commits on tmp, wait 1s, for each release - and bump PR, then the release branches are created, and finally the bump - prs) - """ - ... - -def test_freeze_conflict(env, project, repo_a, repo_b, repo_c, users, config): - """If one of the branches we're trying to create already exists, the wizard - fails. 
- """ - project.branch_ids = [ - (1, project.branch_ids.id, {'sequence': 1}), - (0, 0, {'name': '1.0', 'sequence': 2}), - ] - heads, _, (pr_rel_a, pr_rel_b, pr_rel_c), bump, other = \ - setup_mess(repo_a, repo_b, repo_c) - env.run_crons() - - release_prs = { - repo_a.name: to_pr(env, pr_rel_a), - repo_b.name: to_pr(env, pr_rel_b), - repo_c.name: to_pr(env, pr_rel_c), - } - - w = project.action_prepare_freeze() - w_id = env[w['res_model']].browse([w['res_id']]) - for repo, release_pr in release_prs.items(): - w_id.release_pr_ids\ - .filtered(lambda r: r.repository_id.name == repo)\ - .pr_id = release_pr.id - - # create conflicting branch - with repo_c: - repo_c.make_ref('heads/1.1', heads[2]) - - # actually perform the freeze - with pytest.raises(xmlrpc.client.Fault) as e: - w_id.action_freeze() - assert f"Unable to create branch {repo_c.name}:1.1" in e.value.faultString - - # branches a and b should have been deleted - with pytest.raises(AssertionError) as e: - repo_a.get_ref('heads/1.1') - assert e.value.args[0].startswith("Not Found") - with pytest.raises(AssertionError) as e: - repo_b.get_ref('heads/1.1') - assert e.value.args[0].startswith("Not Found") diff --git a/runbot_merge/tests/test_oddities.py b/runbot_merge/tests/test_oddities.py deleted file mode 100644 index 3ad1289b..00000000 --- a/runbot_merge/tests/test_oddities.py +++ /dev/null @@ -1,131 +0,0 @@ -import requests - -from utils import Commit, to_pr - - -def test_partner_merge(env): - p_src = env['res.partner'].create({ - 'name': "xxx", - 'github_login': 'xxx' - }) - # proper login with useful info - p_dest = env['res.partner'].create({ - 'name': 'Partner P. Partnersson', - 'github_login': '' - }) - - env['base.partner.merge.automatic.wizard'].create({ - 'state': 'selection', - 'partner_ids': (p_src + p_dest).ids, - 'dst_partner_id': p_dest.id, - }).action_merge() - assert not p_src.exists() - assert p_dest.name == 'Partner P. Partnersson' - assert p_dest.github_login == 'xxx' - -def test_name_search(env): - """ PRs should be findable by: - - * number - * display_name (`repository#number`) - * label - - This way we can find parents or sources by these informations. 
- """ - p = env['runbot_merge.project'].create({ - 'name': 'proj', - 'github_token': 'no', - }) - b = env['runbot_merge.branch'].create({ - 'name': 'target', - 'project_id': p.id - }) - r = env['runbot_merge.repository'].create({ - 'name': 'repo', - 'project_id': p.id, - }) - - baseline = {'target': b.id, 'repository': r.id} - PRs = env['runbot_merge.pull_requests'] - prs = PRs.create({**baseline, 'number': 1964, 'label': 'victor:thump', 'head': 'a', 'message': 'x'})\ - | PRs.create({**baseline, 'number': 1959, 'label': 'marcus:frankenstein', 'head': 'b', 'message': 'y'})\ - | PRs.create({**baseline, 'number': 1969, 'label': 'victor:patch-1', 'head': 'c', 'message': 'z'}) - pr0, pr1, pr2 = prs.name_get() - - assert PRs.name_search('1964') == [pr0] - assert PRs.name_search('1969') == [pr2] - - assert PRs.name_search('frank') == [pr1] - assert PRs.name_search('victor') == [pr2, pr0] - - assert PRs.name_search('thump') == [pr0] - - assert PRs.name_search('repo') == [pr2, pr0, pr1] - assert PRs.name_search('repo#1959') == [pr1] - -def test_message_desync(env, project, make_repo, users, setreviewers, config): - """If the PR message gets desync'd (github misses sending an update), the - merge message should still match what's on github rather than what's in the - db - """ - repo = make_repo('repo') - env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'status'})] - }) - setreviewers(*project.repo_ids) - - with repo: - [m] = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - - [c] = repo.make_commits('master', Commit('whee', tree={'b': '2'})) - pr = repo.make_pr(title='title', body='body', target='master', head=c) - repo.post_status(c, 'success', 'status') - env.run_crons() - - pr_id = to_pr(env, pr) - assert pr_id.message == 'title\n\nbody' - pr_id.message = "xxx" - - with repo: - pr.post_comment('hansen merge r+', config['role_reviewer']['token']) - env.run_crons() - - st = repo.commit('staging.master') - assert st.message.startswith('title\n\nbody'),\ - "the stored PR message should have been ignored when staging" - assert st.parents == [m, c], "check the staging's ancestry is the right one" - -def test_unreviewer(env, project, port): - repo = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': 'a_test_repo', - 'status_ids': [(0, 0, {'context': 'status'})] - }) - p = env['res.partner'].create({ - 'name': 'George Pearce', - 'github_login': 'emubitch', - 'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})] - }) - - r = requests.post(f'http://localhost:{port}/runbot_merge/get_reviewers', json={ - 'jsonrpc': '2.0', - 'id': None, - 'method': 'call', - 'params': {}, - }) - r.raise_for_status() - assert 'error' not in r.json() - assert r.json()['result'] == ['emubitch'] - - r = requests.post(f'http://localhost:{port}/runbot_merge/remove_reviewers', json={ - 'jsonrpc': '2.0', - 'id': None, - 'method': 'call', - 'params': {'github_logins': ['emubitch']}, - }) - r.raise_for_status() - assert 'error' not in r.json() - - assert p.review_rights == env['res.partner.review'] diff --git a/runbot_merge/tests/test_provisioning.py b/runbot_merge/tests/test_provisioning.py deleted file mode 100644 index 6119bf17..00000000 --- a/runbot_merge/tests/test_provisioning.py +++ /dev/null @@ -1,102 +0,0 @@ -import pytest -import requests - -GEORGE = { - 'name': "George Pearce", - 'email': 'george@example.org', - 'github_login': 'emubitch', - 'sub': '19321102' -} -def 
test_basic_provisioning(env, port): - r = provision_user(port, [GEORGE]) - assert r == [1, 0] - - g = env['res.users'].search([('login', '=', GEORGE['email'])]) - assert g.partner_id.name == GEORGE['name'] - assert g.partner_id.github_login == GEORGE['github_login'] - assert g.oauth_uid == GEORGE['sub'] - (model, g_id) = env['ir.model.data']\ - .check_object_reference('base', 'group_user') - assert model == 'res.groups' - assert g.groups_id.id == g_id, "check that users were provisioned as internal (not portal)" - - # repeated provisioning should be a no-op - r = provision_user(port, [GEORGE]) - assert r == [0, 0] - - # the email (real login) should be the determinant, any other field is - # updatable - r = provision_user(port, [{**GEORGE, 'name': "x"}]) - assert r == [0, 1] - - r = provision_user(port, [dict(GEORGE, name="x", github_login="y", sub="42")]) - assert r == [0, 1] - - # can't fail anymore because github_login now used to look up the existing - # user - # with pytest.raises(Exception): - # provision_user(port, [{ - # 'name': "other@example.org", - # 'email': "x", - # 'github_login': "y", - # 'sub': "42" - # }]) - - r = provision_user(port, [dict(GEORGE, active=False)]) - assert r == [0, 1] - assert not env['res.users'].search([('login', '=', GEORGE['email'])]) - assert env['res.partner'].search([('email', '=', GEORGE['email'])]) - -def test_upgrade_partner(env, port): - # If a partner exists for a github login (and / or email?) it can be - # upgraded by creating a user for it - p = env['res.partner'].create({ - 'name': GEORGE['name'], - 'email': GEORGE['email'], - }) - r = provision_user(port, [GEORGE]) - assert r == [1, 0] - assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{ - 'id': p.user_ids.id, - 'github_login': GEORGE['github_login'], - 'oauth_uid': GEORGE['sub'], - 'email': GEORGE['email'], - }] - - p.user_ids.unlink() - p.unlink() - - p = env['res.partner'].create({ - 'name': GEORGE['name'], - 'github_login': GEORGE['github_login'], - }) - r = provision_user(port, [GEORGE]) - assert r == [1, 0] - assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{ - 'id': p.user_ids.id, - 'github_login': GEORGE['github_login'], - 'oauth_uid': GEORGE['sub'], - 'email': GEORGE['email'], - }] - - p.user_ids.unlink() - p.unlink() - -def test_no_email(env, port): - """ Provisioning system should ignore email-less entries - """ - r = provision_user(port, [{**GEORGE, 'email': None}]) - assert r == [0, 0] - -def provision_user(port, users): - r = requests.post(f'http://localhost:{port}/runbot_merge/provision', json={ - 'jsonrpc': '2.0', - 'id': None, - 'method': 'call', - 'params': {'users': users}, - }) - r.raise_for_status() - json = r.json() - assert 'error' not in json - - return json['result'] diff --git a/runbot_merge/tests/test_status_overrides.py b/runbot_merge/tests/test_status_overrides.py deleted file mode 100644 index 9ffe7604..00000000 --- a/runbot_merge/tests/test_status_overrides.py +++ /dev/null @@ -1,214 +0,0 @@ -import json - -import pytest - -from utils import seen, Commit - - -def test_no_duplicates(env): - """ Should not have two override records for the same (context, repo) - """ - Overrides = env['res.partner.override'] - Overrides.create({'context': 'a'}) - with pytest.raises(Exception, match=r'already exists'): - Overrides.create({'context': 'a'}) - -def name_search(Model, name): - """ Convenience function to return a recordset instead of a craplist - """ - return Model.browse(id_ for id_, _ in Model.name_search(name)) -def 
test_finding(env): - project = env['runbot_merge.project'].create({ - 'name': 'test', - 'github_token': 'xxx', 'github_prefix': 'no', - }) - repo_1 = env['runbot_merge.repository'].create({'project_id': project.id, 'name': 'r1'}) - repo_2 = env['runbot_merge.repository'].create({'project_id': project.id, 'name': 'r2'}) - - Overrides = env['res.partner.override'] - a = Overrides.create({'context': 'ctx1'}) - b = Overrides.create({'context': 'ctx1', 'repository_id': repo_1.id}) - c = Overrides.create({'context': 'ctx1', 'repository_id': repo_2.id}) - d = Overrides.create({'context': 'ctx2', 'repository_id': repo_2.id}) - - assert name_search(Overrides, 'ctx1') == a|b|c - assert name_search(Overrides, 'ctx') == a|b|c|d - assert name_search(Overrides, 'r2') == c|d - -def test_basic(env, project, make_repo, users, setreviewers, config): - """ - Test that we can override a status on a PR: - - * @mergebot override context=status - * target url should be the comment (?) - * description should be overridden by <user> - """ - repo = make_repo('repo') - repo_id = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'l/int'})] - }) - setreviewers(*project.repo_ids) - # "other" can override the lint - env['res.partner'].create({ - 'name': config['role_other'].get('name', 'Other'), - 'github_login': users['other'], - 'override_rights': [(0, 0, { - 'repository_id': repo_id.id, - 'context': 'l/int', - })] - }) - - with repo: - m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - - repo.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') - pr = repo.make_pr(target='master', title='super change', head='change') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'approved' - - with repo: - pr.post_comment('hansen override=l/int', config['role_reviewer']['token']) - env.run_crons() - assert pr_id.state == 'approved' - - with repo: - pr.post_comment('hansen override=l/int', config['role_other']['token']) - env.run_crons() - assert pr_id.state == 'ready' - - comments = pr.comments - assert comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr, users), - (users['reviewer'], 'hansen override=l/int'), - (users['user'], "I'm sorry, @{}: you are not allowed to override this status.".format(users['reviewer'])), - (users['other'], "hansen override=l/int"), - ] - assert pr_id.statuses == '{}' - assert json.loads(pr_id.overrides) == {'l/int': { - 'state': 'success', - 'target_url': comments[-1]['html_url'], - 'description': 'Overridden by @{}'.format(users['other']), - }} - -def test_multiple(env, project, make_repo, users, setreviewers, config): - """ Test that overriding multiple statuses in the same comment works - """ - - repo = make_repo('repo') - repo_id = env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'l/int'}), (0, 0, {'context': 'c/i'})] - }) - setreviewers(*project.repo_ids) - # "other" can override the lints - env['res.partner'].create({ - 'name': config['role_other'].get('name', 'Other'), - 'github_login': users['other'], - 'override_rights': [(0, 0, { - 'repository_id': repo_id.id, - 'context': 'l/int', - }), (0, 0, { - 'repository_id': repo_id.id, - 'context': 'c/i', - })] - }) - - with repo: - root = 
repo.make_commits(None, Commit('root', tree={'a': '0'}), ref='heads/master') - for i, comment in enumerate([ - # style 1: multiple commands inline - 'hansen override=l/int override=c/i', - # style 2: multiple parameters to command - 'hansen override=l/int,c/i', - # style 3: multiple commands each on its own line - 'hansen override=l/int\nhansen override=c/i', - ], start=1): - with repo: - repo.make_commits(root, Commit(f'pr{i}', tree={'a': f'{i}'}), ref=f'heads/change{i}') - pr = repo.make_pr(target='master', title=f'super change {i}', head=f'change{i}') - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'opened' - - with repo: - pr.post_comment(comment, config['role_other']['token']) - env.run_crons() - assert pr_id.state == 'validated' - - comments = pr.comments - assert pr_id.statuses == '{}' - assert json.loads(pr_id.overrides) == { - 'l/int': { - 'state': 'success', - 'target_url': comments[-1]['html_url'], - 'description': 'Overridden by @{}'.format(users['other']), - }, - 'c/i': { - 'state': 'success', - 'target_url': comments[-1]['html_url'], - 'description': 'Overridden by @{}'.format(users['other']), - }, - } - -def test_no_repository(env, project, make_repo, users, setreviewers, config): - """ A repo missing from an override allows overriding the status in every repo - """ - repo = make_repo('repo') - env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repo.name, - 'status_ids': [(0, 0, {'context': 'l/int'})] - }) - setreviewers(*project.repo_ids) - # "other" can override the lint - env['res.partner'].create({ - 'name': config['role_other'].get('name', 'Other'), - 'github_login': users['other'], - 'override_rights': [(0, 0, {'context': 'l/int'})] - }) - - with repo: - m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - - repo.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') - pr = repo.make_pr(target='master', title='super change', head='change') - pr.post_comment('hansen r+', config['role_reviewer']['token']) - env.run_crons() - - pr_id = env['runbot_merge.pull_requests'].search([ - ('repository.name', '=', repo.name), - ('number', '=', pr.number) - ]) - assert pr_id.state == 'approved' - - with repo: - pr.post_comment('hansen override=l/int', config['role_other']['token']) - env.run_crons() - assert pr_id.state == 'ready' - - comments = pr.comments - assert comments == [ - (users['reviewer'], 'hansen r+'), - seen(env, pr, users), - (users['other'], "hansen override=l/int"), - ] - assert pr_id.statuses == '{}' - assert json.loads(pr_id.overrides) == {'l/int': { - 'state': 'success', - 'target_url': comments[-1]['html_url'], - 'description': 'Overridden by @{}'.format(users['other']), - }} diff --git a/runbot_merge/utils.py b/runbot_merge/utils.py deleted file mode 100644 index 6ef1c10d..00000000 --- a/runbot_merge/utils.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -import itertools -import time - - -def shorten(text_ish, length): - """ If necessary, cuts-off the text or bytes input and appends ellipsis to - signal the cutoff, such that the result is below the provided length - (according to whatever "len" means on the text-ish so bytes or codepoints - or code units). - """ - if len(text_ish or ()) <= length: - return text_ish - - cont = '...' 
- if isinstance(text_ish, bytes): - cont = cont.encode('ascii') # whatever - # add enough room for the ellipsis - return text_ish[:length-3] + cont - -BACKOFF_DELAYS = (0.1, 0.2, 0.4, 0.8, 1.6) -def backoff(func=None, *, delays=BACKOFF_DELAYS, exc=Exception): - if func is None: - return lambda func: backoff(func, delays=delays, exc=exc) - - for delay in itertools.chain(delays, [None]): - try: - return func() - except exc: - if delay is None: - raise - time.sleep(delay) diff --git a/runbot_merge/views/configuration.xml b/runbot_merge/views/configuration.xml deleted file mode 100644 index 70e8d710..00000000 --- a/runbot_merge/views/configuration.xml +++ /dev/null @@ -1,43 +0,0 @@ -<odoo> - <record id="action_overrides" model="ir.actions.act_window"> - <field name="name">CI / statuses overrides</field> - <field name="res_model">res.partner.override</field> - </record> - <record id="tree_overrides" model="ir.ui.view"> - <field name="name">Overrides List</field> - <field name="model">res.partner.override</field> - <field name="arch" type="xml"> - <tree editable="bottom"> - <field name="context"/> - <field name="repository_id"/> - <field name="partner_ids" widget="many2many_tags"/> - </tree> - </field> - </record> - - <record id="action_review" model="ir.actions.act_window"> - <field name="name">Review Rights</field> - <field name="res_model">res.partner.review</field> - <field name="context">{'search_default_group_by_repository': True}</field> - </record> - <record id="tree_review" model="ir.ui.view"> - <field name="name">Review Rights</field> - <field name="model">res.partner.review</field> - <field name="arch" type="xml"> - <tree editable="bottom"> - <field name="repository_id"/> - <field name="partner_id"/> - <field name="review"/> - <field name="self_review"/> - </tree> - </field> - </record> - - <menuitem name="Configuration" id="menu_configuration" parent="runbot_merge_menu"/> - <menuitem name="CI Overrides" id="menu_configuration_overrides" - parent="menu_configuration" - action="action_overrides"/> - <menuitem name="Review Rights" id="menu_configuration_review" - parent="menu_configuration" - action="action_review"/> -</odoo> diff --git a/runbot_merge/views/mergebot.xml b/runbot_merge/views/mergebot.xml deleted file mode 100644 index 9cc3168b..00000000 --- a/runbot_merge/views/mergebot.xml +++ /dev/null @@ -1,242 +0,0 @@ -<odoo> - - <record id="form_repository" model="ir.ui.view"> - <field name="name">Repository form</field> - <field name="model">runbot_merge.repository</field> - <field name="arch" type="xml"> - <form> - <sheet> - <div class="oe_title"> - <h1><field name="name"/></h1> - </div> - <group> - <group> - <field name="group_id" string="Accessible to"/> - </group> - <group> - <field name="branch_filter"/> - </group> - </group> - <separator string="Required Statuses"/> - <field name="status_ids"> - <tree editable="bottom"> - <field name="context"/> - <field name="branch_filter"/> - <field name="prs"/> - <field name="stagings"/> - </tree> - </field> - </sheet> - </form> - </field> - </record> - - <record id="runbot_merge_action_projects" model="ir.actions.act_window"> - <field name="name">Projects</field> - <field name="res_model">runbot_merge.project</field> - <field name="view_mode">tree,form</field> - </record> - - <record id="runbot_merge_action_prs" model="ir.actions.act_window"> - <field name="name">Pull Requests</field> - <field name="res_model">runbot_merge.pull_requests</field> - <field name="view_mode">tree,form</field> - <field name="context">{'search_default_open': 
True}</field> - </record> - <record id="runbot_merge_search_prs" model="ir.ui.view"> - <field name="name">PR search</field> - <field name="model">runbot_merge.pull_requests</field> - <field name="arch" type="xml"> - <search> - <filter - name="open" string="Open" - domain="[('state', 'not in', ['merged', 'closed'])]" - /> - <field name="number"/> - <field name="author"/> - <field name="label"/> - <field name="target"/> - <field name="repository"/> - <field name="state"/> - - <group> - <filter string="Target" name="target_" context="{'group_by':'target'}"/> - <filter string="Repository" name="repo_" context="{'group_by':'repository'}"/> - <filter string="State" name="state_" context="{'group_by':'state'}"/> - <filter string="Priority" name="priority_" context="{'group_by':'priority'}"/> - </group> - </search> - </field> - </record> - <record id="runbot_merge_tree_prs" model="ir.ui.view"> - <field name="name">PR tree</field> - <field name="model">runbot_merge.pull_requests</field> - <field name="arch" type="xml"> - <tree> - <field name="repository"/> - <field name="number"/> - <field name="target"/> - <field name="state"/> - <field name="author"/> - <field name="reviewed_by"/> - </tree> - </field> - </record> - <record id="runbot_merge_form_prs" model="ir.ui.view"> - <field name="name">PR form</field> - <field name="model">runbot_merge.pull_requests</field> - <field name="arch" type="xml"> - <form> - <header/> - <sheet> - <div class="oe_title"> - <h1> - <field name="repository"/>#<field name="number"/> - </h1> - </div> - <group> - <group> - <field name="target"/> - <field name="state"/> - <field name="author"/> - </group> - <group> - <field name="label"/> - <field name="priority"/> - <field name="squash"/> - </group> - </group> - <group> - <group colspan="4"> - <field name="head"/> - <field name="statuses"/> - </group> - <group colspan="4"> - <field name="overrides"/> - </group> - </group> - <group> - <group colspan="4" string="Message"> - <field name="message" nolabel="1"/> - </group> - </group> - <group> - <group colspan="4" string="Delegates"> - <field name="delegates" nolabel="1"> - <tree> - <field name="name"/> - <field name="github_login"/> - </tree> - </field> - </group> - </group> - </sheet> - </form> - </field> - </record> - - <record id="runbot_merge_action_stagings" model="ir.actions.act_window"> - <field name="name">Stagings</field> - <field name="res_model">runbot_merge.stagings</field> - <field name="view_mode">tree,form</field> - <field name="context">{'search_default_active': True, 'active_test': False}</field> - </record> - <record id="runbot_merge_search_stagings" model="ir.ui.view"> - <field name="name">Stagings Search</field> - <field name="model">runbot_merge.stagings</field> - <field name="arch" type="xml"> - <search> - <filter string="Active" name="active" - domain="[('active', '=', True)]"/> - <field name="state"/> - <field name="target"/> - - <group> - <filter string="Target" name="target_" context="{'group_by': 'target'}"/> - </group> - </search> - </field> - </record> - <record id="runbot_merge_tree_stagings" model="ir.ui.view"> - <field name="name">Stagings Tree</field> - <field name="model">runbot_merge.stagings</field> - <field name="arch" type="xml"> - <tree> - <field name="target"/> - <field name="state"/> - </tree> - </field> - </record> - <record id="runbot_merge_form_stagings" model="ir.ui.view"> - <field name="name">Stagings Form</field> - <field name="model">runbot_merge.stagings</field> - <field name="arch" type="xml"> - <form> - <field name="active" 
invisible="1"/> - <header> - <button type="object" name="action_cancel" string="Cancel" class="oe_highlight" - attrs="{'invisible': [('active', '=', False)]}" - /> - </header> - <sheet> - <group> - <group> - <field name="target"/> - <field name="state"/> - <field name="reason"/> - </group> - <group> - <field name="staged_at"/> - </group> - </group> - <group string="Heads"> - <field name="head_ids" colspan="4" nolabel="1"> - <tree> - <field name="sha"/> - <field name="statuses"/> - </tree> - </field> - </group> - <group string="Batches"> - <field name="batch_ids" colspan="4" nolabel="1"> - <tree> - <field name="prs" widget="many2many_tags" - options="{'no_quick_create': True}"/> - </tree> - </field> - </group> - </sheet> - </form> - </field> - </record> - - <record id="runbot_merge_action_commits" model="ir.actions.act_window"> - <field name="name">Commit Statuses</field> - <field name="res_model">runbot_merge.commit</field> - <field name="view_mode">tree,form</field> - </record> - <record id="runbot_merge_commits_tree" model="ir.ui.view"> - <field name="name">commits list</field> - <field name="model">runbot_merge.commit</field> - <field name="arch" type="xml"> - <tree> - <field name="sha"/> - <field name="statuses"/> - </tree> - </field> - </record> - - <menuitem name="Mergebot" id="runbot_merge_menu"/> - <menuitem name="Projects" id="runbot_merge_menu_project" - parent="runbot_merge_menu" - action="runbot_merge_action_projects"/> - <menuitem name="Pull Requests" id="runbot_merge_menu_prs" - parent="runbot_merge_menu" - action="runbot_merge_action_prs"/> - <menuitem name="Stagings" id="runbot_merge_menu_stagings" - parent="runbot_merge_menu" - action="runbot_merge_action_stagings"/> - <menuitem name="Commits" id="runbot_merge_menu_commits" - parent="runbot_merge_menu" - action="runbot_merge_action_commits"/> -</odoo> diff --git a/runbot_merge/views/queues.xml b/runbot_merge/views/queues.xml deleted file mode 100644 index a72571f3..00000000 --- a/runbot_merge/views/queues.xml +++ /dev/null @@ -1,97 +0,0 @@ -<odoo> - <!-- - Queues mergebot menu: contains various list views inspecting the cron tasks - (mostly) - --> - <record id="action_splits" model="ir.actions.act_window"> - <field name="name">Splits</field> - <field name="res_model">runbot_merge.split</field> - </record> - <record id="tree_splits" model="ir.ui.view"> - <field name="name">Splits</field> - <field name="model">runbot_merge.split</field> - <field name="arch" type="xml"> - <tree> - <field name="id"/> - <field name="target"/> - </tree> - </field> - </record> - - <record id="action_feedback" model="ir.actions.act_window"> - <field name="name">Feedback</field> - <field name="res_model">runbot_merge.pull_requests.feedback</field> - </record> - <record id="tree_feedback" model="ir.ui.view"> - <field name="name">Feedback</field> - <field name="model">runbot_merge.pull_requests.feedback</field> - <field name="arch" type="xml"> - <tree> - <field name="repository"/> - <field name="pull_request"/> - <field name="message"/> - <field name="close"/> - </tree> - </field> - </record> - - <record id="action_tagging" model="ir.actions.act_window"> - <field name="name">Tagging</field> - <field name="res_model">runbot_merge.pull_requests.tagging</field> - </record> - <record id="tree_tagging" model="ir.ui.view"> - <field name="name">Tagging</field> - <field name="model">runbot_merge.pull_requests.tagging</field> - <field name="arch" type="xml"> - <tree editable="bottom"> - <field name="repository"/> - <field name="pull_request"/> - <field 
name="tags_add"/> - <field name="tags_remove"/> - </tree> - </field> - </record> - - <record id="action_fetches" model="ir.actions.act_window"> - <field name="name">PRs to fetch</field> - <field name="res_model">runbot_merge.fetch_job</field> - <field name="view_mode">tree</field> - <field name="context">{'default_active': True}</field> - </record> - <record id="search_fetches" model="ir.ui.view"> - <field name="name">Fetches Search</field> - <field name="model">runbot_merge.fetch_job</field> - <field name="arch" type="xml"> - <search> - <filter string="Active" name="active" - domain="[('active', '=', True)]"/> - <field name="repository"/> - <field name="number"/> - </search> - </field> - </record> - <record id="tree_fetches" model="ir.ui.view"> - <field name="name">Fetches Tree</field> - <field name="model">runbot_merge.fetch_job</field> - <field name="arch" type="xml"> - <tree> - <field name="repository"/> - <field name="number"/> - </tree> - </field> - </record> - - <menuitem name="Queues" id="menu_queues" parent="runbot_merge_menu"/> - <menuitem name="Splits" id="menu_queues_splits" - parent="menu_queues" - action="action_splits"/> - <menuitem name="Feedback" id="menu_queues_feedback" - parent="menu_queues" - action="action_feedback"/> - <menuitem name="Tagging" id="menu_queues_tagging" - parent="menu_queues" - action="action_tagging"/> - <menuitem name="Fetches" id="menu_fetches" - parent="menu_queues" - action="action_fetches"/> -</odoo> diff --git a/runbot_merge/views/res_partner.xml b/runbot_merge/views/res_partner.xml deleted file mode 100644 index fde5f7e1..00000000 --- a/runbot_merge/views/res_partner.xml +++ /dev/null @@ -1,87 +0,0 @@ -<odoo> - <record id="runbot_merge_tree_partner" model="ir.ui.view"> - <field name="name">Improve search on partners</field> - <field name="model">res.partner</field> - <field name="inherit_id" ref="base.view_res_partner_filter"/> - <field name="arch" type="xml"> - <field name="name" position="attributes"> - <attribute name="filter_domain">[ - '|', '|', - ('name', 'ilike', self), - ('github_login', 'ilike', self), - ('email', 'ilike', self), - ]</attribute> - </field> - </field> - </record> - - <record id="runbot_merge_tree_partner" model="ir.ui.view"> - <field name="name">Configure partners list to be useful</field> - <field name="model">res.partner</field> - <field name="inherit_id" ref="base.view_partner_tree"/> - <field name="arch" type="xml"> - <xpath expr="//tree" position="replace"> - <tree string="Contacts"> - <field name="display_name" string="Name"/> - <field name="github_login"/> - <field name="review_rights" widget="many2many_tags"/> - </tree> - </xpath> - </field> - </record> - <record id="runbot_merge_form_partner" model="ir.ui.view"> - <field name="name">Add mergebot/GH info to partners form</field> - <field name="model">res.partner</field> - <field name="inherit_id" ref="base.view_partner_form"/> - <field name="arch" type="xml"> - <xpath expr="//sheet" position="before"> - <header> - <button type="object" name="fetch_github_email" - string="Fetch Github Email" class="oe_highlight" - attrs="{'invisible': ['|', ('email', '!=', False), ('github_login', '=', False)]}" - /> - </header> - <div class="alert alert-warning" role="alert" - attrs="{'invisible': ['|', ('email', '!=', False), ('review_rights', '=', [])]}"> - Reviewers must have an email address set! Without an email - configured, reviews will be ignored. 
- </div> - </xpath> - <xpath expr="//notebook" position="inside"> - <page string="Mergebot" groups="runbot_merge.group_admin"> - <group> - <group> - <field name="github_login"/> - </group> - </group> - <group> - <group colspan="4" string="Review Rights"> - <field name="review_rights" nolabel="1"> - <tree string="Review ACLs" editable="bottom"> - <field name="repository_id"/> - <field name="review"/> - <field name="self_review"/> - </tree> - </field> - </group> - <group colspan="4"> - <field name="override_rights" widget="many2many_tags"/> - </group> - </group> - <group> - <group colspan="4" string="Delegate On"> - <field name="delegate_reviewer" nolabel="1"> - <tree> - <field name="repository"/> - <field name="number"/> - <field name="target"/> - <field name="state"/> - </tree> - </field> - </group> - </group> - </page> - </xpath> - </field> - </record> -</odoo> diff --git a/runbot_merge/views/runbot_merge_project.xml b/runbot_merge/views/runbot_merge_project.xml deleted file mode 100644 index 2f670ed3..00000000 --- a/runbot_merge/views/runbot_merge_project.xml +++ /dev/null @@ -1,78 +0,0 @@ -<odoo> - <record id="runbot_merge_form_project" model="ir.ui.view"> - <field name="name">Project Form</field> - <field name="model">runbot_merge.project</field> - <field name="arch" type="xml"> - <form> - <field name="freeze_id" invisible="1"/> - <header> - <button type="object" name="action_prepare_freeze" - string="Freeze" - attrs="{'invisible': [('freeze_id', '!=', False)]}"/> - <button type="object" name="action_prepare_freeze" - string="View Freeze" class="oe_highlight" - attrs="{'invisible': [('freeze_id', '=', False)]}"/> - </header> - <sheet> - <div class="oe_title"> - <h1><field name="name" placeholder="Name"/></h1> - </div> - <group> - <group> - <field name="github_prefix" string="bot name"/> - </group> - </group> - <group> - <group> - <field name="github_token"/> - <field name="secret"/> - </group> - <group> - <field name="ci_timeout"/> - <field name="batch_limit"/> - </group> - </group> - - <group class="oe_edit_only"> - <group colspan="4"> - <label for="freeze_reminder"> - Reminder to show after freeze - </label> - <field colspan="4" name="freeze_reminder" nolabel="1"/> - </group> - </group> - - <separator string="Repositories"/> - <field name="repo_ids"> - <tree> - <field name="sequence" widget="handle"/> - <field name="name"/> - <field name="branch_filter"/> - <field name="status_ids" widget="many2many_tags"/> - </tree> - </field> - <separator string="Branches"/> - <field name="branch_ids"> - <tree editable="bottom" decoration-muted="not active"> - <field name="sequence" widget="handle" /> - <field name="name"/> - <field name="active"/> - </tree> - </field> - </sheet> - </form> - </field> - </record> - - <record id="project_freeze_reminder" model="ir.ui.view"> - <field name="name">Project Form</field> - <field name="model">runbot_merge.project</field> - <field name="arch" type="xml"> - <form> - <sheet> - <field name="freeze_reminder" nolabel="1" readonly="1"/> - </sheet> - </form> - </field> - </record> -</odoo> diff --git a/runbot_merge/views/templates.xml b/runbot_merge/views/templates.xml deleted file mode 100644 index 0e455f2e..00000000 --- a/runbot_merge/views/templates.xml +++ /dev/null @@ -1,420 +0,0 @@ -<odoo> - <function model="website.page" name="write"> - <value eval="ref('website.homepage_page')"/> - <value eval="{'active': False}"/> - </function> - - <template id="assets_frontend" inherit_id="web.assets_frontend"> - <xpath expr="link[last()]" position="after"> - <link 
rel="stylesheet" type="text/scss" href="/runbot_merge/static/scss/runbot_merge.scss"/> - </xpath> - </template> - - <template id="link-pr" name="create a link to `pr`"> - <t t-set="title"> - <t t-if="pr.repository.group_id <= env.user.groups_id"> - <t t-esc="pr.message.split('\n')[0]"/> - </t> - </t> - <a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" - t-att-title="pr.blocked or title.strip()" - t-att-target="target or None" - ><t t-esc="pr.display_name"/></a> - </template> - - <template id="staging-statuses" name="dropdown statuses list of stagings"> - <div class="dropdown" t-if="staging.heads"> - <button class="btn btn-link dropdown-toggle" - type="button" - data-toggle="dropdown" - aria-haspopup="true" - aria-expanded="true" - t-attf-title="Staged at {{staging.staged_at}}Z" - > - <t t-raw="0"/> - <span class="caret"></span> - </button> - <ul class="dropdown-menu staging-statuses"> - <li groups="runbot_merge.group_admin"> - <a t-attf-href="/web#id={{staging.id}}&view_type=form&model=runbot_merge.stagings" - target="new"> - Open Staging - </a> - </li> - <t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/> - <t t-foreach="repo_statuses._for_staging(staging)" t-as="req"> - <t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or (None, None)"/> - <li t-att-class=" - 'bg-success' if st[0] == 'success' - else 'bg-danger' if st[0] in ('error', 'failure') - else 'bg-info' if st[0] - else 'bg-light'" - ><a t-att-href="st[1]" target="new"> - <t t-esc="req.repo_id.name"/>: <t t-esc="req.context"/> - </a></li> - </t> - </ul> - </div> - </template> - - <template id="alerts"> - <div id="alerts" class="row text-center"> - <div class="alert alert-light col-md-12 h6 mb-0"> - <a href="/runbot_merge/changelog">Changelog</a> - </div> - <t t-set="stagingcron" t-value="env(user=1).ref('runbot_merge.staging_cron')"/> - <div t-if="not stagingcron.active" class="alert alert-warning col-12 mb-0" role="alert"> - Staging is disabled, "ready" pull requests will not be staged. - </div> - <t t-set="mergecron" t-value="env(user=1).ref('runbot_merge.merge_cron')"/> - <div t-if="not mergecron.active" class="alert alert-warning col-12 mb-0" role="alert"> - Merging is disabled, stagings will not be integrated. 
- </div> - </div> - </template> - - <template id="dashboard" name="mergebot dashboard"> - <t t-call="website.layout"> - <div id="wrap"><div class="container-fluid"> - <t t-call="runbot_merge.alerts"/> - <section t-foreach="projects.with_context(active_test=False)" t-as="project" class="row"> - <h1 class="col-md-12"><t t-esc="project.name"/></h1> - <div class="col-md-12"> - key: - <ul class="list-inline"> - <li class="bg-success">success (hopefully merged)</li> - <li class="bg-info">ongoing</li> - <li class="bg-danger">failure</li> - <li class="bg-gray-lighter">cancelled</li> - </ul> - </div> - <section t-foreach="project.branch_ids" t-as="branch" t-if="branch.active" class="col-md-12"> - <h2> - <a t-attf-href="/runbot_merge/{{branch.id}}"> - <t t-esc="branch.name"/> - </a> - </h2> - <t t-call="runbot_merge.stagings"/> - <t t-set="splits" t-value="branch.split_ids"/> - <t t-set="ready_unstaged" t-value=" - project.env['runbot_merge.pull_requests'].search([ - ('target', '=', branch.id), - ('state', '=', 'ready'), - ('staging_id', '=', False), - ]) - splits.mapped('batch_ids.prs') - "/> - <t t-set="ready" t-value="ready_unstaged.filtered(lambda p: not p.blocked)"/> - <t t-set="blocked" t-value="ready_unstaged.filtered(lambda p: p.blocked)"/> - <div t-if="splits" class="splits bg-warning pr-awaiting"> - <h5> - Splits - <small class="text-muted">will be staged next</small> - </h5> - <ul> - <li t-foreach="splits" t-as="split"> - <ul class="pr-listing list-inline list-unstyled mb0"> - <li t-foreach="split.mapped('batch_ids.prs')" t-as="pr"> - <t t-call="runbot_merge.link-pr"/> - </li> - </ul> - </li> - </ul> - </div> - <div t-if="ready" class="pr-listing pr-awaiting bg-warning"> - <h5>Awaiting</h5> - <ul class="list-inline"> - <li t-foreach="ready" t-as="pr"> - <t t-call="runbot_merge.link-pr"/> - </li> - </ul> - </div> - <div t-if="blocked" class="pr-listing pr-blocked bg-info"> - <h5>Blocked</h5> - <ul class="list-inline"> - <li t-foreach="blocked" t-as="pr"> - <t t-call="runbot_merge.link-pr"/> - </li> - </ul> - </div> - <t t-set="failed" t-value=" - project.env['runbot_merge.pull_requests'].search([ - ('target', '=', branch.id), - ('state', '=', 'error'), - ('staging_id', '=', False), - ]) - "/> - <div t-if="failed" class="pr-listing pr-failed bg-danger"> - <h5>Failed</h5> - <ul class="list-inline"> - <li t-foreach="failed" t-as="pr"> - <t t-call="runbot_merge.link-pr"/> - </li> - </ul> - </div> - </section> - </section> - </div></div> - </t> - </template> - <template id="stagings" name="mergebot branch stagings"> - <t t-set="repo_statuses" t-value="branch.project_id.repo_ids.having_branch(branch).status_ids"/> - <ul class="list-unstyled stagings"> - <t t-foreach="branch.staging_ids.sorted(lambda s: s.staged_at, reverse=True)[:6]" t-as="staging"> - <t t-set="success" t-value="staging.state == 'success'"/> - <t t-set="failure" t-value="staging.state == 'failure'"/> - <t t-set="pending" t-value="staging.active and (not staging.state or staging.state == 'pending')"/> - <t t-set="stateclass"> - <t t-if="success">bg-success <t t-if="staging.active">bg-unmerged</t></t> - <t t-if="failure">bg-danger</t> - <t t-if="pending">bg-info</t> - <t t-if="not (success or failure or pending)">bg-gray-lighter</t> - </t> - <t t-set="decorationclass" > - <t t-if="staging_index >= 2">hidden-xs</t> - <t t-if="staging_index >= 4">visible-lg-block</t> - </t> - <t t-set="title"> - <t t-if="staging.state == 'ff_failed'">fast forward failed (<t t-esc="staging.reason"/>)</t> - <t t-if="staging.state == 'pending'">last 
status</t> - </t> - <!-- separate concatenation to avoid having line-break in title as some browsers trigger it --> - <!-- write-date may have microsecond precision, strip that information --> - <!-- use write-date under assumption that a staging is last modified when it ends --> - <t t-set="title"><t t-esc="title.strip() or staging.reason"/> at <t t-esc="staging.write_date.replace(microsecond=0)"/>Z</t> - <li t-attf-class="staging {{stateclass.strip()}} {{decorationclass.strip()}}" t-att-title="title"> - <ul class="list-unstyled"> - <li t-foreach="staging.batch_ids" t-as="batch" class="batch"> - <t t-esc="batch.prs[:1].label"/> - <t t-foreach="batch.prs" t-as="pr"> - <t t-call="runbot_merge.link-pr"/> - </t> - </li> - </ul> - <t t-call="runbot_merge.staging-statuses"> - Staged <span t-field="staging.staged_at" t-options="{'widget': 'relative'}"/> - </t> - </li> - </t> - </ul> - </template> - <template id="branch_stagings" name="mergebot stagings page"> - <t t-set="repo_statuses" t-value="branch.project_id.repo_ids.having_branch(branch).status_ids"/> - <t t-call="website.layout"> - <div id="wrap"><div class="container-fluid"> - <section class="row"> - <h1 class="col-md-12"><t t-esc="branch.project_id.name"/>: <t t-esc="branch.name"/></h1> - </section> - <table> - <t t-foreach="stagings" t-as="staging"> - <t t-set="success" - t-value="staging.state == 'success'"/> - <t t-set="failure" - t-value="staging.state == 'failure'"/> - <t t-set="pending" - t-value="staging.active and (not staging.state or staging.state == 'pending')"/> - <t t-set="stateclass"> - <t t-if="success">bg-success</t> - <t t-if="failure">bg-danger</t> - <t t-if="pending">bg-info</t> - <t t-if="not (success or failure or pending)"> - bg-gray-lighter - </t> - </t> - <t t-set="title"> - <t t-if="staging.state == 'canceled'">Cancelled: - <t t-esc="staging.reason"/> - </t> - <t t-if="staging.state == 'ff_failed'">Fast - Forward Failed - </t> - <t t-if="staging.state not in ('canceled', 'ff_failed')"> - <t t-esc="staging.reason"/> - </t> - </t> - <tr t-att-class="stateclass" - style="border-bottom: 1px solid gainsboro; vertical-align: top"> - <th t-att-title="title.strip() or None"> - <t t-if="not staging.heads"> - <span t-field="staging.staged_at" - t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/> - </t> - <t t-call="runbot_merge.staging-statuses"> - <span t-field="staging.staged_at" - t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/> - </t> - </th> - <td> - <ul class="list-inline list-unstyled mb0"> - <t t-foreach="staging.batch_ids" - t-as="batch"> - <t t-set="first_pr" - t-value="batch.prs[-1:]"/> - <li class="dropdown" t-if="first_pr"> - <button class="btn btn-link dropdown-toggle" - type="button" - data-toggle="dropdown" - aria-haspopup="true" - aria-expanded="true" - > - <t t-esc="first_pr.label"/> - <span class="caret"></span> - </button> - <ul class="dropdown-menu"> - <li t-foreach="batch.prs" t-as="pr"> - <t t-call="runbot_merge.link-pr"> - <t t-set="target">new</t> - </t> - </li> - </ul> - </li> - </t> - </ul> - </td> - </tr> - </t> - </table> - <t t-if="next"> - <a t-attf-href="/runbot_merge/{{branch.id}}?until={{next}}"> - Next > - </a> - </t> - </div></div> - </t> - </template> - <template id="changelog" name="mergebot changelog"> - <t t-call="website.layout"> - <div id="wrap"><div class="container-fluid"> - <h1>Changelog</h1> - <section t-foreach="entries" t-as="entry"> - <h3 t-if="not entry_first" t-esc="entry"/> - <ul> - <li t-foreach="sorted(entry_value)" t-as="item"> - <t t-out="item"/> - </li> - </ul> 
- </section> - </div></div> - </t> - </template> - - <template id="view_pull_request_info_merged"> - <div class="alert alert-success"> - Merged - <t t-if="merged_head"> - at <a t-attf-href="https://github.com/{{pr.repository.name}}/commit/{{merged_head}}"><t t-esc="merged_head"/></a> - </t> - - <t t-set="linked_prs" t-value="pr._linked_prs"/> - <div t-if="linked_prs"> - Linked pull requests - <ul> - <li t-foreach="linked_prs" t-as="linked"> - <a t-att-href="linked.url" t-field="linked.display_name"/> - </li> - </ul> - </div> - </div> - </template> - <template id="view_pull_request_info_closed"> - <div class="alert alert-light"> - Closed - </div> - </template> - <template id="view_pull_request_info_error"> - <div class="alert alert-danger"> - Error: - <span t-esc="pr.with_context(active_test=False).batch_ids[-1:].staging_id.reason"> - Unable to stage PR - </span> - </div> - </template> - <template id="view_pull_request_info_staging"> - <div class="alert alert-primary"> - Staged <span t-field="pr.staging_id.staged_at" t-options="{'widget': 'relative'}"/>. - - <t t-set="linked_prs" t-value="pr._linked_prs"/> - <div t-if="linked_prs"> - Linked pull requests - <ul> - <li t-foreach="linked_prs" t-as="linked"> - <a t-att-href="linked.url" t-field="linked.display_name"/> - </li> - </ul> - </div> - </div> - </template> - <template id="view_pull_request_info_open"> - <!-- nb: replicates `blocked`, maybe that should be split into various criteria? --> - <div class="alert alert-info"> - <p t-if="pr.blocked" class="alert-danger">Blocked</p> - <p t-else="" class="alert-success">Ready (waiting for staging)</p> - <ul class="todo"> - <li t-att-class="'ok' if pr.squash or pr.merge_method else 'fail'"> - Merge method - </li> - <li t-att-class="'ok' if pr._approved else 'fail'"> - Review - </li> - <li t-att-class="'ok' if pr.state not in ('opened', 'approved') else ''"> - CI - <ul class="todo"> - <t t-foreach="pr.repository.status_ids._for_pr(pr)" t-as="ci"> - <t t-set="st" t-value="statuses.get(ci.context.strip())"/> - <t t-set="result"> - <t t-if="not st or st['state'] == 'pending'"></t> - <t t-elif="st['state'] in ('error', 'failure')">fail</t> - <t t-else="">ok</t> - </t> - <li t-att-class="result"> - <a t-att-href="st.get('target_url') if st else None"><t t-esc="ci.context.strip()"/></a><t t-if="st and st.get('description')">: <t t-esc="st['description']"/></t> - </li> - </t> - </ul> - </li> - <t t-set="linked_prs" t-value="pr._linked_prs"/> - <li t-if="linked_prs" t-att-class="'ok' if all(l._ready for l in linked_prs) else 'fail'"> - Linked pull requests - <ul class="todo"> - <t t-foreach="linked_prs" t-as="linked"> - <li t-att-class="'ok' if linked._ready else 'fail'"> - <a t-att-href="linked.url" t-field="linked.display_name"/> - </li> - </t> - </ul> - </li> - </ul> - </div> - </template> - - <template id="view_pull_request"> - <t t-call="website.layout"> - <div id="wrap"><div class="container-fluid"> - <t t-call="runbot_merge.alerts"/> - <h1> - <a t-att-href="pr.github_url" t-field="pr.display_name"> - </a> - <a t-attf-href="/web#view_type=form&model=runbot_merge.pull_requests&id={{pr.id}}" - class="btn btn-sm btn-secondary align-top float-right" - groups="base.group_user">View in backend</a> - </h1> - <h6>Created by <span t-field="pr.author.display_name"/></h6> - <t t-set="tmpl"> - <t t-if="pr.state in ('merged', 'closed', 'error')"><t t-esc="pr.state"/></t> - <t t-elif="pr.staging_id">staging</t> - <t t-else="">open</t> - </t> - <t t-call="runbot_merge.view_pull_request_info_{{tmpl.strip()}}"/> - 
<t t-set="target_cls" t-value="None if pr.target.active else 'text-muted bg-warning'"/> - <dl class="runbot-merge-fields"> - <dt>label</dt> - <dd><span t-field="pr.label"/></dd> - <dt>head</dt> - <dd><a t-attf-href="{{pr.github_url}}/commits/{{pr.head}}"><span t-field="pr.head"/></a></dd> - <dt t-att-class="target_cls">target</dt> - <dd t-att-class="target_cls"><span t-field="pr.target"/></dd> - </dl> - <p t-field="pr.message"/> - </div></div> - </t> - </template> -</odoo> diff --git a/runbot_populate/__init__.py b/runbot_populate/__init__.py deleted file mode 100644 index cde864ba..00000000 --- a/runbot_populate/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -from . import models diff --git a/runbot_populate/__manifest__.py b/runbot_populate/__manifest__.py deleted file mode 100644 index d86d3b56..00000000 --- a/runbot_populate/__manifest__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -{ - 'name': "runbot demo", - 'summary': "Runbot demo data", - 'description': "Runbot demo data", - 'author': "Odoo SA", - 'website': "http://runbot.odoo.com", - 'category': 'Website', - 'version': '1.0', - 'depends': ['runbot'], - 'demo': [ - 'demo/runbot_demo.xml', - ], - 'license': 'LGPL-3', -} diff --git a/runbot_populate/demo/runbot_demo.xml b/runbot_populate/demo/runbot_demo.xml deleted file mode 100644 index da55fc03..00000000 --- a/runbot_populate/demo/runbot_demo.xml +++ /dev/null @@ -1,160 +0,0 @@ -<?xml version="1.0" encoding="utf-8"?> -<data> - <!-- PROJECTS --> - <record model="runbot.project" id="project_runbot"> - <field name="name">runbot</field> - </record> - - <!-- REPOS --> - <record id="repo_odoo" model="runbot.repo"> - <field name="name">odoo</field> - <field name="project_id" ref="runbot.main_project"/> - <field name="server_files">odoo-bin</field> - <field name="manifest_files">__manifest__.py</field> - <field name="mode">hook</field> - <field name="addons_paths">addons,odoo/addons</field> - <field name="mode">disabled</field> - </record> - <record id="remote_odoo_odoo" model="runbot.remote"> - <field name="name">git@example.com:odoo/odoo</field> - <field name="repo_id" ref="repo_odoo"/> - </record> - <record id="remote_odoo_dev_odoo" model="runbot.remote"> - <field name="name">git@example.com:odoo-dev/odoo</field> - <field name="repo_id" ref="repo_odoo"/> - </record> - <record id="repo_enterprise" model="runbot.repo"> - <field name="name">enterprise</field> - <field name="project_id" ref="runbot.main_project"/> - <field name="server_files"></field> - <field name="manifest_files">__manifest__.py</field> - <field name="mode">hook</field> - <field name="addons_paths"></field> - <field name="mode">disabled</field> - </record> - <record id="remote_odoo_enterprise" model="runbot.remote"> - <field name="name">git@example.com:odoo/enterprise</field> - <field name="repo_id" ref="repo_enterprise"/> - </record> - <record id="remote_odoo_dev_enterprise" model="runbot.remote"> - <field name="name">git@example.com:odoo-dev/enterprise</field> - <field name="repo_id" ref="repo_enterprise"/> - </record> - - <record id="repo_runbot" model="runbot.repo"> - <field name="name">runbot</field> - <field name="project_id" ref="project_runbot"/> - <field name="server_files"></field> - <field name="manifest_files">__manifest__.py</field> - <field name="mode">hook</field> - <field name="addons_paths"></field> - <field name="mode">disabled</field> - </record> - <record id="remote_odoo_runbot" model="runbot.remote"> - <field name="name">git@example.com:odoo/runbot</field> - 
<field name="repo_id" ref="repo_runbot"/> - </record> - - <!-- BUNDLES --> - <record id="bundle_16_1" model="runbot.bundle"> - <field name="name">saas-16.1</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_16" model="runbot.bundle"> - <field name="name">16.0</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_15_4" model="runbot.bundle"> - <field name="name">saas-15.4</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_15" model="runbot.bundle"> - <field name="name">15.0</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_14" model="runbot.bundle"> - <field name="name">14.0</field> - <field name="is_base">True</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_13" model="runbot.bundle"> - <field name="name">13.0</field> - <field name="is_base">True</field> - <field name="sticky">False</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_master_dev_tri" model="runbot.bundle"> - <field name="name">master-dev-tri</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_15_0_dev_tri" model="runbot.bundle"> - <field name="name">15.0-dev-tri</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - <record id="bundle_master_dev_partial_tri" model="runbot.bundle"> - <field name="name">master-dev-partial-tri</field> - <field name="project_id" ref="runbot.main_project"/> - </record> - - - <record id="bundle_runbot_15_0" model="runbot.bundle"> - <field name="name">15.0</field> - <field name="is_base">True</field> - <field name="project_id" ref="project_runbot"/> - </record> - <record id="bundle_runbot_13_0" model="runbot.bundle"> - <field name="name">13.0</field> - <field name="is_base">True</field> - <field name="project_id" ref="project_runbot"/> - </record> - <record id="bundle_runbot_13_dev_tri" model="runbot.bundle"> - <field name="name">13.0-dev-tri</field> - <field name="project_id" ref="project_runbot"/> - </record> - - <!-- Triggers--> - - <record id="trigger_default_odoo" model="runbot.trigger"> - <field name="name">odoo</field> - <field name="category_id" ref="runbot.default_category"/> - <field name="project_id" ref="runbot.main_project"/> - <field name="config_id" ref="runbot.runbot_build_config_default"/> - <field name="repo_ids" eval="[(4, ref('repo_odoo'))]"/> - <field name="repo_ids" eval="[(4, ref('repo_odoo'))]"/> - <field name="ci_context"/> - </record> - <record id="trigger_default_enterprise" model="runbot.trigger"> - <field name="name">enterprise</field> - <field name="category_id" ref="runbot.default_category"/> - <field name="project_id" ref="runbot.main_project"/> - <field name="config_id" ref="runbot.runbot_build_config_default"/> - <field name="repo_ids" eval="[(4, ref('repo_enterprise'))]"/> - <field name="dependency_ids" eval="[(4, ref('repo_odoo'))]"/> - <field name="ci_context"/> - </record> - <record id="trigger_default_runbot" model="runbot.trigger"> - <field name="name">enterprise</field> - <field name="category_id" ref="runbot.default_category"/> - <field name="project_id" ref="project_runbot"/> - <field name="config_id" ref="runbot.runbot_build_config_default"/> - <field name="repo_ids" eval="[(4, 
ref('repo_runbot'))]"/> - <field name="dependency_ids" eval="[(4, ref('repo_odoo'))]"/> - <field name="ci_context"/> - </record> - - - <record id="runbot_build_config_linting" model="runbot.build.config"> - <field name="name">Linting</field> - </record> - <record id="runbot_build_config_security" model="runbot.build.config"> - <field name="name">Security</field> - </record> - - <function model="runbot.runbot" name="_create_demo_data"> - </function> - -</data> diff --git a/runbot_populate/models/__init__.py b/runbot_populate/models/__init__.py deleted file mode 100644 index 935e42e8..00000000 --- a/runbot_populate/models/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -from . import runbot diff --git a/runbot_populate/models/runbot.py b/runbot_populate/models/runbot.py deleted file mode 100644 index ed9ab36f..00000000 --- a/runbot_populate/models/runbot.py +++ /dev/null @@ -1,135 +0,0 @@ -from odoo import models, fields, api -from unittest.mock import patch -from odoo.tools import mute_logger - -import logging -_logger = logging.getLogger(__name__) - -# after this point, not realy a repo buisness -class Runbot(models.AbstractModel): - _inherit = 'runbot.runbot' - - @api.model - @patch('odoo.addons.runbot.models.repo.Remote._github') - @patch('odoo.addons.runbot.models.repo.Repo._git') - def _create_demo_data(self, mock_git, mock_github): - mock_github.return_value = False - bundles = self.env['runbot.bundle'].browse( - self.env['ir.model.data'].search([ - ('module', '=', 'runbot_populate'), ('model', '=', 'runbot.bundle') - ]).mapped('res_id') - ) - bundles |= self.env.ref('runbot.bundle_master') - bundles = bundles.sorted('is_base', reverse=True) - - assert bundles|self.env.ref('runbot.bundle_dummy') == bundles.search([]) - - if bundles.branch_ids: - # only populate data if no branch are found - return - - if not bundles.branch_ids: - pr = True - count = 1000 - for bundle in bundles: - _logger.info(bundle.name) - for repo in bundle.project_id.repo_ids: - main_remote = repo.main_remote_id - dev_remote = next((remote for remote in repo.remote_ids if remote != main_remote), main_remote) - if bundle.is_base: - dev_remote = main_remote - self.env['runbot.branch'].create({'remote_id': dev_remote.id, 'name': bundle.name, 'is_pr': False}) - if not bundle.is_base: - mock_github.return_value = { - 'base': { - 'ref': bundle.base_id.name - }, - 'head': { - 'label': '%s:%s' % (dev_remote.owner, bundle.name), - 'repo': {'full_name': '%s/%s' % (dev_remote.owner, dev_remote.repo_name)} - } - } - branch = self.env['runbot.branch'].create({ - 'remote_id': main_remote.id, - 'name': str(count), - 'is_pr': True, - }) - count += 1 - branch.flush() - - if 'partial' in bundle.name: - break - - if not bundle.is_base: - pr = not pr - - security_config = self.env.ref('runbot_populate.runbot_build_config_security') - linting_config = self.env.ref('runbot_populate.runbot_build_config_linting') - - for bundle in bundles: - nb_batch = 4 if bundle.sticky else 2 - for i in range(nb_batch): - values = { - 'last_update': fields.Datetime.now(), - 'bundle_id': bundle.id, - 'state': 'preparing', - } - batch = self.env['runbot.batch'].create(values) - bundle.last_batch = batch - for repo in bundle.project_id.repo_ids: - commit = self.env['runbot.commit']._get('%s00b%s0000ba%s000' % (repo.id, bundle.id, batch.id), repo.id, { - 'author': 'Author', - 'author_email': 'author@example.com', - 'committer': 'Committer', - 'committer_email': 'committer@example.com', - 'subject': '[IMP] core: come imp', - 'date': 
fields.Datetime.now(), - }) - branches = bundle.branch_ids.filtered(lambda b: b.remote_id.repo_id == repo) - for branch in branches: - branch.head = commit - batch._new_commit(branch) - - def git(command): - if command[0] == 'merge-base': - _, sha1, sha2 = command - return sha1 if sha1 == sha2 else sha2 #if bundle.is_base else '%s_%s' % (sha1, sha2) - elif command[0] == 'rev-list': - _, _, _, shas = command - sha1, sha2 = shas.split('...') - return '0\t0' if command[1] == command[2] else '3\t5' - elif command[0] == 'diff': - _, _, sha1, sha2 = command - return '' if sha1 == sha2 else '0 5 _\n1 8 _' - else: - _logger.info(command) - - mock_git.side_effect = git - with mute_logger('odoo.addons.runbot.models.batch'): - batch._prepare() - - if i != nb_batch - 1: - for slot in batch.slot_ids: - if slot.build_id: - build = slot.build_id - with mute_logger('odoo.addons.runbot.models.build'): - build._log('******','Starting step X', level='SEPARATOR') - build._log('******','Some log') - for config in (linting_config, security_config): - child = build._add_child({'config_id': config.id}) - build._log('create_build', 'created with config %s' % config.name, log_type='subbuild', path=str(child.id)) - child.local_state = 'done' - child.local_result = 'ok' - child.description = "Description for security" - build._log('******','Step x finished') - build._log('******','Starting step Y', level='SEPARATOR') - build._log('******','Some log', level='ERROR') - build._log('******','Some log\n with multiple lines', level='ERROR') - build._log('******','**Some** *markdown* [log](http://example.com)', log_type='markdown') - build._log('******','Step x finished', level='SEPARATOR') - - build.local_state = 'done' - build.local_result = 'ok' if bundle.sticky else 'ko' - - - batch._process()
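
For reference, the deleted `_create_demo_data` above fakes git by patching `Repo._git` with a `side_effect` callable that dispatches on the git subcommand. A minimal, self-contained sketch of that same pattern, assuming a hypothetical `run_git()` wrapper in place of the real `Repo._git` (names below are illustrative, not part of the removed module):

    # Sketch only: reproduces the mock-git dispatch used by the removed demo-data populator.
    from unittest.mock import patch

    def run_git(command):
        """Hypothetical stand-in for the real git wrapper; normally shells out to git."""
        raise RuntimeError("would invoke git: %r" % (command,))

    def fake_git(command):
        # Dispatch on the git subcommand, mirroring the side_effect in the removed code.
        if command[0] == 'merge-base':
            _, sha1, sha2 = command
            return sha1 if sha1 == sha2 else sha2
        if command[0] == 'rev-list':
            return '0\t0' if command[1] == command[2] else '3\t5'
        if command[0] == 'diff':
            _, _, sha1, sha2 = command
            return '' if sha1 == sha2 else '0 5 _\n1 8 _'
        return ''

    with patch(__name__ + '.run_git', side_effect=fake_git) as mock_git:
        assert run_git(['merge-base', 'a' * 40, 'a' * 40]) == 'a' * 40
        assert run_git(['rev-list', 'x', 'y', 'x...y']) == '3\t5'
        assert mock_git.call_count == 2

With the wrapper patched this way, the populate code can build branches, commits and batches without any real repository, which is why the demo module only needs `mute_logger` and mocks rather than fixtures on disk.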