From d8609f7c6b317445c44092cc01db377675285f0b Mon Sep 17 00:00:00 2001
From: Xavier-Do
Date: Mon, 18 Sep 2023 14:37:30 +0200
Subject: [PATCH] [FIX] runbot: various fixes and ref

- clean thread username
- allow writing on params for debug (forbidding it was mainly useful at the beginning)
- improve some guidelines about method and action naming/ordering
- move some code for a cleaner organisation
- remove some useless request.env.user (not needed anymore)
---
 runbot/__init__.py                           |   5 +-
 runbot/__manifest__.py                       |   2 +-
 runbot/common.py                             |  37 +++-
 runbot/container.py                          |  31 +---
 runbot/controllers/badge.py                  |   2 +-
 runbot/controllers/frontend.py               |   2 +-
 runbot/controllers/hook.py                   |   6 +-
 runbot/data/error_link.xml                   |   6 +-
 runbot/migrations/16.0.5.4/post-migration.py |  82 +++++++++
 runbot/models/__init__.py                    |   2 +-
 runbot/models/batch.py                       |  38 ++---
 runbot/models/branch.py                      |  17 +-
 runbot/models/build.py                       | 143 +++++++---------
 runbot/models/build_config.py                | 119 ++++++-------
 runbot/models/build_config_codeowner.py      |   3 +-
 runbot/models/build_error.py                 |  87 +++++-----
 runbot/models/build_stat_regex.py            |   3 +-
 runbot/models/bundle.py                      |  47 ++---
 runbot/models/commit.py                      |  15 +-
 runbot/models/custom_trigger.py              |   8 +-
 runbot/models/host.py                        |  36 ++--
 runbot/models/{event.py => ir_logging.py}    |  20 +--
 runbot/models/module.py                      |   0
 runbot/models/repo.py                        |  63 +++----
 runbot/models/runbot.py                      |  67 ++++----
 runbot/models/team.py                        |   4 +-
 runbot/templates/build.xml                   |   8 +-
 runbot/templates/bundle.xml                  |   4 +-
 runbot/templates/dashboard.xml               |   6 +-
 runbot/templates/frontend.xml                |   2 +-
 runbot/templates/utils.xml                   |   4 +-
 runbot/tests/common.py                       |   5 +-
 runbot/tests/test_build.py                   |  16 +-
 runbot/tests/test_build_config_step.py       |  67 ++++----
 runbot/tests/test_build_error.py             |  16 +-
 runbot/tests/test_repo.py                    |  18 +-
 runbot/tests/test_runbot.py                  |   2 +-
 runbot/tests/test_upgrade.py                 |   2 +-
 runbot/views/branch_views.xml                |   2 +-
 runbot/views/build_error_views.xml           |   2 +-
 runbot/views/bundle_views.xml                |   4 +-
 runbot/views/custom_trigger_wizard_views.xml |   2 +-
 runbot/views/repo_views.xml                  |   2 +-
 runbot/wizards/stat_regex_wizard.py          |   2 +-
 runbot/wizards/stat_regex_wizard_views.xml   |   2 +-
 runbot_builder/builder.py                    |   4 +-
 runbot_builder/dbmover.py                    | 171 -------------------
 runbot_builder/tools.py                      |   4 +-
 runbot_cla/build_config.py                   |   2 +-
 49 files changed, 530 insertions(+), 662 deletions(-)
 create mode 100644 runbot/migrations/16.0.5.4/post-migration.py
 rename runbot/models/{event.py => ir_logging.py} (98%)
 create mode 100644 runbot/models/module.py
 delete mode 100755 runbot_builder/dbmover.py

diff --git a/runbot/__init__.py b/runbot/__init__.py
index 3d052492..5e574231 100644
--- a/runbot/__init__.py
+++ b/runbot/__init__.py
@@ -17,7 +17,10 @@ class UserFilter(logging.Filter):
         uid = getattr(threading.current_thread(), 'uid', None)
         if uid is None:
             return True
-        user_name = getattr(threading.current_thread(), 'user_name', 'user')
+        user_name = 'user'
+        if hasattr(threading.current_thread(), 'user_name'):
+            user_name = threading.current_thread().user_name
+            del(threading.current_thread().user_name)
         message_parts[1] = f'({user_name}:{uid})'
         record.msg = ' '.join(message_parts)
         return True
diff --git a/runbot/__manifest__.py b/runbot/__manifest__.py
index 51cfb3bf..6ad3c006 100644
--- a/runbot/__manifest__.py
+++ b/runbot/__manifest__.py
@@ -6,7 +6,7 @@
     'author': "Odoo SA",
     'website': "http://runbot.odoo.com",
     'category': 'Website',
-    'version': '5.3',
+    'version': '5.4',
     'application': True,
     'depends': ['base', 'base_automation', 'website'],
     'data': [
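Note on the "clean thread username" change above: the filter now consumes the thread-local user_name after its first use, so a pooled worker thread cannot leak a stale name into later log lines. A minimal self-contained sketch follows; the filter body mirrors the patched runbot/__init__.py, but the message layout ('name (user:uid) text') and the demo harness are assumptions, not part of the patch.

import logging
import threading

class UserFilter(logging.Filter):
    def filter(self, record):
        message_parts = record.msg.split(' ')  # assumed layout: 'name (user:uid) text'
        uid = getattr(threading.current_thread(), 'uid', None)
        if uid is None:
            return True
        user_name = 'user'
        if hasattr(threading.current_thread(), 'user_name'):
            user_name = threading.current_thread().user_name
            del threading.current_thread().user_name  # consumed: no stale name on thread reuse
        message_parts[1] = f'({user_name}:{uid})'
        record.msg = ' '.join(message_parts)
        return True

logging.basicConfig(format='%(message)s', level=logging.INFO)
logger = logging.getLogger('demo')
logger.addFilter(UserFilter())
threading.current_thread().uid = 42
threading.current_thread().user_name = 'admin'
logger.info('runbot (user:uid) first message')   # -> runbot (admin:42) first message
logger.info('runbot (user:uid) second message')  # -> runbot (user:42) second message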
diff --git a/runbot/common.py b/runbot/common.py
index 82ea8926..6c33b548 100644
--- a/runbot/common.py
+++ b/runbot/common.py
@@ -15,7 +15,7 @@ from datetime import timedelta
 from babel.dates import format_timedelta
 from markupsafe import Markup
 
-from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT, html_escape
+from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT, html_escape, file_open
 
 _logger = logging.getLogger(__name__)
 
@@ -44,7 +44,7 @@ def now():
 
 
 def findall(filename, pattern):
-    return set(re.findall(pattern, open(filename).read()))
+    return set(re.findall(pattern, file_open(filename).read()))
 
 
 def grep(filename, string):
@@ -54,7 +54,7 @@ def grep(filename, string):
 
 
 def find(filename, string):
-    return open(filename).read().find(string)
+    return file_open(filename).read().find(string)
 
 
 def uniq_list(l):
@@ -69,7 +69,7 @@ def rfind(filename, pattern):
     """Determine in something in filename matches the pattern"""
     if os.path.isfile(filename):
         regexp = re.compile(pattern, re.M)
-        with open(filename, 'r') as f:
+        with file_open(filename, 'r') as f:
             if regexp.findall(f.read()):
                 return True
     return False
@@ -169,9 +169,36 @@ def pseudo_markdown(text):
     return text
 
 
-def _make_github_session(token):
+def make_github_session(token):
     session = requests.Session()
     if token:
         session.auth = (token, 'x-oauth-basic')
     session.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
     return session
+
+def sanitize(name):
+    for i in ['@', ':', '/', '\\', '..']:
+        name = name.replace(i, '_')
+    return name
+
+
+class ReProxy():
+    @classmethod
+    def match(cls, *args, **kwargs):
+        return re.match(*args, **kwargs)
+
+    @classmethod
+    def search(cls, *args, **kwargs):
+        return re.search(*args, **kwargs)
+
+    @classmethod
+    def compile(cls, *args, **kwargs):
+        return re.compile(*args, **kwargs)
+
+    @classmethod
+    def findall(cls, *args, **kwargs):
+        return re.findall(*args, **kwargs)
+
+    VERBOSE = re.VERBOSE
+    MULTILINE = re.MULTILINE
+
diff --git a/runbot/container.py b/runbot/container.py
index d17ae7a9..c9152fa8 100644
--- a/runbot/container.py
+++ b/runbot/container.py
@@ -17,6 +17,8 @@ import subprocess
 import time
 import warnings
 
+from odoo.tools import file_path
+
 # unsolved issue https://github.com/docker/docker-py/issues/2928
 with warnings.catch_warnings():
     warnings.filterwarnings(
@@ -148,6 +150,9 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
     _logger.info('Docker run command: %s', run_cmd)
     run_cmd = 'cd /data/build;touch start-%s;%s;cd /data/build;touch end-%s' % (container_name, run_cmd, container_name)
     docker_clear_state(container_name, build_dir)  # ensure that no state are remaining
+    build_dir = file_path(build_dir)
+
+    file_path(os.path.dirname(log_path))
     open(os.path.join(build_dir, 'exist-%s' % container_name), 'w+').close()
     logs = open(log_path, 'w')
     logs.write("Docker command:\n%s\n=================================================\n" % cmd_object)
@@ -190,7 +195,7 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
         detach=True
     )
     if container.status not in ('running', 'created') :
-        _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)  # TODO cleanup
+        _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)
     else:
         _logger.info('Started Docker container %s (%s)', container_name, container.short_id)
     return
@@ -288,27 +293,3 @@ def sanitize_container_name(name):
     """Returns a container name with unallowed characters removed"""
     name = re.sub('^[^a-zA-Z0-9]+', '', name)
     return re.sub('[^a-zA-Z0-9_.-]', '', name)
-
-
-##############################################################################
-# Ugly monkey patch to set runbot in set runbot in testing mode
-# No Docker will be started, instead a fake docker_run function will be used
-##############################################################################
-
-if os.environ.get('RUNBOT_MODE') == 'test':
-    _logger.warning('Using Fake Docker')
-
-    def fake_docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None, env_variables=None, *args, **kwargs):
-        _logger.info('Docker Fake Run: %s', run_cmd)
-        open(os.path.join(build_dir, 'exist-%s' % container_name), 'w').write('fake end')
-        open(os.path.join(build_dir, 'start-%s' % container_name), 'w').write('fake start\n')
-        open(os.path.join(build_dir, 'end-%s' % container_name), 'w').write('fake end')
-        with open(log_path, 'w') as log_file:
-            log_file.write('Fake docker_run started\n')
-            log_file.write('run_cmd: %s\n' % run_cmd)
-            log_file.write('build_dir: %s\n' % container_name)
-            log_file.write('container_name: %s\n' % container_name)
-            log_file.write('.modules.loading: Modules loaded.\n')
-            log_file.write('Initiating shutdown\n')
-
-    docker_run = fake_docker_run
diff --git a/runbot/controllers/badge.py b/runbot/controllers/badge.py
index b5de642d..fa6e031e 100644
--- a/runbot/controllers/badge.py
+++ b/runbot/controllers/badge.py
@@ -44,7 +44,7 @@ class RunbotBadge(Controller):
         if not builds:
             state = 'testing'
         else:
-            result = builds.result_multi()
+            result = builds._result_multi()
             if result == 'ok':
                 state = 'success'
             elif result == 'warn':
diff --git a/runbot/controllers/frontend.py b/runbot/controllers/frontend.py
index 810f21b7..bd5abffc 100644
--- a/runbot/controllers/frontend.py
+++ b/runbot/controllers/frontend.py
@@ -395,7 +395,7 @@ class Runbot(Controller):
             'scheduled_count': scheduled_count,
             'bundles': bundles,
             'hosts_data': hosts_data,
-            'auto_tags': request.env['runbot.build.error'].disabling_tags(),
+            'auto_tags': request.env['runbot.build.error']._disabling_tags(),
             'build_errors': request.env['runbot.build.error'].search([('random', '=', True)]),
             'kwargs': kwargs,
             'title': 'monitoring'
diff --git a/runbot/controllers/hook.py b/runbot/controllers/hook.py
index e521c93e..d46c7f69 100644
--- a/runbot/controllers/hook.py
+++ b/runbot/controllers/hook.py
@@ -35,13 +35,13 @@ class Hook(http.Controller):
 
         # force update of dependencies too in case a hook is lost
         if not payload or event == 'push':
-            remote.repo_id.set_hook_time(time.time())
+            remote.repo_id._set_hook_time(time.time())
         elif event == 'pull_request':
             pr_number = payload.get('pull_request', {}).get('number', '')
             branch = request.env['runbot.branch'].sudo().search([('remote_id', '=', remote.id), ('name', '=', pr_number)])
-            branch.recompute_infos(payload.get('pull_request', {}))
+            branch._recompute_infos(payload.get('pull_request', {}))
             if payload.get('action') in ('synchronize', 'opened', 'reopened'):
-                remote.repo_id.set_hook_time(time.time())
+                remote.repo_id._set_hook_time(time.time())
             # remaining recurrent actions: labeled, review_requested, review_request_removed
         elif event == 'delete':
             if payload.get('ref_type') == 'branch':
diff --git a/runbot/data/error_link.xml b/runbot/data/error_link.xml
index 2c4c7d79..bc5ade63 100644
--- a/runbot/data/error_link.xml
+++ b/runbot/data/error_link.xml
@@ -6,7 +6,7 @@
         ir.actions.server
         code
-        records.link_errors()
+        records.action_link_errors()
@@ -16,7 +16,7 @@
         ir.actions.server
         code
-        records.clean_content()
+        records.action_clean_content()
@@ -26,7 +26,7 @@
         ir.actions.server
         code
-        records.assign()
+        records.action_assign()
diff --git a/runbot/migrations/16.0.5.4/post-migration.py b/runbot/migrations/16.0.5.4/post-migration.py
new file mode 100644
index 00000000..87cdea26
--- /dev/null
+++ b/runbot/migrations/16.0.5.4/post-migration.py
@@ -0,0 +1,82 @@
+import logging
+
+from odoo import api, SUPERUSER_ID
+
+_logger = logging.getLogger(__name__)
+
+
+def migrate(cr, version):
+    env = api.Environment(cr, SUPERUSER_ID, {})
+    private = [
+        'set_hook_time',
+        'set_ref_time',
+        'check_token',
+        'get_version_domain',
+        'get_builds',
+        'get_build_domain',
+        'disable',
+        'set_psql_conn_count',
+        'get_running_max',
+        'branch_groups',
+        'consistency_warning',
+        'fa_link_type',
+        'make_python_ctx',
+        'parse_config',
+        'get_color_class',
+        'get_formated_build_time',
+        'filter_patterns',
+        'http_log_url',
+        'result_multi',
+        'match_is_base',
+        'link_errors',
+        'clean_content',
+        'test_tags_list',
+        'disabling_tags',
+        'step_ids',
+        'recompute_infos',
+        'warning',
+        'is_file',
+    ]
+    removed = [
+        "get_formated_build_age",
+        "get_formated_job_time",
+        "make_dirs",
+        "build_type_label",
+    ]
+    for method in private:
+        pattern = f'.{method}('
+        replacepattern = f'._{method}('
+        views = env['ir.ui.view'].search([('arch_db', 'like', pattern)])
+        if views:
+            _logger.info(f'Some views contain "{pattern}": {views}')
+            for view in views:
+                view.arch_db = view.arch_db.replace(pattern, replacepattern)
+
+    for method in removed:
+        pattern = f'.{method}('
+        views = env['ir.ui.view'].search([('arch_db', 'like', pattern)])
+        if views:
+            _logger.error(f'Some views contain "{pattern}": {views}')
+
+    for method in removed:
+        pattern = f'.{method}('
+        steps = env['runbot.build.config.step'].search(['|', ('python_code', 'like', pattern), ('python_result_code', 'like', pattern)])
+        if steps:
+            _logger.error(f'Some steps contain "{pattern}": {steps}')
+
+    for method in private:
+        pattern = f'.{method}('
+        replacepattern = f'._{method}('
+        steps = env['runbot.build.config.step'].search(['|', ('python_code', 'like', pattern), ('python_result_code', 'like', pattern)])
+        for step in steps:
+            python_code = pattern in step.python_code
+            python_result_code = pattern in step.python_result_code
+            if replacepattern not in step.python_code and python_code:
+                _logger.warning(f'Some step python_code contains "{pattern}": {step}')
+                python_code = False
+            if replacepattern not in step.python_result_code and python_result_code:
+                _logger.warning(f'Some step python_result_code contains "{pattern}": {step}')
+                python_result_code = False
+
+            if python_code or python_result_code:
+                _logger.info(f'Some step python_code contains "{pattern}": {step} but looks like it was adapted')
diff --git a/runbot/models/__init__.py b/runbot/models/__init__.py
index 8fee1421..45c17f34 100644
--- a/runbot/models/__init__.py
+++ b/runbot/models/__init__.py
@@ -12,11 +12,11 @@ from . import commit
 from . import custom_trigger
 from . import database
 from . import dockerfile
-from . import event
 from . import host
 from . import ir_cron
 from . import ir_http
 from . import ir_qweb
+from . import ir_logging
 from . import project
 from . import repo
 from . import res_config_settings
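Note: the migration above only rewrites code that was not already adapted — a '.method(' occurrence is replaced (or flagged) only when the private '._method(' form is absent. A standalone sketch of that guard, with no Odoo environment and illustrative inputs:

# Stand-alone sketch of the rename guard used by post-migration.py above.
private = ['result_multi', 'warning']

def adapt(source):
    for method in private:
        pattern = f'.{method}('
        replacepattern = f'._{method}('
        if pattern in source and replacepattern not in source:
            source = source.replace(pattern, replacepattern)
    return source

print(adapt("state = builds.result_multi()"))    # -> state = builds._result_multi()
print(adapt("self._warning('already adapted')")) # unchanged: private form already present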
diff --git a/runbot/models/batch.py b/runbot/models/batch.py
index ac240b8e..0d064ab6 100644
--- a/runbot/models/batch.py
+++ b/runbot/models/batch.py
@@ -47,7 +47,7 @@ class Batch(models.Model):
         else:
             batch.buildage_age = 0
 
-    def get_formated_age(self):
+    def _get_formated_age(self):
         return s2human_long(self.age)
 
     def _url(self):
@@ -154,9 +154,9 @@ class Batch(models.Model):
         if not self.bundle_id.base_id:
             # in some case the base can be detected lately. If a bundle has no base, recompute the base before preparing
             self.bundle_id._compute_base_id()
-        for level, message in self.bundle_id.consistency_warning():
+        for level, message in self.bundle_id._consistency_warning():
             if level == "warning":
-                self.warning("Bundle warning: %s" % message)
+                self._warning("Bundle warning: %s" % message)
 
         self.state = 'ready'
 
@@ -174,7 +174,7 @@ class Batch(models.Model):
             ('category_id', '=', self.category_id.id)
         ]).filtered(
             lambda t: not t.version_domain or \
-            self.bundle_id.version_id.filtered_domain(t.get_version_domain())
+            self.bundle_id.version_id.filtered_domain(t._get_version_domain())
         )
 
         pushed_repo = self.commit_link_ids.mapped('commit_id.repo_id')
@@ -185,7 +185,7 @@ class Batch(models.Model):
         ######################################
         # Find missing commits
         ######################################
-        def fill_missing(branch_commits, match_type):
+        def _fill_missing(branch_commits, match_type):
             if branch_commits:
                 for branch, commit in branch_commits.items():  # branch first in case pr is closed.
                     nonlocal missing_repos
@@ -218,7 +218,7 @@ class Batch(models.Model):
 
         # 1.1 FIND missing commit in bundle heads
         if missing_repos:
-            fill_missing({branch: branch.head for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True)}, 'head')
+            _fill_missing({branch: branch.head for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True)}, 'head')
 
         # 1.2 FIND merge_base info for those commits
         # use last not preparing batch to define previous repos_heads instead of branches heads:
@@ -253,14 +253,14 @@ class Batch(models.Model):
             if batch:
                 if missing_repos:
                     self._log('Using batch [%s](%s) to define missing commits', batch.id, batch._url())
-                    fill_missing({link.branch_id: link.commit_id for link in batch.commit_link_ids}, 'base_match')
+                    _fill_missing({link.branch_id: link.commit_id for link in batch.commit_link_ids}, 'base_match')
                 # check if all mergebase match reference batch
                 batch_exiting_commit = batch.commit_ids.filtered(lambda c: c.repo_id in merge_base_commits.repo_id)
                 not_matching = (batch_exiting_commit - merge_base_commits)
                 if not_matching and not auto_rebase:
                     message = 'Only %s out of %s merge base matched. You may want to rebase your branches to ensure compatibility' % (len(merge_base_commits)-len(not_matching), len(merge_base_commits))
                     suggestions = [('Tip: rebase %s to %s' % (commit.repo_id.name, commit.name)) for commit in not_matching]
-                    self.warning('%s\n%s' % (message, '\n'.join(suggestions)))
+                    self._warning('%s\n%s' % (message, '\n'.join(suggestions)))
             else:
                 self._log('No reference batch found to fill missing commits')
 
@@ -268,14 +268,14 @@ class Batch(models.Model):
         if missing_repos:
             if not bundle.is_base:
                 self._log('Not all commit found in bundle branches and base batch. Fallback on base branches heads.')
-            fill_missing({branch: branch.head for branch in self.bundle_id.base_id.branch_ids}, 'base_head')
+            _fill_missing({branch: branch.head for branch in self.bundle_id.base_id.branch_ids}, 'base_head')
 
         # 3.2 FIND missing commit in master base heads
         if missing_repos:  # this is to get an upgrade branch.
             if not bundle.is_base:
                 self._log('Not all commit found in current version. Fallback on master branches heads.')
             master_bundle = self.env['runbot.version']._get('master').with_context(project_id=self.bundle_id.project_id.id).base_bundle_id
-            fill_missing({branch: branch.head for branch in master_bundle.branch_ids}, 'base_head')
+            _fill_missing({branch: branch.head for branch in master_bundle.branch_ids}, 'base_head')
 
         # 4. FIND missing commit in foreign project
         if missing_repos:
@@ -283,10 +283,10 @@ class Batch(models.Model):
             if foreign_projects:
                 self._log('Not all commit found. Fallback on foreign base branches heads.')
                 foreign_bundles = bundle.search([('name', '=', bundle.name), ('project_id', 'in', foreign_projects.ids)])
-                fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids').sorted('is_pr', reverse=True)}, 'head')
+                _fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids').sorted('is_pr', reverse=True)}, 'head')
                 if missing_repos:
                     foreign_bundles = bundle.search([('name', '=', bundle.base_id.name), ('project_id', 'in', foreign_projects.ids)])
-                    fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids')}, 'base_head')
+                    _fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids')}, 'base_head')
 
         # CHECK missing commit
         if missing_repos:
@@ -309,7 +309,7 @@ class Batch(models.Model):
             trigger_custom = trigger_customs.get(trigger, self.env['runbot.bundle.trigger.custom'])
             trigger_repos = trigger.repo_ids | trigger.dependency_ids
             if trigger_repos & missing_repos:
-                self.warning('Missing commit for repo %s for trigger %s', (trigger_repos & missing_repos).mapped('name'), trigger.name)
+                self._warning('Missing commit for repo %s for trigger %s', (trigger_repos & missing_repos).mapped('name'), trigger.name)
                 continue
             # in any case, search for an existing build
             config = trigger_custom.config_id or trigger.config_id
@@ -365,7 +365,7 @@ class Batch(models.Model):
             commit = link_commit.commit_id
             base_head = base_head_per_repo.get(commit.repo_id.id)
             if not base_head:
-                self.warning('No base head found for repo %s', commit.repo_id.name)
+                self._warning('No base head found for repo %s', commit.repo_id.name)
                 continue
             link_commit.base_commit_id = base_head
             merge_base_sha = False
@@ -399,9 +399,9 @@ class Batch(models.Model):
                 except ValueError:  # binary files
                     pass
             except subprocess.CalledProcessError:
-                self.warning('Commit info failed between %s and %s', commit.name, base_head.name)
+                self._warning('Commit info failed between %s and %s', commit.name, base_head.name)
 
-    def warning(self, message, *args):
+    def _warning(self, message, *args):
         self.has_warning = True
         _logger.warning('batch %s: ' + message, self.id, *args)
         self._log(message, *args, level='WARNING')
@@ -431,13 +431,13 @@ class BatchLog(models.Model):
         return pseudo_markdown(self.message)
 
 
+fa_link_types = {'created': 'hashtag', 'matched': 'link', 'rebuild': 'refresh'}
+
 class BatchSlot(models.Model):
     _name = 'runbot.batch.slot'
     _description = 'Link between a bundle batch and a build'
     _order = 'trigger_id,id'
 
-    _fa_link_type = {'created': 'hashtag', 'matched': 'link', 'rebuild': 'refresh'}
-
     batch_id = fields.Many2one('runbot.batch', index=True)
     trigger_id = fields.Many2one('runbot.trigger', index=True)
@@ -458,8 +458,8 @@ class BatchSlot(models.Model):
         for slot in self:
             slot.all_build_ids = all_builds.filtered_domain([('id', 'child_of', slot.build_id.ids)])
 
-    def fa_link_type(self):
-        return self._fa_link_type.get(self.link_type, 'exclamation-triangle')
+    def _fa_link_type(self):
+        return fa_link_types.get(self.link_type, 'exclamation-triangle')
 
     def _create_missing_build(self):
         """Create a build when the slot does not have one"""
diff --git a/runbot/models/branch.py b/runbot/models/branch.py
index 80079bd5..0b8ae314 100644
--- a/runbot/models/branch.py
+++ b/runbot/models/branch.py
@@ -106,9 +106,6 @@ class Branch(models.Model):
                 break
 
         for branch in self:
-            #branch.target_branch_name = False
-            #branch.pull_head_name = False
-            #branch.pull_head_remote_id = False
             if branch.name:
                 pi = branch.is_pr and (pull_info or pull_info_dict.get((branch.remote_id, branch.name)) or branch._get_pull_info())
                 if pi:
@@ -153,7 +150,7 @@ class Branch(models.Model):
             project = branch.remote_id.repo_id.project_id or self.env.ref('runbot.main_project')
             project.ensure_one()
             bundle = self.env['runbot.bundle'].search([('name', '=', name), ('project_id', '=', project.id)])
-            need_new_base = not bundle and branch.match_is_base(name)
+            need_new_base = not bundle and branch._match_is_base(name)
             if (bundle.is_base or need_new_base) and branch.remote_id != branch.remote_id.repo_id.main_remote_id:
                 _logger.warning('Trying to add a dev branch to base bundle, falling back on dummy bundle')
                 bundle = dummy
@@ -203,18 +200,17 @@ class Branch(models.Model):
         remote = self.remote_id
         if self.is_pr:
             _logger.info('Getting info for %s', self.name)
-            return remote._github('/repos/:owner/:repo/pulls/%s' % self.name, ignore_errors=False) or {}  # TODO catch and send a managable exception
+            return remote._github('/repos/:owner/:repo/pulls/%s' % self.name, ignore_errors=False) or {}
         return {}
 
-    def ref(self):
+    def _ref(self):
         return 'refs/%s/%s/%s' % (
             self.remote_id.remote_name,
             'pull' if self.is_pr else 'heads',
             self.name
         )
 
-    def recompute_infos(self, payload=None):
-        """ public method to recompute infos on demand """
+    def _recompute_infos(self, payload=None):
         was_draft = self.draft
         was_alive = self.alive
         init_target_branch_name = self.target_branch_name
@@ -242,7 +238,7 @@ class Branch(models.Model):
             self.bundle_id._force()
 
     @api.model
-    def match_is_base(self, name):
+    def _match_is_base(self, name):
         """match against is_base_regex ir.config_parameter"""
         if not name:
             return False
@@ -250,6 +246,9 @@ class Branch(models.Model):
         regex = icp.get_param('runbot.runbot_is_base_regex', False)
         if regex:
             return re.match(regex, name)
+
+    def action_recompute_infos(self):
+        return self._recompute_infos()
 
 
 class RefLog(models.Model):
diff --git a/runbot/models/build.py b/runbot/models/build.py
index 51944ba8..1f4500cb 100644
--- a/runbot/models/build.py
+++ b/runbot/models/build.py
@@ -1,27 +1,31 @@
 # -*- coding: utf-8 -*-
+
+import datetime
 import fnmatch
+import getpass
+import hashlib
 import logging
 import pwd
 import re
 import shutil
-import subprocess
 import time
-import datetime
-import hashlib
-from ..common import dt2time, fqdn, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall
-from ..container import docker_stop, docker_state, Command, docker_run
-from ..fields import JsonDictField
-from odoo import models, fields, api
-from odoo.exceptions import UserError, ValidationError
-from odoo.http import request
-from odoo.tools import appdirs
-from odoo.tools.safe_eval import safe_eval
+import uuid
+
 from collections import defaultdict
 from pathlib import Path
 from psycopg2 import sql
 from psycopg2.extensions import TransactionRollbackError
-import getpass
-import uuid
+
+from ..common import dt2time, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall, sanitize
+from ..container import docker_stop, docker_state, Command, docker_run
+from ..fields import JsonDictField
+
+from odoo import models, fields, api
+
+from odoo.exceptions import ValidationError
+from odoo.tools import file_open, file_path
+from odoo.tools.safe_eval import safe_eval
+
 
 _logger = logging.getLogger(__name__)
@@ -123,12 +127,6 @@ class BuildParameters(models.Model):
     def _find_existing(self, fingerprint):
         return self.env['runbot.build.params'].search([('fingerprint', '=', fingerprint)], limit=1)
 
-    def write(self, vals):
-        if not self.env.registry.loaded:
-            return
-        raise UserError('Params cannot be modified')
-
-
 class BuildResult(models.Model):
     # remove duplicate management
     # instead, link between bundle_batch and build
@@ -209,10 +207,6 @@ class BuildResult(models.Model):
                                   default='normal',
                                   string='Build type')
-
-    # what about parent_id and duplmicates?
-    # -> always create build, no duplicate? (make sence since duplicate should be the parent and params should be inherited)
-    # -> build_link ?
     parent_id = fields.Many2one('runbot.build', 'Parent Build', index=True)
     parent_path = fields.Char('Parent path', index=True, unaccent=False)
     top_parent = fields.Many2one('runbot.build', compute='_compute_top_parent')
@@ -253,7 +247,7 @@ class BuildResult(models.Model):
     @api.depends('params_id.config_id')
     def _compute_log_list(self):  # storing this field because it will be access trhoug repo viewn and keep track of the list at create
         for build in self:
-            build.log_list = ','.join({step.name for step in build.params_id.config_id.step_ids() if step._has_log()})
+            build.log_list = ','.join({step.name for step in build.params_id.config_id.step_ids if step._has_log()})
             # TODO replace logic, add log file to list when executed (avoid 404, link log on docker start, avoid fake is_docker_step)
 
     @api.depends('children_ids.global_state', 'local_state')
@@ -335,7 +329,7 @@ class BuildResult(models.Model):
     def _get_run_url(self, db_suffix=None):
         if db_suffix is None:
             db_suffix = self.mapped('database_ids')[0].db_suffix
-        if request.env.user._is_internal():
+        if self.env.user._is_internal():
             token, token_info = self._get_run_token()
             db_suffix = f'{db_suffix}-{token}-{token_info}'
         use_ssl = self.env['ir.config_parameter'].sudo().get_param('runbot.use_ssl', default=True)
@@ -410,7 +404,7 @@ class BuildResult(models.Model):
             'host': self.host if self.keep_host else False,
         })
 
-    def result_multi(self):
+    def _result_multi(self):
         if all(build.global_result == 'ok' or not build.global_result for build in self):
             return 'ok'
         if any(build.global_result in ('skipped', 'killed', 'manually_killed') for build in self):
@@ -495,7 +489,7 @@ class BuildResult(models.Model):
         new_build = self.create(values)
         if self.parent_id:
             new_build._github_status()
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         new_build._log('rebuild', 'Rebuild initiated by %s%s' % (user.name, (' :%s' % message) if message else ''))
 
         if self.local_state != 'done':
@@ -688,7 +682,7 @@ class BuildResult(models.Model):
                 'port': port,
             })
             build._log('wake_up', '**Waking up build**', log_type='markdown', level='SEPARATOR')
-            step_ids = build.params_id.config_id.step_ids()
+            step_ids = build.params_id.config_id.step_ids
             if step_ids and step_ids[-1]._step_state() == 'running':
                 run_step = step_ids[-1]
             else:
@@ -748,9 +742,9 @@ class BuildResult(models.Model):
                 # compute statistics before starting next job
                 build.active_step._make_stats(build)
-                build.active_step.log_end(build)
+                build.active_step._log_end(build)
 
-        step_ids = self.params_id.config_id.step_ids()
+        step_ids = self.params_id.config_id.step_ids
         if not step_ids:  # no job to do, build is done
             self.active_step = False
             self.local_state = 'done'
@@ -772,7 +766,7 @@ class BuildResult(models.Model):
             self.local_state = 'done'
             self.local_result = 'ko'
             return False
-        next_index = step_ids.index(self.active_step) + 1
+        next_index = list(step_ids).index(self.active_step) + 1
 
         while True:
             if next_index >= len(step_ids):  # final job, build is done
@@ -810,7 +804,7 @@ class BuildResult(models.Model):
                 build._log("run", message, level='ERROR')
                 build._kill(result='ko')
 
-    def _docker_run(self, cmd=None, ro_volumes=None, **kwargs):
+    def _docker_run(self, step, cmd=None, ro_volumes=None, **kwargs):
         self.ensure_one()
         _ro_volumes = ro_volumes or {}
         ro_volumes = {}
@@ -834,24 +828,24 @@ class BuildResult(models.Model):
             rc_content = cmd.get_config(starting_config=starting_config)
         else:
             rc_content = starting_config
-        self.write_file('.odoorc', rc_content)
+        self._write_file('.odoorc', rc_content)
         user = getpass.getuser()
         ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc')
-        kwargs.pop('build_dir', False)  # todo check python steps
+        kwargs.pop('build_dir', False)
+        kwargs.pop('log_path', False)
+        log_path = self._path('logs', '%s.txt' % step.name)
         build_dir = self._path()
         self.env.flush_all()
 
         def start_docker():
-            docker_run(cmd=cmd, build_dir=build_dir, ro_volumes=ro_volumes, **kwargs)
+            docker_run(cmd=cmd, build_dir=build_dir, log_path=log_path, ro_volumes=ro_volumes, **kwargs)
         return start_docker
 
-    def _path(self, *l, **kw):
+    def _path(self, *paths):
         """Return the repo build path"""
         self.ensure_one()
-        build = self
-        root = self.env['runbot.runbot']._root()
-        return os.path.join(root, 'build', build.dest, *l)
+        return self.env['runbot.runbot']._path('build', self.dest, *paths)
 
-    def http_log_url(self):
+    def _http_log_url(self):
         use_ssl = self.env['ir.config_parameter'].get_param('runbot.use_ssl', default=True)
         return '%s://%s/runbot/static/build/%s/logs/' % ('https' if use_ssl else 'http', self.host, self.dest)
 
@@ -859,12 +853,10 @@ class BuildResult(models.Model):
         """Return the absolute path to the direcory containing the server file, adding optional *path"""
         self.ensure_one()
         commit = self._get_server_commit()
-        if os.path.exists(commit._source_path('odoo')):
-            return commit._source_path('odoo', *path)
-        return commit._source_path('openerp', *path)
+        return commit._source_path('odoo', *path)
 
     def _docker_source_folder(self, commit):
-        return commit.repo_id.name
+        return sanitize(commit.repo_id.name)
 
     def _checkout(self):
         self.ensure_one()  # will raise exception if hash not found, we don't want to fail for all build.
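Note on the _docker_run rework above: the step itself, not a log_path argument, now determines where a job logs — the path is derived as 'logs/<step name>.txt' under the build directory, and any legacy log_path kwarg is dropped. A simplified stand-alone sketch of that flow (Build and Step here are stand-ins, not the real ORM models):

import os

class Step:
    def __init__(self, name):
        self.name = name

class Build:
    def __init__(self, root, dest):
        self.root, self.dest = root, dest

    def _path(self, *paths):
        return os.path.join(self.root, 'build', self.dest, *paths)

    def _docker_run(self, step, **kwargs):
        kwargs.pop('log_path', None)  # a legacy caller may still pass one; it is ignored
        log_path = self._path('logs', '%s.txt' % step.name)
        return log_path  # the real method forwards this to docker_run(...)

print(Build('/tmp/runbot', '12345-master')._docker_run(Step('install_odoo')))
# -> /tmp/runbot/build/12345-master/logs/install_odoo.txt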
@@ -876,7 +868,7 @@ class BuildResult(models.Model):
             if build_export_path in exports:
                 self._log('_checkout', 'Multiple repo have same export path in build, some source may be missing for %s' % build_export_path, level='ERROR')
                 self._kill(result='ko')
-            exports[build_export_path] = commit.export(self)
+            exports[build_export_path] = commit._export(self)
 
             checkout_time = time.time() - start
             if checkout_time > 60:
@@ -908,7 +900,7 @@ class BuildResult(models.Model):
     def _get_modules_to_test(self, modules_patterns=''):
         self.ensure_one()
 
-        def filter_patterns(patterns, default, all):
+        def _filter_patterns(patterns, default, all):
             default = set(default)
             patterns_list = (patterns or '').split(',')
             patterns_list = [p.strip() for p in patterns_list]
@@ -924,10 +916,10 @@ class BuildResult(models.Model):
         modules_to_install = set()
         for repo, module_list in self._get_available_modules().items():
             available_modules += module_list
-            modules_to_install |= filter_patterns(repo.modules, module_list, module_list)
+            modules_to_install |= _filter_patterns(repo.modules, module_list, module_list)
 
-        modules_to_install = filter_patterns(self.params_id.modules, modules_to_install, available_modules)
-        modules_to_install = filter_patterns(modules_patterns, modules_to_install, available_modules)
+        modules_to_install = _filter_patterns(self.params_id.modules, modules_to_install, available_modules)
+        modules_to_install = _filter_patterns(modules_patterns, modules_to_install, available_modules)
 
         return sorted(modules_to_install)
 
@@ -942,7 +934,7 @@ class BuildResult(models.Model):
                 msg = f"Failed to drop local logs database : {dbname} with exception: {e}"
                 _logger.exception(msg)
                 host_name = self.env['runbot.host']._get_current_name()
-                self.env['runbot.runbot'].warning(f'Host {host_name}: {msg}')
+                self.env['runbot.runbot']._warning(f'Host {host_name}: {msg}')
 
     def _local_pg_createdb(self, dbname):
         icp = self.env['ir.config_parameter']
@@ -992,7 +984,7 @@ class BuildResult(models.Model):
         if lock:
             self.env.cr.execute("""SELECT id FROM runbot_build WHERE parent_path like %s FOR UPDATE""", ['%s%%' % self.parent_path])
         self.ensure_one()
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         uid = user.id
         build = self
         message = message or 'Killing build %s, requested by %s (user #%s)' % (build.dest, user.name, uid)
@@ -1005,7 +997,7 @@ class BuildResult(models.Model):
             child._ask_kill(lock=False)
 
     def _wake_up(self):
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         self._log('wake_up', f'Wake up initiated by {user.name}')
         if self.local_state != 'done':
             self._log('wake_up', 'Impossibe to wake up, state is not done')
@@ -1031,7 +1023,7 @@ class BuildResult(models.Model):
             source_path = self._docker_source_folder(commit)
             for addons_path in (commit.repo_id.addons_paths or '').split(','):
                 if os.path.isdir(commit._source_path(addons_path)):
-                    yield os.path.join(source_path, addons_path).strip(os.sep)
+                    yield os.sep.join([source_path, addons_path]).strip(os.sep)
 
     def _get_server_info(self, commit=None):
         commit = commit or self._get_server_commit()
@@ -1052,7 +1044,7 @@ class BuildResult(models.Model):
         for commit_id in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
             if not self.params_id.skip_requirements and os.path.isfile(commit_id._source_path('requirements.txt')):
                 repo_dir = self._docker_source_folder(commit_id)
-                requirement_path = os.path.join(repo_dir, 'requirements.txt')
+                requirement_path = os.sep.join([repo_dir, 'requirements.txt'])
                 pres.append([f'python{py_version}', '-m', 'pip', 'install','--user', '--progress-bar', 'off', '-r', f'{requirement_path}'])
 
         addons_paths = self._get_addons_path()
@@ -1060,7 +1052,7 @@ class BuildResult(models.Model):
         server_dir = self._docker_source_folder(server_commit)
 
         # commandline
-        cmd = ['python%s' % py_version] + python_params + [os.path.join(server_dir, server_file)]
+        cmd = ['python%s' % py_version] + python_params + [os.sep.join([server_dir, server_file])]
         if sub_command:
             cmd += [sub_command]
@@ -1118,7 +1110,7 @@ class BuildResult(models.Model):
         """return the python name to use from build batch"""
         (server_commit, server_file) = self._get_server_info()
         server_path = server_commit._source_path(server_file)
-        with open(server_path, 'r') as f:
+        with file_open(server_path, 'r') as f:
             if f.readline().strip().endswith('python3'):
                 return '3'
         return ''
@@ -1131,52 +1123,35 @@ class BuildResult(models.Model):
         ir_logs = self.env['ir.logging'].search([('level', 'in', ('ERROR', 'WARNING', 'CRITICAL')), ('type', '=', 'server'), ('build_id', 'in', builds_to_scan.ids)])
         return BuildError._parse_logs(ir_logs)
 
-    def is_file(self, file, mode='r'):
+    def _is_file(self, file, mode='r'):
         file_path = self._path(file)
         return os.path.exists(file_path)
 
-    def read_file(self, file, mode='r'):
+    def _read_file(self, file, mode='r'):
         file_path = self._path(file)
         try:
-            with open(file_path, mode) as f:
+            with file_open(file_path, mode) as f:
                 return f.read()
         except Exception as e:
             self._log('readfile', 'exception: %s' % e)
             return False
 
-    def write_file(self, file, data, mode='w'):
-        file_path = self._path(file)
-        file_dir = os.path.split(file_path)[0]
+    def _write_file(self, file, data, mode='w'):
+        _file_path = self._path(file)
+        file_dir = os.path.dirname(_file_path)
         os.makedirs(file_dir, exist_ok=True)
+        file_path(os.path.dirname(_file_path))
         try:
-            with open(file_path, mode) as f:
+            with open(_file_path, mode) as f:
                 f.write(data)
         except Exception as e:
             self._log('write_file', 'exception: %s' % e)
             return False
 
-    def make_dirs(self, dir_path):
-        full_path = self._path(dir_path)
-        try:
-            os.makedirs(full_path, exist_ok=True)
-        except Exception as e:
-            self._log('make_dirs', 'exception: %s' % e)
-            return False
-
-    def build_type_label(self):
-        self.ensure_one()
-        return dict(self.fields_get('build_type', 'selection')['build_type']['selection']).get(self.build_type, self.build_type)
-
-    def get_formated_job_time(self):
-        return s2human(self.job_time)
-
-    def get_formated_build_time(self):
+    def _get_formated_build_time(self):
         return s2human(self.build_time)
 
-    def get_formated_build_age(self):
-        return s2human(self.build_age)
-
-    def get_color_class(self):
+    def _get_color_class(self):
 
         if self.global_result == 'ko':
             return 'danger'
@@ -1230,5 +1205,5 @@ class BuildResult(models.Model):
                 if 'base_' not in build_commit.match_type and commit.repo_id in trigger.repo_ids:
                     commit._github_status(build, trigger.ci_context, state, target_url, desc)
 
-    def parse_config(self):
+    def _parse_config(self):
         return set(findall(self._server("tools/config.py"), '--[\w-]+', ))
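Note: build.py now routes docker folder names through common.sanitize() (see _docker_source_folder above) and joins in-container paths with os.sep.join. A short illustration of why the repo name is cleaned first — sanitize() is re-implemented verbatim from common.py in this patch, while the inputs below are just illustrative:

def sanitize(name):
    for i in ['@', ':', '/', '\\', '..']:
        name = name.replace(i, '_')
    return name

print(sanitize('odoo/enterprise'))   # -> odoo_enterprise
print(sanitize('../../etc/passwd'))  # -> ____etc_passwd (traversal neutralized)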
docker_get_gateway_ip, Command from odoo import models, fields, api from odoo.exceptions import UserError, ValidationError +from odoo.tools.misc import file_open from odoo.tools.safe_eval import safe_eval, test_python_expr, _SAFE_OPCODES, to_opcodes # adding some additionnal optcode to safe_eval. This is not 100% needed and won't be done in standard but will help @@ -26,26 +27,6 @@ _re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING ' PYTHON_DEFAULT = "# type python code here\n\n\n\n\n\n" -class ReProxy(): - @classmethod - def match(cls, *args, **kwrags): - return re.match(*args, **kwrags) - - @classmethod - def search(cls, *args, **kwrags): - return re.search(*args, **kwrags) - - @classmethod - def compile(cls, *args, **kwrags): - return re.compile(*args, **kwrags) - - @classmethod - def findall(cls, *args, **kwrags): - return re.findall(*args, **kwrags) - - VERBOSE = re.VERBOSE - MULTILINE = re.MULTILINE - class Config(models.Model): _name = 'runbot.build.config' _description = "Build config" @@ -58,6 +39,7 @@ class Config(models.Model): protected = fields.Boolean('Protected', default=False, tracking=True) group = fields.Many2one('runbot.build.config', 'Configuration group', help="Group of config's and config steps") group_name = fields.Char('Group name', related='group.name') + step_ids = fields.Many2many('runbot.build.config.step', compute='_compute_step_ids') @api.model_create_multi def create(self, vals_list): @@ -76,29 +58,25 @@ class Config(models.Model): copy.sudo().write({'protected': False}) return copy - def unlink(self): - super(Config, self).unlink() - - def step_ids(self): - if self: - self.ensure_one() - return [ordered_step.step_id for ordered_step in self.step_order_ids.sorted('sequence')] + @api.depends('step_order_ids.sequence', 'step_order_ids.step_id') + def _compute_step_ids(self): + for config in self: + config.step_ids = config.step_order_ids.sorted('sequence').mapped('step_id') def _check_step_ids_order(self): for record in self: install_job = False - step_ids = record.step_ids() - for step in step_ids: + for step in record.step_ids: if step.job_type == 'install_odoo': install_job = True if step.job_type == 'run_odoo': - if step != step_ids[-1]: + if step != record.step_ids[-1]: raise UserError('Jobs of type run_odoo should be the last one') if not install_job: raise UserError('Jobs of type run_odoo should be preceded by a job of type install_odoo') - record._check_recustion() + record._check_recursion() - def _check_recustion(self, visited=None): + def _check_recursion(self, visited=None): self.ensure_one() visited = visited or [] recursion = False @@ -107,10 +85,10 @@ class Config(models.Model): visited.append(self) if recursion: raise UserError('Impossible to save config, recursion detected with path: %s' % ">".join([v.name for v in visited])) - for step in self.step_ids(): + for step in self.step_ids: if step.job_type == 'create_build': for create_config in step.create_config_ids: - create_config._check_recustion(visited[:]) + create_config._check_recursion(visited[:]) class ConfigStepUpgradeDb(models.Model): @@ -270,7 +248,6 @@ class ConfigStep(models.Model): raise UserError('Invalid extra_params on config step') def _run(self, build): - log_path = build._path('logs', '%s.txt' % self.name) build.write({'job_start': now(), 'job_end': False}) # state, ... 
log_link = '' if self._has_log(): @@ -278,17 +255,17 @@ class ConfigStep(models.Model): url = f"{log_url}/runbot/static/build/{build.dest}/logs/{self.name}.txt" log_link = f'[@icon-file-text]({url})' build._log('run', 'Starting step **%s** from config **%s** %s' % (self.name, build.params_id.config_id.name, log_link), log_type='markdown', level='SEPARATOR') - return self._run_step(build, log_path) + return self._run_step(build) - def _run_step(self, build, log_path, **kwargs): + def _run_step(self, build, **kwargs): build.log_counter = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_maxlogs', 100) run_method = getattr(self, '_run_%s' % self.job_type) - docker_params = run_method(build, log_path, **kwargs) + docker_params = run_method(build, **kwargs) if docker_params: - return build._docker_run(**docker_params) + return build._docker_run(self, **docker_params) return True - def _run_create_build(self, build, log_path): + def _run_create_build(self, build): count = 0 config_data = build.params_id.config_data config_ids = config_data.get('create_config_ids', self.create_config_ids) @@ -308,7 +285,7 @@ class ConfigStep(models.Model): child = build._add_child(_child_data, orphan=self.make_orphan) build._log('create_build', 'created with config %s' % create_config.name, log_type='subbuild', path=str(child.id)) - def make_python_ctx(self, build): + def _make_python_ctx(self, build): return { 'self': self, # 'fields': fields, @@ -325,8 +302,8 @@ class ConfigStep(models.Model): 'PatchSet': PatchSet, } - def _run_python(self, build, log_path, force=False): - eval_ctx = self.make_python_ctx(build) + def _run_python(self, build, force=False): + eval_ctx = self._make_python_ctx(build) eval_ctx['force'] = force try: safe_eval(self.python_code.strip(), eval_ctx, mode="exec", nocopy=True) @@ -350,7 +327,7 @@ class ConfigStep(models.Model): self.ensure_one() return self.job_type in ('install_odoo', 'run_odoo', 'restore', 'test_upgrade') or (self.job_type == 'python' and ('docker_params =' in self.python_code or '_run_' in self.python_code)) - def _run_run_odoo(self, build, log_path, force=False): + def _run_run_odoo(self, build, force=False): if not force: if build.parent_id: build._log('_run_run_odoo', 'build has a parent, skip run') @@ -365,7 +342,7 @@ class ConfigStep(models.Model): # run server cmd = build._cmd(local_only=False, enable_log_db=self.enable_log_db) - available_options = build.parse_config() + available_options = build._parse_config() if "--workers" in available_options: cmd += ["--workers", "2"] @@ -406,9 +383,9 @@ class ConfigStep(models.Model): except Exception: _logger.exception('An error occured while reloading nginx') build._log('', "An error occured while reloading nginx, skipping") - return dict(cmd=cmd, log_path=log_path, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables) + return dict(cmd=cmd, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables) - def _run_install_odoo(self, build, log_path): + def _run_install_odoo(self, build): exports = build._checkout() modules_to_install = self._modules_to_install(build) @@ -434,7 +411,7 @@ class ConfigStep(models.Model): cmd += ['-i', mods] config_path = build._server("tools/config.py") - available_options = build.parse_config() + available_options = build._parse_config() if self.test_enable: if "--test-enable" in available_options: cmd.extend(['--test-enable']) @@ -452,7 +429,7 @@ class 
ConfigStep(models.Model): test_tags += self.test_tags.replace(' ', '').split(',') if self.enable_auto_tags and not build.params_id.config_data.get('disable_auto_tags', False): if grep(config_path, "[/module][:class]"): - auto_tags = self.env['runbot.build.error'].disabling_tags() + auto_tags = self.env['runbot.build.error']._disabling_tags() if auto_tags: test_tags += auto_tags @@ -486,7 +463,7 @@ class ConfigStep(models.Model): cmd.finals.append(['cp', '-r', filestore_path, filestore_dest]) cmd.finals.append(['cd', dump_dir, '&&', 'zip', '-rmq9', zip_path, '*']) infos = '{\n "db_name": "%s",\n "build_id": %s,\n "shas": [%s]\n}' % (db_name, build.id, ', '.join(['"%s"' % build_commit.commit_id.dname for build_commit in build.params_id.commit_link_ids])) - build.write_file('logs/%s/info.json' % db_name, infos) + build._write_file('logs/%s/info.json' % db_name, infos) if self.flamegraph: cmd.finals.append(['flamegraph.pl', '--title', 'Flamegraph %s for build %s' % (self.name, build.id), self._perfs_data_path(), '>', self._perfs_data_path(ext='svg')]) @@ -494,12 +471,12 @@ class ConfigStep(models.Model): max_timeout = int(self.env['ir.config_parameter'].get_param('runbot.runbot_timeout', default=10000)) timeout = min(self.cpu_limit, max_timeout) env_variables = self.additionnal_env.split(';') if self.additionnal_env else [] - return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables) + return dict(cmd=cmd, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables) def _upgrade_create_childs(self): pass - def _run_configure_upgrade_complement(self, build, *args): + def _run_configure_upgrade_complement(self, build): """ Parameters: - upgrade_dumps_trigger_id: a configure_upgradestep @@ -512,7 +489,7 @@ class ConfigStep(models.Model): builds_references = param.builds_reference_ids builds_references_by_version_id = {b.params_id.version_id.id: b for b in builds_references} upgrade_complement_step = build.params_id.trigger_id.upgrade_dumps_trigger_id.upgrade_step_id - version_domain = build.params_id.trigger_id.upgrade_dumps_trigger_id.get_version_domain() + version_domain = build.params_id.trigger_id.upgrade_dumps_trigger_id._get_version_domain() valid_targets = build.browse() next_versions = version.next_major_version_id | version.next_intermediate_version_ids if version_domain: # filter only on version where trigger is enabled @@ -542,7 +519,7 @@ class ConfigStep(models.Model): ) child._log('', 'This build tests change of schema in stable version testing upgrade to %s' % target.params_id.version_id.name) - def _run_configure_upgrade(self, build, log_path): + def _run_configure_upgrade(self, build): """ Source/target parameters: - upgrade_to_current | (upgrade_to_master + (upgrade_to_major_versions | upgrade_to_all_versions)) @@ -704,7 +681,7 @@ class ConfigStep(models.Model): if any(fnmatch.fnmatch(db.db_suffix, pat) for pat in pat_list): yield db - def _run_test_upgrade(self, build, log_path): + def _run_test_upgrade(self, build): target = build.params_id.upgrade_to_build_id commit_ids = build.params_id.commit_ids target_commit_ids = target.params_id.commit_ids @@ -742,9 +719,9 @@ class ConfigStep(models.Model): exception_env = self.env['runbot.upgrade.exception']._generate() if exception_env: env_variables.append(exception_env) - return dict(cmd=migrate_cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, 
env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag) + return dict(cmd=migrate_cmd, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag) - def _run_restore(self, build, log_path): + def _run_restore(self, build): # exports = build._checkout() params = build.params_id dump_db = params.dump_db @@ -776,7 +753,7 @@ class ConfigStep(models.Model): assert download_db_suffix and dump_build download_db_name = '%s-%s' % (dump_build.dest, download_db_suffix) zip_name = '%s.zip' % download_db_name - dump_url = '%s%s' % (dump_build.http_log_url(), zip_name) + dump_url = '%s%s' % (dump_build._http_log_url(), zip_name) build._log('test-migration', 'Restoring dump [%s](%s) from build [%s](%s)' % (zip_name, dump_url, dump_build.id, dump_build.build_url), log_type='markdown') restore_suffix = self.restore_rename_db_suffix or dump_db.db_suffix or suffix assert restore_suffix @@ -802,7 +779,7 @@ class ConfigStep(models.Model): ]) - return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=self.cpu_limit) + return dict(cmd=cmd, container_name=build._get_docker_name(), cpu_limit=self.cpu_limit) def _reference_builds(self, bundle, trigger): upgrade_dumps_trigger_id = trigger.upgrade_dumps_trigger_id @@ -879,7 +856,7 @@ class ConfigStep(models.Model): category_id=category_id ).mapped('last_done_batch') - def log_end(self, build): + def _log_end(self, build): if self.job_type == 'create_build': build._logger('Step %s finished in %s' % (self.name, s2human(build.job_time))) return @@ -888,19 +865,19 @@ class ConfigStep(models.Model): if self.job_type == 'install_odoo': kwargs['message'] += ' $$fa-download$$' db_suffix = build.params_id.config_data.get('db_name') or (build.params_id.dump_db.db_suffix if not self.create_db else False) or self.db_name - kwargs['path'] = '%s%s-%s.zip' % (build.http_log_url(), build.dest, db_suffix) + kwargs['path'] = '%s%s-%s.zip' % (build._http_log_url(), build.dest, db_suffix) kwargs['log_type'] = 'link' build._log('', **kwargs) if self.coverage: - xml_url = '%scoverage.xml' % build.http_log_url() + xml_url = '%scoverage.xml' % build._http_log_url() html_url = 'http://%s/runbot/static/build/%s/coverage/index.html' % (build.host, build.dest) message = 'Coverage report: [xml @icon-download](%s), [html @icon-eye](%s)' % (xml_url, html_url) build._log('end_job', message, log_type='markdown') if self.flamegraph: - dat_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'log.gz') - svg_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'svg') + dat_url = '%sflame_%s.%s' % (build._http_log_url(), self.name, 'log.gz') + svg_url = '%sflame_%s.%s' % (build._http_log_url(), self.name, 'svg') message = 'Flamegraph report: [data @icon-download](%s), [svg @icon-eye](%s)' % (dat_url, svg_url) build._log('end_job', message, log_type='markdown') @@ -932,7 +909,7 @@ class ConfigStep(models.Model): for (addons_path, module, _) in commit._get_available_modules(): if module not in modules_to_install: # we want to omit docker_source_folder/[addons/path/]module/* - module_path_in_docker = os.path.join(docker_source_folder, addons_path, module) + module_path_in_docker = os.sep.join([docker_source_folder, addons_path, module]) pattern_to_omit.add('%s/*' % (module_path_in_docker)) return ['--omit', ','.join(pattern_to_omit)] @@ -953,7 +930,7 @@ class ConfigStep(models.Model): build.write(self._make_restore_results(build)) def 
_make_python_results(self, build): - eval_ctx = self.make_python_ctx(build) + eval_ctx = self._make_python_ctx(build) safe_eval(self.python_result_code.strip(), eval_ctx, mode="exec", nocopy=True) return_value = eval_ctx.get('return_value', {}) # todo check return_value or write in try except. Example: local result setted to wrong value @@ -966,7 +943,7 @@ class ConfigStep(models.Model): build._log('coverage_result', 'Start getting coverage result') cov_path = build._path('coverage/index.html') if os.path.exists(cov_path): - with open(cov_path, 'r') as f: + with file_open(cov_path, 'r') as f: data = f.read() covgrep = re.search(r'pc_cov.>(?P\d+)%', data) build_values['coverage_result'] = covgrep and covgrep.group('coverage') or False @@ -997,11 +974,11 @@ class ConfigStep(models.Model): return build_values def _check_module_states(self, build): - if not build.is_file('logs/modules_states.txt'): + if not build._is_file('logs/modules_states.txt'): build._log('', '"logs/modules_states.txt" file not found.', level='ERROR') return 'ko' - content = build.read_file('logs/modules_states.txt') or '' + content = build._read_file('logs/modules_states.txt') or '' if '(0 rows)' not in content: build._log('', 'Some modules are not in installed/uninstalled/uninstallable state after migration. \n %s' % content) return 'ko' @@ -1155,7 +1132,7 @@ class ConfigStep(models.Model): commit = commit_link.commit_id modified = commit.repo_id._git(['diff', '--name-only', '%s..%s' % (commit_link.merge_base_commit_id.name, commit.name)]) if modified: - files = [('%s/%s' % (build._docker_source_folder(commit), file)) for file in modified.split('\n') if file] + files = [os.sep.join([build._docker_source_folder(commit), file]) for file in modified.split('\n') if file] modified_files[commit_link] = files return modified_files diff --git a/runbot/models/build_config_codeowner.py b/runbot/models/build_config_codeowner.py index 79da1cff..e5910fb7 100644 --- a/runbot/models/build_config_codeowner.py +++ b/runbot/models/build_config_codeowner.py @@ -60,7 +60,7 @@ class ConfigStep(models.Model): reviewer_per_file[file] = file_reviewers return reviewer_per_file - def _run_codeowner(self, build, log_path): + def _run_codeowner(self, build): bundle = build.params_id.create_batch_id.bundle_id if bundle.is_base: build._log('', 'Skipping base bundle') @@ -133,7 +133,6 @@ class ConfigStep(models.Model): pr = pr_by_commit[commit_link] new_reviewers = reviewers - set((pr.reviewers or '').split(',')) if new_reviewers: - # todo replace all team by a runbot team and simplify this logic to remove search author_skippable_teams = skippable_teams.filtered(lambda team: team.skip_team_pr and team.github_team in new_reviewers and pr.pr_author in team._get_members_logins()) author_skipped_teams = set(author_skippable_teams.mapped('github_team')) if author_skipped_teams: diff --git a/runbot/models/build_error.py b/runbot/models/build_error.py index 12d0595c..a4179caa 100644 --- a/runbot/models/build_error.py +++ b/runbot/models/build_error.py @@ -63,26 +63,15 @@ class BuildError(models.Model): cleaners = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')]) for vals in vals_list: content = vals.get('content') - cleaned_content = cleaners.r_sub('%', content) + cleaned_content = cleaners._r_sub('%', content) vals.update({ 'cleaned_content': cleaned_content, 'fingerprint': self._digest(cleaned_content) }) records = super().create(vals_list) - records.assign() + records.action_assign() return records - def assign(self): - if not 
any((not record.responsible and not record.team_id and record.file_path and not record.parent_id) for record in self): - return - teams = self.env['runbot.team'].search(['|', ('path_glob', '!=', False), ('module_ownership_ids', '!=', False)]) - repos = self.env['runbot.repo'].search([]) - for record in self: - if not record.responsible and not record.team_id and record.file_path and not record.parent_id: - team = teams._get_team(record.file_path, repos) - if team: - record.team_id = team - def write(self, vals): if 'active' in vals: for build_error in self: @@ -177,9 +166,9 @@ class BuildError(models.Model): hash_dict = defaultdict(self.env['ir.logging'].browse) for log in ir_logs: - if search_regs.r_search(log.message): + if search_regs._r_search(log.message): continue - fingerprint = self._digest(cleaning_regs.r_sub('%', log.message)) + fingerprint = self._digest(cleaning_regs._r_sub('%', log.message)) hash_dict[fingerprint] |= log build_errors = self.env['runbot.build.error'] @@ -220,7 +209,27 @@ class BuildError(models.Model): window_action["res_id"] = build_errors.id return window_action - def link_errors(self): + @api.model + def _test_tags_list(self): + active_errors = self.search([('test_tags', '!=', False)]) + test_tag_list = active_errors.mapped('test_tags') + return [test_tag for error_tags in test_tag_list for test_tag in (error_tags).split(',')] + + @api.model + def _disabling_tags(self): + return ['-%s' % tag for tag in self._test_tags_list()] + + def _search_version(self, operator, value): + return [('build_ids.version_id', operator, value)] + + def _search_trigger_ids(self, operator, value): + return [('build_ids.trigger_id', operator, value)] + + #################### + # Actions + #################### + + def action_link_errors(self): """ Link errors with the first one of the recordset choosing parent in error with responsible, random bug and finally fisrt seen """ @@ -230,26 +239,22 @@ class BuildError(models.Model): build_errors = self.search([('id', 'in', self.ids)], order='responsible asc, random desc, id asc') build_errors[1:].write({'parent_id': build_errors[0].id}) - def clean_content(self): + def action_clean_content(self): cleaning_regs = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')]) for build_error in self: - build_error.cleaned_content = cleaning_regs.r_sub('%', build_error.content) + build_error.cleaned_content = cleaning_regs._r_sub('%', build_error.content) - @api.model - def test_tags_list(self): - active_errors = self.search([('test_tags', '!=', False)]) - test_tag_list = active_errors.mapped('test_tags') - return [test_tag for error_tags in test_tag_list for test_tag in (error_tags).split(',')] + def action_assign(self): + if not any((not record.responsible and not record.team_id and record.file_path and not record.parent_id) for record in self): + return + teams = self.env['runbot.team'].search(['|', ('path_glob', '!=', False), ('module_ownership_ids', '!=', False)]) + repos = self.env['runbot.repo'].search([]) + for record in self: + if not record.responsible and not record.team_id and record.file_path and not record.parent_id: + team = teams._get_team(record.file_path, repos) + if team: + record.team_id = team - @api.model - def disabling_tags(self): - return ['-%s' % tag for tag in self.test_tags_list()] - - def _search_version(self, operator, value): - return [('build_ids.version_id', operator, value)] - - def _search_trigger_ids(self, operator, value): - return [('build_ids.trigger_id', operator, value)] class 
BuildErrorTag(models.Model): @@ -272,13 +277,13 @@ class ErrorRegex(models.Model): re_type = fields.Selection([('filter', 'Filter out'), ('cleaning', 'Cleaning')], string="Regex type") sequence = fields.Integer('Sequence', default=100) - def r_sub(self, replace, s): + def _r_sub(self, replace, s): """ replaces patterns from the recordset by replace in the given string """ for c in self: s = re.sub(c.regex, replace, s) return s - def r_search(self, s): + def _r_search(self, s): """ Return True if one of the regex is found in s """ for filter in self: if re.search(filter.regex, s): @@ -297,7 +302,13 @@ class ErrorBulkWizard(models.TransientModel): archive = fields.Boolean('Close error (archive)', default=False) chatter_comment = fields.Text('Chatter Comment') - def submit(self): + @api.onchange('fixing_commit', 'chatter_comment') + def _onchange_commit_comment(self): + for record in self: + if record.fixing_commit or record.chatter_comment: + record.archive = True + + def action_submit(self): error_ids = self.env['runbot.build.error'].browse(self.env.context.get('active_ids')) if error_ids: if self.team_id: @@ -313,9 +324,3 @@ class ErrorBulkWizard(models.TransientModel): if self.chatter_comment: for build_error in error_ids: build_error.message_post(body=self.chatter_comment, subject="Bulk Wizard Comment") - - @api.onchange('fixing_commit', 'chatter_comment') - def _onchange_commit_comment(self): - for record in self: - if record.fixing_commit or record.chatter_comment: - record.archive = True diff --git a/runbot/models/build_stat_regex.py b/runbot/models/build_stat_regex.py index 8e5eb9fc..e845cbad 100644 --- a/runbot/models/build_stat_regex.py +++ b/runbot/models/build_stat_regex.py @@ -6,6 +6,7 @@ import re from odoo import models, fields, api from odoo.exceptions import ValidationError +from odoo.tools import file_open VALUE_PATTERN = r"\(\?P<value>\.+\)" # used to verify value group pattern @@ -53,7 +54,7 @@ class BuildStatRegex(models.Model): if not os.path.exists(file_path): return {} stats_matches = {} - with open(file_path, "r") as log_file: + with file_open(file_path, "r") as log_file: data = log_file.read() for build_stat_regex in self: current_stat_matches = {} diff --git a/runbot/models/bundle.py b/runbot/models/bundle.py index 7710eef7..266d2eb2 100644 --- a/runbot/models/bundle.py +++ b/runbot/models/bundle.py @@ -225,7 +225,7 @@ class Bundle(models.Model): self.last_batch = new return new - def consistency_warning(self): + def _consistency_warning(self): if self.defined_base_id: return [('info', 'This bundle has a forced base: %s' % self.defined_base_id.name)] warnings = [] @@ -242,34 +242,13 @@ class Bundle(models.Model): warnings.append(('warning', 'Branch %s not starting with version name (%s)' % (branch.dname, self.base_id.name))) return warnings - def branch_groups(self): + def _branch_groups(self): self.branch_ids.sorted(key=lambda b: (b.remote_id.repo_id.sequence, b.remote_id.repo_id.id, b.is_pr)) branch_groups = {repo: [] for repo in self.branch_ids.mapped('remote_id.repo_id').sorted('sequence')} for branch in self.branch_ids.sorted(key=lambda b: (b.is_pr)): branch_groups[branch.remote_id.repo_id].append(branch) return branch_groups - def generate_custom_trigger_multi_action(self): - context = { - 'default_bundle_id': self.id, - 'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id, - 'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id, - 'default_extra_params': False, - 'default_child_extra_params': '--test-tags
/module.test_method', - 'default_number_build': 10, - } - return self._generate_custom_trigger_action(context) - - def generate_custom_trigger_restore_action(self): - context = { - 'default_bundle_id': self.id, - 'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id, - 'default_child_config_id': False, - 'default_extra_params': '--test-tags /module.test_method', - 'default_child_extra_params': False, - 'default_number_build': 0, - } - return self._generate_custom_trigger_action(context) def _generate_custom_trigger_action(self, context): return { 'type': 'ir.actions.act_window', 'res_model': 'runbot.trigger.custom.wizard', 'target': 'new', 'context': context, } + + def action_generate_custom_trigger_multi_action(self): + context = { + 'default_bundle_id': self.id, + 'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id, + 'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id, + 'default_extra_params': False, + 'default_child_extra_params': '--test-tags /module.test_method', + 'default_number_build': 10, + } + return self._generate_custom_trigger_action(context) + + def action_generate_custom_trigger_restore_action(self): + context = { + 'default_bundle_id': self.id, + 'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id, + 'default_child_config_id': False, + 'default_extra_params': '--test-tags /module.test_method', + 'default_child_extra_params': False, + 'default_number_build': 0, + } + return self._generate_custom_trigger_action(context) diff --git a/runbot/models/commit.py b/runbot/models/commit.py index c33ec964..f04228ed 100644 --- a/runbot/models/commit.py +++ b/runbot/models/commit.py @@ -1,11 +1,12 @@ import subprocess -from ..common import os, RunbotException, _make_github_session +from ..common import os, RunbotException, make_github_session import glob import shutil from odoo import models, fields, api, registry +from odoo.tools import file_open import logging _logger = logging.getLogger(__name__) @@ -52,7 +53,7 @@ class Commit(models.Model): module = os.path.basename(os.path.dirname(manifest_path)) yield (addons_path, module, manifest_file_name) - def export(self, build): + def _export(self, build): """Export a git repo into the sources directory""" # TODO add automated tests self.ensure_one() @@ -106,19 +107,19 @@ class Commit(models.Model): return export_path - def read_source(self, file, mode='r'): + def _read_source(self, file, mode='r'): file_path = self._source_path(file) try: - with open(file_path, mode) as f: + with file_open(file_path, mode) as f: return f.read() except: return False - def _source_path(self, *path): + def _source_path(self, *paths): export_name = self.name if self.rebase_on_id: export_name = '%s_%s' % (self.name, self.rebase_on_id.name) - return os.path.join(self.env['runbot.runbot']._root(), 'sources', self.repo_id.name, export_name, *path) + return self.repo_id._source_path(export_name, *paths) @api.depends('name', 'repo_id.name') def _compute_dname(self): @@ -201,7 +202,7 @@ class CommitStatus(models.Model): _logger.warning('No token on remote %s, skipping status', remote.mapped("name")) else: if remote.token not in session_cache: - session_cache[remote.token] = _make_github_session(remote.token) + session_cache[remote.token] = make_github_session(remote.token) session = session_cache[remote.token] _logger.info( "github updating %s status %s to %s in repo %s", diff --git a/runbot/models/custom_trigger.py b/runbot/models/custom_trigger.py index c8603b71..15c4d9c9
100644 --- a/runbot/models/custom_trigger.py +++ b/runbot/models/custom_trigger.py @@ -56,17 +56,17 @@ class CustomTriggerWizard(models.TransientModel): @api.depends('config_id') def _compute_has_create_step(self): for record in self: - record.has_create_step = any(step.job_type == 'create_build' for step in self.config_id.step_ids()) + record.has_create_step = any(step.job_type == 'create_build' for step in record.config_id.step_ids) @api.depends('config_id') def _compute_has_restore_step(self): for record in self: - record.has_restore_step = any(step.job_type == 'restore' for step in self.config_id.step_ids()) + record.has_restore_step = any(step.job_type == 'restore' for step in record.config_id.step_ids) @api.depends('child_config_id') def _compute_has_child_with_restore_step(self): for record in self: - record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in self.child_config_id.step_ids()) + record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in record.child_config_id.step_ids) @api.onchange('extra_params', 'child_extra_params', 'restore_dump_url', 'config_id', 'child_config_id', 'number_build', 'config_id', 'restore_mode', 'restore_database_suffix', 'restore_trigger_id') def _onchange_warnings(self): @@ -164,7 +164,7 @@ class CustomTriggerWizard(models.TransientModel): def _get_existing_trigger(self): return self.env['runbot.bundle.trigger.custom'].search([('bundle_id', '=', self.bundle_id.id), ('trigger_id', '=', self.trigger_id.id)]) - def submit(self): + def action_submit(self): self.ensure_one() self._get_existing_trigger().unlink() self.env['runbot.bundle.trigger.custom'].create({ diff --git a/runbot/models/host.py b/runbot/models/host.py index e9939415..d4637058 100644 --- a/runbot/models/host.py +++ b/runbot/models/host.py @@ -5,7 +5,7 @@ import time from collections import defaultdict from odoo import models, fields, api -from odoo.tools import config, ormcache +from odoo.tools import config, ormcache, file_open from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor from ..container import docker_build @@ -106,8 +106,8 @@ class Host(models.Model): def _bootstrap(self): """ Create needed directories in static """ dirs = ['build', 'nginx', 'repo', 'sources', 'src', 'docker'] - static_path = self._get_work_path() - static_dirs = {d: os.path.join(static_path, d) for d in dirs} + static_path = self.env['runbot.runbot']._root() + static_dirs = {d: self.env['runbot.runbot']._path(d) for d in dirs} for dir, path in static_dirs.items(): os.makedirs(path, exist_ok=True) self._bootstrap_db_template() @@ -117,16 +117,14 @@ class Host(models.Model): """ build docker images needed by locally pending builds""" _logger.info('Building docker images...') self.ensure_one() - static_path = self._get_work_path() self.clear_caches() # needed to ensure that content is updated on all hosts for dockerfile in self.env['runbot.dockerfile'].search([('to_build', '=', True)]): - self._docker_build_dockerfile(dockerfile, static_path) + self._docker_build_dockerfile(dockerfile) _logger.info('Done...') - def _docker_build_dockerfile(self, dockerfile, workdir): + def _docker_build_dockerfile(self, dockerfile): start = time.time() - # _logger.info('Building %s, %s', dockerfile.name, hash(str(dockerfile.dockerfile))) - docker_build_path = os.path.join(workdir, 'docker', dockerfile.image_tag) + docker_build_path = self.env['runbot.runbot']._path('docker', dockerfile.image_tag)
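# `_path` (defined on runbot.runbot further down in this patch) joins
# sanitized path segments under the static root and raises UserError if the
# result would escape it, so the docker build path stays below
# <static>/docker whatever the image tag contains.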
os.makedirs(docker_build_path, exist_ok=True) user = getpass.getuser() @@ -139,22 +137,18 @@ class Host(models.Model): USER {user} ENV COVERAGE_FILE /data/build/.coverage """ - - with open(os.path.join(docker_build_path, 'Dockerfile'), 'w') as Dockerfile: + with open(self.env['runbot.runbot']._path('docker', dockerfile.image_tag, 'Dockerfile'), 'w') as Dockerfile: Dockerfile.write(dockerfile.dockerfile + docker_append) docker_build_success, msg = docker_build(docker_build_path, dockerfile.image_tag) if not docker_build_success: dockerfile.to_build = False dockerfile.message_post(body=f'Build failure:\n{msg}') - # self.env['runbot.runbot'].warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}') + # self.env['runbot.runbot']._warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}') else: duration = time.time() - start if duration > 1: _logger.info('Dockerfile %s finished build in %s', dockerfile.image_tag, duration) - - def _get_work_path(self): - return os.path.abspath(os.path.join(os.path.dirname(__file__), '../static')) @ormcache() def _host_list(self): @@ -172,11 +166,11 @@ class Host(models.Model): def _get_current_name(self): return config.get('forced_host_name') or fqdn() - def get_running_max(self): + def _get_running_max(self): icp = self.env['ir.config_parameter'] return int(icp.get_param('runbot.runbot_running_max', default=5)) - def set_psql_conn_count(self): + def _set_psql_conn_count(self): _logger.info('Updating psql connection count...') self.ensure_one() with local_pgadmin_cursor() as local_cr: @@ -190,7 +184,7 @@ class Host(models.Model): def _total_workers(self): return sum(host.nb_worker for host in self) - def disable(self): + def _disable(self): """ Reserve host if possible """ self.ensure_one() nb_hosts = self.env['runbot.host'].search_count([]) @@ -271,12 +265,12 @@ class Host(models.Model): with local_pg_cursor(logs_db_name) as local_cr: local_cr.execute("DELETE FROM ir_logging WHERE id in %s", [tuple(local_log_ids)]) - def get_build_domain(self, domain=None): + def _get_build_domain(self, domain=None): domain = domain or [] return [('host', '=', self.name)] + domain - def get_builds(self, domain, order=None): - return self.env['runbot.build'].search(self.get_build_domain(domain), order=order) + def _get_builds(self, domain, order=None): + return self.env['runbot.build'].search(self._get_build_domain(domain), order=order) def _process_messages(self): self.host_message_ids._process() @@ -298,5 +292,5 @@ class MessageQueue(models.Model): # todo consume messages here if records: for record in records: - self.env['runbot.runbot'].warning(f'Host {record.host_id.name} got an unexpected message {record.message}') + self.env['runbot.runbot']._warning(f'Host {record.host_id.name} got an unexpected message {record.message}') self.unlink() diff --git a/runbot/models/event.py b/runbot/models/ir_logging.py similarity index 98% rename from runbot/models/event.py rename to runbot/models/ir_logging.py index 119fbc95..a4666796 100644 --- a/runbot/models/event.py +++ b/runbot/models/ir_logging.py @@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__) TYPES = [(t, t.capitalize()) for t in 'client server runbot subbuild link markdown'.split()] -class runbot_event(models.Model): +class IrLogging(models.Model): _inherit = "ir.logging" _order = 'id' @@ -55,7 +55,7 @@ class runbot_event(models.Model): for ir_logging in self: ir_logging.error_id = False if ir_logging.level in ('ERROR', 'CRITICAL', 'WARNING') and ir_logging.type == 'server': - 
fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes.r_sub('%', ir_logging.message))].append(ir_logging) + fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes._r_sub('%', ir_logging.message))].append(ir_logging) for build_error in self.env['runbot.build.error'].search([('fingerprint', 'in', list(fingerprints.keys()))]): for ir_logging in fingerprints[build_error.fingerprint]: ir_logging.error_id = build_error.id @@ -106,14 +106,6 @@ class RunbotErrorLog(models.Model): for l in self: l.build_url = '/runbot/build/%s' % l.build_id.id - def action_goto_build(self): - self.ensure_one() - return { - "type": "ir.actions.act_url", - "url": "runbot/build/%s" % self.build_id.id, - "target": "new", - } - def _compute_bundle_id(self): slots = self.env['runbot.batch.slot'].search([('build_id', 'in', self.mapped('top_parent_id').ids)]) for l in self: @@ -198,3 +190,11 @@ class RunbotErrorLog(models.Model): WHERE l.level = 'ERROR' )""") + + def action_goto_build(self): + self.ensure_one() + return { + "type": "ir.actions.act_url", + "url": "runbot/build/%s" % self.build_id.id, + "target": "new", + } diff --git a/runbot/models/module.py b/runbot/models/module.py new file mode 100644 index 00000000..e69de29b diff --git a/runbot/models/repo.py b/runbot/models/repo.py index 629c950e..a30b3486 100644 --- a/runbot/models/repo.py +++ b/runbot/models/repo.py @@ -11,18 +11,14 @@ import requests from pathlib import Path from odoo import models, fields, api -from ..common import os, RunbotException, _make_github_session +from odoo.tools import file_open +from ..common import os, RunbotException, make_github_session, sanitize from odoo.exceptions import UserError from odoo.tools.safe_eval import safe_eval _logger = logging.getLogger(__name__) -def _sanitize(name): - for i in '@:/': - name = name.replace(i, '_') - return name - class Trigger(models.Model): """ @@ -89,7 +85,7 @@ class Trigger(models.Model): return [(4, b.id) for b in refs_builds] return [] - def get_version_domain(self): + def _get_version_domain(self): if self.version_domain: return safe_eval(self.version_domain) return [] @@ -147,7 +143,7 @@ class Remote(models.Model): def _compute_remote_name(self): for remote in self: - remote.remote_name = _sanitize(remote.short_name) + remote.remote_name = sanitize(remote.short_name) def create(self, values_list): remote = super().create(values_list) @@ -175,7 +171,7 @@ class Remote(models.Model): url = url.replace(':owner', remote.owner) url = url.replace(':repo', remote.repo_name) url = 'https://api.%s%s' % (remote.repo_domain, url) - session = session or _make_github_session(remote.token) + session = session or make_github_session(remote.token) while url: if recursive: _logger.info('Getting page %s', url) @@ -212,12 +208,12 @@ class Remote(models.Model): else: raise - def check_token(self): + def action_check_token(self): if not self.user_has_groups('runbot.group_runbot_admin'): raise UserError('This action is restricted to admin users') token_results = {} for repo in self: - session = _make_github_session(repo.token) + session = make_github_session(repo.token) if repo.token not in token_results: token_results[repo.token] = session.get("https://api.github.com/user") response = token_results[repo.token] @@ -287,7 +283,7 @@ class Repo(models.Model): upgrade_paths = fields.Char('Upgrade paths', help='Comma separated list of possible upgrade path', default='', tracking=True) sequence = fields.Integer('Sequence', tracking=True) - path = fields.Char(compute='_get_path', 
string='Directory', readonly=True) + path = fields.Char(compute='_compute_path', string='Directory', readonly=True) mode = fields.Selection([('disabled', 'Disabled'), ('poll', 'Poll'), ('hook', 'Hook')], @@ -326,12 +322,12 @@ class Repo(models.Model): for repo in self: repo.hook_time = times.get(repo.id, 0) - def set_hook_time(self, value): + def _set_hook_time(self, value): for repo in self: self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id}) self.invalidate_recordset(['hook_time']) - def set_ref_time(self, value): + def _set_ref_time(self, value): for repo in self: self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id}) self.invalidate_recordset(['get_ref_time']) @@ -349,11 +345,16 @@ class Repo(models.Model): """) @api.depends('name') - def _get_path(self): - """compute the server path of repo from the name""" - root = self.env['runbot.runbot']._root() + def _compute_path(self): + """compute the server path of the repo from its name""" for repo in self: - repo.path = os.path.join(root, 'repo', _sanitize(repo.name)) + repo.path = repo._path() + + def _path(self, *path_parts): + return self.env['runbot.runbot']._path('repo', sanitize(self.name), *path_parts) + + def _source_path(self, *path_parts): + return self.env['runbot.runbot']._path('sources', sanitize(self.name), *path_parts) def _git(self, cmd, errors='strict'): """Execute a git command 'cmd'""" @@ -396,7 +397,7 @@ class Repo(models.Model): def _get_fetch_head_time(self): self.ensure_one() - fname_fetch_head = os.path.join(self.path, 'FETCH_HEAD') + fname_fetch_head = self._path('FETCH_HEAD') if os.path.exists(fname_fetch_head): return os.path.getmtime(fname_fetch_head) return 0 @@ -411,7 +412,7 @@ class Repo(models.Model): commit_limit = time.time() - (60 * 60 * 24 * max_age) if not self.get_ref_time or get_ref_time > self.get_ref_time: try: - self.set_ref_time(get_ref_time) + self._set_ref_time(get_ref_time) fields = ['refname', 'objectname', 'committerdate:unix', 'authorname', 'authoremail', 'subject', 'committername', 'committeremail'] fmt = "%00".join(["%(" + field + ")" for field in fields]) cmd = ['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/*/heads/*'] @@ -423,7 +424,7 @@ class Repo(models.Model): return [] refs = [tuple(field for field in line.split('\x00')) for line in git_refs.split('\n')] refs = [r for r in refs if not re.match(r'^refs/[\w-]+/heads/\d+$', r[0])] # remove branches with integer names to avoid confusion with pr names - refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch'].match_is_base(r[0].split('/')[-1])] + refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch']._match_is_base(r[0].split('/')[-1])] if ignore: refs = [r for r in refs if r[0].split('/')[-1] not in ignore] return refs @@ -443,7 +444,7 @@ class Repo(models.Model): # FIXME WIP names = [r[0].split('/')[-1] for r in refs] branches = self.env['runbot.branch'].search([('name', 'in', names), ('remote_id', 'in', self.remote_ids.ids)]) - ref_branches = {branch.ref(): branch for branch in branches} + ref_branches = {branch._ref(): branch for branch in branches} new_branch_values = [] for ref_name, sha, date, author, author_email, subject, committer, committer_email in refs: if not ref_branches.get(ref_name): @@ -462,7 +463,7 @@ class Repo(models.Model): _logger.info('Creating new branches') new_branches = self.env['runbot.branch'].create(new_branch_values) for branch in new_branches: - ref_branches[branch.ref()] = branch +
ref_branches[branch._ref()] = branch return ref_branches def _find_new_commits(self, refs, ref_branches): @@ -532,11 +533,11 @@ class Repo(models.Model): if repo.mode == 'disabled': _logger.info(f'skipping disabled repo {repo.name}') continue - if os.path.isdir(os.path.join(repo.path, 'refs')): - git_config_path = os.path.join(repo.path, 'config') + if os.path.isdir(repo._path('refs')): + git_config_path = repo._path('config') template_params = {'repo': repo} git_config = self.env['ir.ui.view']._render_template("runbot.git_config", template_params) - with open(git_config_path, 'w') as config_file: + with file_open(git_config_path, 'w') as config_file: config_file.write(str(git_config)) _logger.info('Config updated for repo %s' % repo.name) else: @@ -546,7 +547,7 @@ class Repo(models.Model): """ Clone the remote repo if needed """ self.ensure_one() repo = self - if not os.path.isdir(os.path.join(repo.path, 'refs')): + if not os.path.isdir(repo._path('refs')): _logger.info("Initiating repository '%s' in '%s'" % (repo.name, repo.path)) git_init = subprocess.run(['git', 'init', '--bare', repo.path], stderr=subprocess.PIPE) if git_init.returncode: @@ -561,11 +562,11 @@ class Repo(models.Model): repo = self if not repo.remote_ids: return False - if not os.path.isdir(os.path.join(repo.path)): + if not os.path.isdir(repo.path): os.makedirs(repo.path) force = self._git_init() or force - fname_fetch_head = os.path.join(repo.path, 'FETCH_HEAD') + fname_fetch_head = repo._path('FETCH_HEAD') if not force and os.path.isfile(fname_fetch_head): fetch_time = os.path.getmtime(fname_fetch_head) if repo.mode == 'hook': @@ -599,9 +600,9 @@ class Repo(models.Model): host.message_post(body=message) icp = self.env['ir.config_parameter'].sudo() if icp.get_param('runbot.runbot_disable_host_on_fetch_failure'): - self.env['runbot.runbot'].warning('Host %s got reserved because of fetch failure' % host.name) + self.env['runbot.runbot']._warning('Host %s got reserved because of fetch failure' % host.name) _logger.exception(message) - host.disable() + host._disable() return success def _update(self, force=False, poll_delay=5*60): diff --git a/runbot/models/runbot.py b/runbot/models/runbot.py index 696609a0..b4c345d5 100644 --- a/runbot/models/runbot.py +++ b/runbot/models/runbot.py @@ -11,13 +11,13 @@ from contextlib import contextmanager from requests.exceptions import HTTPError from subprocess import CalledProcessError -from ..common import fqdn, dest_reg, os +from ..common import dest_reg, os, sanitize from ..container import docker_ps, docker_stop from odoo import models, fields +from odoo.exceptions import UserError from odoo.osv import expression -from odoo.tools import config -from odoo.modules.module import get_module_resource +from odoo.tools import config, file_open _logger = logging.getLogger(__name__) @@ -33,14 +33,21 @@ class Runbot(models.AbstractModel): def _root(self): """Return root directory of repository""" - default = os.path.join(os.path.dirname(__file__), '../static') - return os.path.abspath(default) + return os.path.abspath(os.sep.join([os.path.dirname(__file__), '../static'])) + + def _path(self, *path_parts): + """Return a sanitized path under the runbot static root""" + root = self.env['runbot.runbot']._root() + file_path = os.path.normpath(os.sep.join([root] + [sanitize(path) for path_part in path_parts for path in path_part.split(os.sep) if path])) + if not file_path.startswith(root): + raise UserError('Invalid path') + return file_path def _scheduler(self, host): self._gc_testing(host) self._commit() processed = 0 -
for build in host.get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]): + for build in host._get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]): build = build.browse(build.id) processed += 1 build._process_requested_actions() @@ -49,7 +56,7 @@ class Runbot(models.AbstractModel): self._commit() host._process_messages() self._commit() - for build in host.get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host): + for build in host._get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host): build = build.browse(build.id) # remove prefetch ids, manage builds one by one result = build._schedule() if result: @@ -73,7 +80,7 @@ class Runbot(models.AbstractModel): def _assign_pending_builds(self, host, nb_worker, domain=None): if host.assigned_only or nb_worker <= 0: return 0 - reserved_slots = len(host.get_builds([('local_state', 'in', ('testing', 'pending'))])) + reserved_slots = len(host._get_builds([('local_state', 'in', ('testing', 'pending'))])) assignable_slots = (nb_worker - reserved_slots) if assignable_slots > 0: allocated = self._allocate_builds(host, assignable_slots, domain) @@ -83,8 +90,8 @@ class Runbot(models.AbstractModel): return 0 def _get_builds_to_init(self, host): - domain_host = host.get_build_domain() - used_slots = len(host.get_builds([('local_state', '=', 'testing')])) + domain_host = host._get_build_domain() + used_slots = len(host._get_builds([('local_state', '=', 'testing')])) available_slots = host.nb_worker - used_slots build_to_init = self.env['runbot.build'] if available_slots > 0: @@ -94,16 +101,16 @@ class Runbot(models.AbstractModel): return build_to_init def _gc_running(self, host): - running_max = host.get_running_max() + running_max = host._get_running_max() Build = self.env['runbot.build'] - cannot_be_killed_ids = host.get_builds([('keep_running', '=', True)]).ids + cannot_be_killed_ids = host._get_builds([('keep_running', '=', True)]).ids sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True), ('project_id.keep_sticky_running', '=', True)]) cannot_be_killed_ids += [ build.id for build in sticky_bundles.mapped('last_batchs.slot_ids.build_id') if build.host == host.name ][:running_max] - build_ids = host.get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids + build_ids = host._get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids for build in Build.browse(build_ids)[running_max:]: build._kill() @@ -112,7 +119,7 @@ class Runbot(models.AbstractModel): """garbage collect builds that could be killed""" # decide if we need room Build = self.env['runbot.build'] - domain_host = host.get_build_domain() + domain_host = host._get_build_domain() testing_builds = Build.search(domain_host + [('local_state', 'in', ['testing', 'pending']), ('requested_action', '!=', 'deathrow')]) used_slots = len(testing_builds) available_slots = host.nb_worker - used_slots @@ -153,9 +160,9 @@ class Runbot(models.AbstractModel): env = self.env settings = {} settings['port'] = config.get('http_port') - settings['runbot_static'] = os.path.join(get_module_resource('runbot', 'static'), '') + settings['runbot_static'] = self.env['runbot.runbot']._root() + os.sep settings['base_url'] = self.get_base_url() - nginx_dir = os.path.join(self._root(), 'nginx') + nginx_dir = self.env['runbot.runbot']._path('nginx') settings['nginx_dir'] = nginx_dir settings['re_escape'] =
re.escape host_name = self.env['runbot.host']._get_current_name() @@ -166,17 +173,17 @@ class Runbot(models.AbstractModel): nginx_config = env['ir.ui.view']._render_template("runbot.nginx_config", settings) os.makedirs(nginx_dir, exist_ok=True) content = None - nginx_conf_path = os.path.join(nginx_dir, 'nginx.conf') + nginx_conf_path = self.env['runbot.runbot']._path('nginx', 'nginx.conf') content = '' if os.path.isfile(nginx_conf_path): - with open(nginx_conf_path, 'r') as f: + with file_open(nginx_conf_path, 'r') as f: content = f.read() if content != nginx_config: _logger.info('reload nginx') with open(nginx_conf_path, 'w') as f: f.write(str(nginx_config)) try: - pid = int(open(os.path.join(nginx_dir, 'nginx.pid')).read().strip(' \n')) + pid = int(file_open(self.env['runbot.runbot']._path('nginx', 'nginx.pid')).read().strip(' \n')) os.kill(pid, signal.SIGHUP) except Exception: _logger.info('start nginx') @@ -210,7 +217,7 @@ class Runbot(models.AbstractModel): runbot_do_fetch = get_param('runbot.runbot_do_fetch') runbot_do_schedule = get_param('runbot.runbot_do_schedule') host = self.env['runbot.host']._get_current() - host.set_psql_conn_count() + host._set_psql_conn_count() host.last_start_loop = fields.Datetime.now() self._commit() # Bootstrap @@ -227,18 +234,16 @@ class Runbot(models.AbstractModel): self._fetch_loop_turn(host, pull_info_failures) if runbot_do_schedule: sleep_time = self._scheduler_loop_turn(host, update_frequency) - self.sleep(sleep_time) + time.sleep(sleep_time) else: - self.sleep(update_frequency) + time.sleep(update_frequency) self._commit() host.last_end_loop = fields.Datetime.now() - def sleep(self, t): - time.sleep(t) def _fetch_loop_turn(self, host, pull_info_failures, default_sleep=1): - with self.manage_host_exception(host) as manager: + with self._manage_host_exception(host) as manager: repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')]) processing_batch = self.env['runbot.batch'].search([('state', 'in', ('preparing', 'ready'))], order='id asc') preparing_batch = processing_batch.filtered(lambda b: b.state == 'preparing') @@ -261,7 +266,7 @@ class Runbot(models.AbstractModel): self.env.clear() pull_number = e.response.url.split('/')[-1] pull_info_failures[pull_number] = time.time() - self.warning('Pr pull info failed for %s', pull_number) + self._warning('Pr pull info failed for %s', pull_number) self._commit() if processing_batch: @@ -283,13 +288,13 @@ class Runbot(models.AbstractModel): return manager.get('sleep', default_sleep) def _scheduler_loop_turn(self, host, sleep=5): - with self.manage_host_exception(host) as manager: + with self._manage_host_exception(host) as manager: if self._scheduler(host): sleep = 0.1 return manager.get('sleep', sleep) @contextmanager - def manage_host_exception(self, host): + def _manage_host_exception(self, host): res = {} try: yield res @@ -335,7 +340,7 @@ class Runbot(models.AbstractModel): to_keep = set() repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')]) for repo in repos: - repo_source = os.path.join(self._root(), 'sources', repo.name, '*') + repo_source = repo._source_path('*') for source_dir in glob.glob(repo_source): if source_dir not in cannot_be_deleted_path: to_delete.add(source_dir) @@ -387,9 +392,9 @@ class Runbot(models.AbstractModel): repo._git(['gc', '--prune=all', '--quiet']) except CalledProcessError as e: message = f'git gc failed for {repo.name} on {host.name} with exit status {e.returncode} and message "{e.output[:60]} ..."' - self.warning(message) + 
self._warning(message) - def warning(self, message, *args): + def _warning(self, message, *args): if args: message = message % args existing = self.env['runbot.warning'].search([('message', '=', message)], limit=1) diff --git a/runbot/models/team.py b/runbot/models/team.py index 63635402..1216b8ef 100644 --- a/runbot/models/team.py +++ b/runbot/models/team.py @@ -4,7 +4,7 @@ import hashlib import logging import re -from ..common import _make_github_session +from ..common import make_github_session from collections import defaultdict from dateutil.relativedelta import relativedelta from fnmatch import fnmatch @@ -98,7 +98,7 @@ class RunbotTeam(models.Model): for team in self: if team.github_team: url = f"https://api.github.com/orgs/{team.organisation}/teams/{team.github_team}" - session = _make_github_session(team.project_id.sudo().token) + session = make_github_session(team.project_id.sudo().token) response = session.get(url) if response.status_code != 200: raise UserError(f'Cannot find team {team.github_team}') diff --git a/runbot/templates/build.xml b/runbot/templates/build.xml index e8780d6b..ebea394f 100644 --- a/runbot/templates/build.xml +++ b/runbot/templates/build.xml @@ -148,7 +148,7 @@ @@ -159,7 +159,7 @@
[the XML bodies of the template hunks above and below were stripped during extraction; only hunk headers and surviving text nodes are kept]
Total time: Stats:
@@ -204,7 +204,7 @@
@@ -378,7 +378,7 @@
diff --git a/runbot/templates/bundle.xml b/runbot/templates/bundle.xml index d9c07240..ec096a96 100644 --- a/runbot/templates/bundle.xml +++ b/runbot/templates/bundle.xml
@@ -39,7 +39,7 @@ Branches
diff --git a/runbot/templates/dashboard.xml b/runbot/templates/dashboard.xml index 66a34307..ec6fe7a7 100644 --- a/runbot/templates/dashboard.xml +++ b/runbot/templates/dashboard.xml
@@ -26,7 +26,7 @@
@@ -163,12 +163,12 @@
diff --git a/runbot/templates/frontend.xml b/runbot/templates/frontend.xml index 08bff742..0210a657 100644 --- a/runbot/templates/frontend.xml +++ b/runbot/templates/frontend.xml
@@ -87,7 +87,7 @@ View batch...
diff --git a/runbot/templates/utils.xml b/runbot/templates/utils.xml index 1d11b232..aee9e332 100644 --- a/runbot/templates/utils.xml +++ b/runbot/templates/utils.xml
@@ -190,10 +190,10 @@
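The renames above all follow one convention: internal helpers take a leading underscore (`get_builds` -> `_get_builds`, `r_sub` -> `_r_sub`), methods meant to be triggered from the UI take an `action_` prefix (`submit` -> `action_submit`, `link_errors` -> `action_link_errors`), and the action methods are regrouped under a dedicated `Actions` banner at the end of the class. A minimal sketch of the resulting class shape; the `runbot.example` model and its method bodies are illustrative, not part of the patch:

from odoo import models

class RunbotExample(models.Model):
    _name = 'runbot.example'
    _description = 'Sketch of the naming convention applied by this patch'

    def _do_the_work(self):
        # leading underscore: private helper, kept out of the public RPC surface
        return True

    ####################
    # Actions
    ####################

    def action_do_the_work(self):
        # `action_` prefix: UI entry point (button or server action),
        # grouped at the end of the class
        for record in self:
            record._do_the_work()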
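The other recurring change is path handling: `os.path.join` calls are replaced by `runbot.runbot._path`, which splits every argument on the separator, passes each segment through `sanitize` and refuses any result that would resolve outside the static root. Below is a standalone sketch of that logic, runnable without Odoo; `STATIC_ROOT`, `build_path` and the exact `sanitize` substitutions are illustrative assumptions, not the patch's API:

import os

STATIC_ROOT = '/tmp/runbot_static'  # illustrative; the real root is <module>/static

def sanitize(name):
    # stand-in for runbot.common.sanitize: neutralize separators and
    # parent-directory segments (assumed behaviour, enough for the sketch)
    for token in ('/', '\\', '..'):
        name = name.replace(token, '_')
    return name

def build_path(root, *path_parts):
    # mirrors runbot.runbot._path: split, sanitize and re-join every segment,
    # then reject anything that would land outside the root
    segments = [sanitize(part) for path_part in path_parts for part in path_part.split(os.sep) if part]
    file_path = os.path.normpath(os.sep.join([root] + segments))
    if not file_path.startswith(root):
        raise ValueError('Invalid path')  # the model raises UserError instead
    return file_path

print(build_path(STATIC_ROOT, 'repo', 'server'))
# -> /tmp/runbot_static/repo/server
print(build_path(STATIC_ROOT, 'sources', '../../etc/passwd'))
# -> /tmp/runbot_static/sources/_/_/etc/passwd (the traversal is defused)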