[FIX] runbot: various fixes and ref

- clean thread username
- allow writing on params for debug (forbidding it was mainly useful
  at the beginning)
- improve some guidelines about method and action naming/ordering
- move some code for a cleaner organisation
- remove some useless request.env.user (not useful anymore)
Xavier-Do 2023-09-18 14:37:30 +02:00 committed by Christophe Monniez
parent 8d619d11f8
commit 435ac449f5
49 changed files with 530 additions and 662 deletions
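A recurring pattern in the diffs below: Odoo refuses to dispatch RPC calls to methods whose name starts with an underscore, and safe_eval'd code (QWeb expressions, ir.actions.server) rejects underscore-prefixed attribute access, so internal helpers gain a `_` prefix while UI entry points are exposed under an `action_` prefix. A minimal sketch of the convention (model and method names are hypothetical, not from this commit):

import logging

from odoo import models

_logger = logging.getLogger(__name__)


class Example(models.Model):
    _name = 'runbot.example'
    _description = 'Naming convention example'

    def _heavy_recompute(self):
        # private helper: the leading underscore hides it from XML-RPC,
        # QWeb expressions and server-action code
        _logger.info('recomputing %s', self)
        return True

    def action_recompute(self):
        # public entry point for buttons and server actions,
        # delegating to the private helper
        return self._heavy_recompute()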

View File

@@ -17,7 +17,10 @@ class UserFilter(logging.Filter):
         uid = getattr(threading.current_thread(), 'uid', None)
         if uid is None:
             return True
-        user_name = getattr(threading.current_thread(), 'user_name', 'user')
+        user_name = 'user'
+        if hasattr(threading.current_thread(), 'user_name'):
+            user_name = threading.current_thread().user_name
+            del(threading.current_thread().user_name)
         message_parts[1] = f'({user_name}:{uid})'
         record.msg = ' '.join(message_parts)
         return True
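The new branch reads the thread-local name and then deletes the attribute, so a stale user_name cannot leak into later records logged from the same worker thread. A self-contained sketch of the pattern (the message_parts handling above is simplified away here):

import logging
import threading


class UserFilter(logging.Filter):
    def filter(self, record):
        uid = getattr(threading.current_thread(), 'uid', None)
        if uid is None:
            return True  # no user attached to this thread, leave the record alone
        user_name = 'user'
        if hasattr(threading.current_thread(), 'user_name'):
            user_name = threading.current_thread().user_name
            del threading.current_thread().user_name  # consume once
        record.msg = f'({user_name}:{uid}) {record.msg}'
        return True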

View File

@@ -6,7 +6,7 @@
     'author': "Odoo SA",
     'website': "http://runbot.odoo.com",
     'category': 'Website',
-    'version': '5.3',
+    'version': '5.4',
     'application': True,
     'depends': ['base', 'base_automation', 'website'],
     'data': [

View File

@@ -15,7 +15,7 @@ from datetime import timedelta
 from babel.dates import format_timedelta
 from markupsafe import Markup
-from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT, html_escape
+from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT, html_escape, file_open

 _logger = logging.getLogger(__name__)
@@ -44,7 +44,7 @@ def now():
 def findall(filename, pattern):
-    return set(re.findall(pattern, open(filename).read()))
+    return set(re.findall(pattern, file_open(filename).read()))

 def grep(filename, string):
@@ -54,7 +54,7 @@ def grep(filename, string):
 def find(filename, string):
-    return open(filename).read().find(string)
+    return file_open(filename).read().find(string)

 def uniq_list(l):
@@ -69,7 +69,7 @@ def rfind(filename, pattern):
     """Determine in something in filename matches the pattern"""
     if os.path.isfile(filename):
         regexp = re.compile(pattern, re.M)
-        with open(filename, 'r') as f:
+        with file_open(filename, 'r') as f:
             if regexp.findall(f.read()):
                 return True
     return False
@@ -169,9 +169,36 @@ def pseudo_markdown(text):
     return text

-def _make_github_session(token):
+def make_github_session(token):
     session = requests.Session()
     if token:
         session.auth = (token, 'x-oauth-basic')
     session.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
     return session
+
+
+def sanitize(name):
+    for i in ['@', ':', '/', '\\', '..']:
+        name = name.replace(i, '_')
+    return name
+
+
+class ReProxy():
+    @classmethod
+    def match(cls, *args, **kwrags):
+        return re.match(*args, **kwrags)
+
+    @classmethod
+    def search(cls, *args, **kwrags):
+        return re.search(*args, **kwrags)
+
+    @classmethod
+    def compile(cls, *args, **kwrags):
+        return re.compile(*args, **kwrags)
+
+    @classmethod
+    def findall(cls, *args, **kwrags):
+        return re.findall(*args, **kwrags)
+
+    VERBOSE = re.VERBOSE
+    MULTILINE = re.MULTILINE
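sanitize and ReProxy move into common.py so they can be shared (build.py and build_config.py below import them). ReProxy exists because config-step code runs under safe_eval, which cannot import modules; wrapping the re functions in classmethods lets the whole class be handed into the evaluation context. A quick, verifiable illustration:

# the proxied calls behave exactly like the re module functions
assert ReProxy.findall(r'\d+', 'build 123 on host 7') == ['123', '7']
assert ReProxy.match(r'runbot', 'runbot_merge') is not None
assert sanitize('odoo/enterprise:16.0') == 'odoo_enterprise_16.0'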

View File

@@ -17,6 +17,8 @@ import subprocess
 import time
 import warnings

+from odoo.tools import file_path
+
 # unsolved issue https://github.com/docker/docker-py/issues/2928
 with warnings.catch_warnings():
     warnings.filterwarnings(
@@ -148,6 +150,9 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
     _logger.info('Docker run command: %s', run_cmd)
     run_cmd = 'cd /data/build;touch start-%s;%s;cd /data/build;touch end-%s' % (container_name, run_cmd, container_name)
     docker_clear_state(container_name, build_dir)  # ensure that no state are remaining
+    build_dir = file_path(build_dir)
+    file_path(os.path.dirname(log_path))
+
     open(os.path.join(build_dir, 'exist-%s' % container_name), 'w+').close()
     logs = open(log_path, 'w')
     logs.write("Docker command:\n%s\n=================================================\n" % cmd_object)
@@ -190,7 +195,7 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
         detach=True
     )
     if container.status not in ('running', 'created'):
-        _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)  # TODO cleanup
+        _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)
     else:
         _logger.info('Started Docker container %s (%s)', container_name, container.short_id)
     return
@@ -288,27 +293,3 @@ def sanitize_container_name(name):
     """Returns a container name with unallowed characters removed"""
     name = re.sub('^[^a-zA-Z0-9]+', '', name)
     return re.sub('[^a-zA-Z0-9_.-]', '', name)
-
-
-##############################################################################
-#  Ugly monkey patch to set runbot in testing mode
-#  No Docker will be started, instead a fake docker_run function will be used
-##############################################################################
-
-if os.environ.get('RUNBOT_MODE') == 'test':
-    _logger.warning('Using Fake Docker')
-
-    def fake_docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None, env_variables=None, *args, **kwargs):
-        _logger.info('Docker Fake Run: %s', run_cmd)
-        open(os.path.join(build_dir, 'exist-%s' % container_name), 'w').write('fake end')
-        open(os.path.join(build_dir, 'start-%s' % container_name), 'w').write('fake start\n')
-        open(os.path.join(build_dir, 'end-%s' % container_name), 'w').write('fake end')
-        with open(log_path, 'w') as log_file:
-            log_file.write('Fake docker_run started\n')
-            log_file.write('run_cmd: %s\n' % run_cmd)
-            log_file.write('build_dir: %s\n' % container_name)
-            log_file.write('container_name: %s\n' % container_name)
-            log_file.write('.modules.loading: Modules loaded.\n')
-            log_file.write('Initiating shutdown\n')
-
-    docker_run = fake_docker_run
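The file_path() calls added above act as a guard, raising if build_dir or the log directory fall outside the locations Odoo allows. The kept sanitize_container_name does the equivalent for Docker's naming rules; a quick illustration of its two regexes (plain re, verifiable):

import re

def sanitize_container_name(name):
    """Returns a container name with unallowed characters removed"""
    name = re.sub('^[^a-zA-Z0-9]+', '', name)   # names must start alphanumeric
    return re.sub('[^a-zA-Z0-9_.-]', '', name)  # only [a-zA-Z0-9_.-] survive

assert sanitize_container_name('--odoo/enterprise:17.0') == 'odooenterprise17.0'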

View File

@@ -44,7 +44,7 @@ class RunbotBadge(Controller):
         if not builds:
             state = 'testing'
         else:
-            result = builds.result_multi()
+            result = builds._result_multi()
             if result == 'ok':
                 state = 'success'
             elif result == 'warn':

View File

@@ -395,7 +395,7 @@ class Runbot(Controller):
             'scheduled_count': scheduled_count,
             'bundles': bundles,
             'hosts_data': hosts_data,
-            'auto_tags': request.env['runbot.build.error'].disabling_tags(),
+            'auto_tags': request.env['runbot.build.error']._disabling_tags(),
             'build_errors': request.env['runbot.build.error'].search([('random', '=', True)]),
             'kwargs': kwargs,
             'title': 'monitoring'

View File

@@ -35,13 +35,13 @@ class Hook(http.Controller):
         # force update of dependencies too in case a hook is lost
         if not payload or event == 'push':
-            remote.repo_id.set_hook_time(time.time())
+            remote.repo_id._set_hook_time(time.time())
         elif event == 'pull_request':
             pr_number = payload.get('pull_request', {}).get('number', '')
             branch = request.env['runbot.branch'].sudo().search([('remote_id', '=', remote.id), ('name', '=', pr_number)])
-            branch.recompute_infos(payload.get('pull_request', {}))
+            branch._recompute_infos(payload.get('pull_request', {}))
             if payload.get('action') in ('synchronize', 'opened', 'reopened'):
-                remote.repo_id.set_hook_time(time.time())
+                remote.repo_id._set_hook_time(time.time())
             # remaining recurrent actions: labeled, review_requested, review_request_removed
         elif event == 'delete':
             if payload.get('ref_type') == 'branch':

View File

@@ -6,7 +6,7 @@
         <field name="type">ir.actions.server</field>
         <field name="state">code</field>
         <field name="code">
-            records.link_errors()
+            records.action_link_errors()
         </field>
     </record>
     <record model="ir.actions.server" id="action_clean_build_errors">
@@ -16,7 +16,7 @@
         <field name="type">ir.actions.server</field>
         <field name="state">code</field>
         <field name="code">
-            records.clean_content()
+            records.action_clean_content()
         </field>
     </record>
     <record model="ir.actions.server" id="action_reassign_build_errors">
@@ -26,7 +26,7 @@
         <field name="type">ir.actions.server</field>
         <field name="state">code</field>
         <field name="code">
-            records.assign()
+            records.action_assign()
         </field>
     </record>
 </odoo>
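These three edits follow from the renames: code in an ir.actions.server runs through safe_eval, which rejects underscore-prefixed attributes, so each action must now target a public action_* method. A sketch of what the model side plausibly looks like (the wrapper body is illustrative, not taken from the diff):

from odoo import models


class BuildError(models.Model):
    _inherit = 'runbot.build.error'

    def action_link_errors(self):
        # public entry point kept callable from the server action above;
        # the actual work lives in a private helper
        return self._link_errors()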

View File

@@ -0,0 +1,82 @@
+import logging
+
+from odoo import api, SUPERUSER_ID
+
+_logger = logging.getLogger(__name__)
+
+
+def migrate(cr, version):
+    env = api.Environment(cr, SUPERUSER_ID, {})
+    private = [
+        'set_hook_time',
+        'set_ref_time',
+        'check_token',
+        'get_version_domain',
+        'get_builds',
+        'get_build_domain',
+        'disable',
+        'set_psql_conn_count',
+        'get_running_max',
+        'branch_groups',
+        'consistency_warning',
+        'fa_link_type',
+        'make_python_ctx',
+        'parse_config',
+        'get_color_class',
+        'get_formated_build_time',
+        'filter_patterns',
+        'http_log_url',
+        'result_multi',
+        'match_is_base',
+        'link_errors',
+        'clean_content',
+        'test_tags_list',
+        'disabling_tags',
+        'step_ids',
+        'recompute_infos',
+        'warning',
+        'is_file',
+    ]
+    removed = [
+        "get_formated_build_age",
+        "get_formated_job_time",
+        "make_dirs",
+        "build_type_label",
+    ]
+    for method in private:
+        pattern = f'.{method}('
+        replacepattern = f'._{method}('
+        views = env['ir.ui.view'].search([('arch_db', 'like', pattern)])
+        if views:
+            _logger.info(f'Some views contains "{pattern}": {views}')
+            for view in views:
+                view.arch_db = view.arch_db.replace(pattern, replacepattern)
+
+    for method in removed:
+        pattern = f'.{method}('
+        views = env['ir.ui.view'].search([('arch_db', 'like', pattern)])
+        if views:
+            _logger.error(f'Some views contains "{pattern}": {views}')
+
+    for method in removed:
+        pattern = f'.{method}('
+        steps = env['runbot.build.config.step'].search(['|', ('python_code', 'like', pattern), ('python_result_code', 'like', pattern)])
+        if steps:
+            _logger.error(f'Some step contains "{pattern}": {steps}')
+
+    for method in private:
+        pattern = f'.{method}('
+        replacepattern = f'._{method}('
+        steps = env['runbot.build.config.step'].search(['|', ('python_code', 'like', pattern), ('python_result_code', 'like', pattern)])
+        for step in steps:
+            python_code = pattern in step.python_code
+            python_result_code = pattern in step.python_result_code
+            if replacepattern not in step.python_code and python_code:
+                _logger.warning(f'Some step python_code contains "{pattern}": {step}')
+                python_code = False
+            if replacepattern not in step.python_result_code and python_result_code:
+                _logger.warning(f'Some step python_result_code contains "{pattern}": {step}')
+                python_result_code = False
+            if python_code or python_result_code:
+                _logger.info(f'Some step python_code contains "{pattern}": {step} but looks like it was adapted')
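The migration's core is a plain substring swap of ".method(" for "._method(" wherever a view arch or a config step calls a renamed helper. A tiny self-check of that substitution (the arch snippet is hypothetical):

method = 'consistency_warning'
pattern, replacepattern = f'.{method}(', f'._{method}('

arch = '<t t-esc="bundle.consistency_warning()"/>'
assert arch.replace(pattern, replacepattern) == '<t t-esc="bundle._consistency_warning()"/>'

# re-running is harmless: '.consistency_warning(' is not a substring of the
# already-renamed '._consistency_warning(', so nothing is replaced twice
assert pattern not in arch.replace(pattern, replacepattern)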

View File

@@ -12,11 +12,11 @@ from . import commit
 from . import custom_trigger
 from . import database
 from . import dockerfile
-from . import event
 from . import host
 from . import ir_cron
 from . import ir_http
 from . import ir_qweb
+from . import ir_logging
 from . import project
 from . import repo
 from . import res_config_settings

View File

@@ -47,7 +47,7 @@ class Batch(models.Model):
             else:
                 batch.buildage_age = 0

-    def get_formated_age(self):
+    def _get_formated_age(self):
         return s2human_long(self.age)

     def _url(self):
@@ -154,9 +154,9 @@ class Batch(models.Model):
         if not self.bundle_id.base_id:
             # in some case the base can be detected lately. If a bundle has no base, recompute the base before preparing
             self.bundle_id._compute_base_id()
-        for level, message in self.bundle_id.consistency_warning():
+        for level, message in self.bundle_id._consistency_warning():
             if level == "warning":
-                self.warning("Bundle warning: %s" % message)
+                self._warning("Bundle warning: %s" % message)

         self.state = 'ready'
@@ -174,7 +174,7 @@ class Batch(models.Model):
             ('category_id', '=', self.category_id.id)
         ]).filtered(
             lambda t: not t.version_domain or \
-            self.bundle_id.version_id.filtered_domain(t.get_version_domain())
+            self.bundle_id.version_id.filtered_domain(t._get_version_domain())
         )

         pushed_repo = self.commit_link_ids.mapped('commit_id.repo_id')
@@ -185,7 +185,7 @@ class Batch(models.Model):
         ######################################
         # Find missing commits
         ######################################
-        def fill_missing(branch_commits, match_type):
+        def _fill_missing(branch_commits, match_type):
             if branch_commits:
                 for branch, commit in branch_commits.items():  # branch first in case pr is closed.
                     nonlocal missing_repos
@@ -218,7 +218,7 @@ class Batch(models.Model):
         # 1.1 FIND missing commit in bundle heads
         if missing_repos:
-            fill_missing({branch: branch.head for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True)}, 'head')
+            _fill_missing({branch: branch.head for branch in bundle.branch_ids.sorted(lambda b: (b.head.id, b.is_pr), reverse=True)}, 'head')

         # 1.2 FIND merge_base info for those commits
         # use last not preparing batch to define previous repos_heads instead of branches heads:
@@ -253,14 +253,14 @@ class Batch(models.Model):
         if batch:
             if missing_repos:
                 self._log('Using batch [%s](%s) to define missing commits', batch.id, batch._url())
-                fill_missing({link.branch_id: link.commit_id for link in batch.commit_link_ids}, 'base_match')
+                _fill_missing({link.branch_id: link.commit_id for link in batch.commit_link_ids}, 'base_match')
             # check if all mergebase match reference batch
             batch_exiting_commit = batch.commit_ids.filtered(lambda c: c.repo_id in merge_base_commits.repo_id)
             not_matching = (batch_exiting_commit - merge_base_commits)
             if not_matching and not auto_rebase:
                 message = 'Only %s out of %s merge base matched. You may want to rebase your branches to ensure compatibility' % (len(merge_base_commits)-len(not_matching), len(merge_base_commits))
                 suggestions = [('Tip: rebase %s to %s' % (commit.repo_id.name, commit.name)) for commit in not_matching]
-                self.warning('%s\n%s' % (message, '\n'.join(suggestions)))
+                self._warning('%s\n%s' % (message, '\n'.join(suggestions)))
         else:
             self._log('No reference batch found to fill missing commits')
@@ -268,14 +268,14 @@ class Batch(models.Model):
         if missing_repos:
             if not bundle.is_base:
                 self._log('Not all commit found in bundle branches and base batch. Fallback on base branches heads.')
-            fill_missing({branch: branch.head for branch in self.bundle_id.base_id.branch_ids}, 'base_head')
+            _fill_missing({branch: branch.head for branch in self.bundle_id.base_id.branch_ids}, 'base_head')

         # 3.2 FIND missing commit in master base heads
         if missing_repos:  # this is to get an upgrade branch.
             if not bundle.is_base:
                 self._log('Not all commit found in current version. Fallback on master branches heads.')
             master_bundle = self.env['runbot.version']._get('master').with_context(project_id=self.bundle_id.project_id.id).base_bundle_id
-            fill_missing({branch: branch.head for branch in master_bundle.branch_ids}, 'base_head')
+            _fill_missing({branch: branch.head for branch in master_bundle.branch_ids}, 'base_head')
         # 4. FIND missing commit in foreign project
         if missing_repos:
@@ -283,10 +283,10 @@ class Batch(models.Model):
             if foreign_projects:
                 self._log('Not all commit found. Fallback on foreign base branches heads.')
                 foreign_bundles = bundle.search([('name', '=', bundle.name), ('project_id', 'in', foreign_projects.ids)])
-                fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids').sorted('is_pr', reverse=True)}, 'head')
+                _fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids').sorted('is_pr', reverse=True)}, 'head')
                 if missing_repos:
                     foreign_bundles = bundle.search([('name', '=', bundle.base_id.name), ('project_id', 'in', foreign_projects.ids)])
-                    fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids')}, 'base_head')
+                    _fill_missing({branch: branch.head for branch in foreign_bundles.mapped('branch_ids')}, 'base_head')

         # CHECK missing commit
         if missing_repos:
@@ -309,7 +309,7 @@ class Batch(models.Model):
             trigger_custom = trigger_customs.get(trigger, self.env['runbot.bundle.trigger.custom'])
             trigger_repos = trigger.repo_ids | trigger.dependency_ids
             if trigger_repos & missing_repos:
-                self.warning('Missing commit for repo %s for trigger %s', (trigger_repos & missing_repos).mapped('name'), trigger.name)
+                self._warning('Missing commit for repo %s for trigger %s', (trigger_repos & missing_repos).mapped('name'), trigger.name)
                 continue
             # in any case, search for an existing build
             config = trigger_custom.config_id or trigger.config_id
@@ -365,7 +365,7 @@ class Batch(models.Model):
             commit = link_commit.commit_id
             base_head = base_head_per_repo.get(commit.repo_id.id)
             if not base_head:
-                self.warning('No base head found for repo %s', commit.repo_id.name)
+                self._warning('No base head found for repo %s', commit.repo_id.name)
                 continue
             link_commit.base_commit_id = base_head
             merge_base_sha = False
@@ -399,9 +399,9 @@ class Batch(models.Model):
                 except ValueError:  # binary files
                     pass
             except subprocess.CalledProcessError:
-                self.warning('Commit info failed between %s and %s', commit.name, base_head.name)
+                self._warning('Commit info failed between %s and %s', commit.name, base_head.name)

-    def warning(self, message, *args):
+    def _warning(self, message, *args):
         self.has_warning = True
         _logger.warning('batch %s: ' + message, self.id, *args)
         self._log(message, *args, level='WARNING')
@@ -431,13 +431,13 @@ class BatchLog(models.Model):
         return pseudo_markdown(self.message)

+fa_link_types = {'created': 'hashtag', 'matched': 'link', 'rebuild': 'refresh'}

 class BatchSlot(models.Model):
     _name = 'runbot.batch.slot'
     _description = 'Link between a bundle batch and a build'
     _order = 'trigger_id,id'

-    _fa_link_type = {'created': 'hashtag', 'matched': 'link', 'rebuild': 'refresh'}

     batch_id = fields.Many2one('runbot.batch', index=True)
     trigger_id = fields.Many2one('runbot.trigger', index=True)
@@ -458,8 +458,8 @@ class BatchSlot(models.Model):
         for slot in self:
             slot.all_build_ids = all_builds.filtered_domain([('id', 'child_of', slot.build_id.ids)])

-    def fa_link_type(self):
-        return self._fa_link_type.get(self.link_type, 'exclamation-triangle')
+    def _fa_link_type(self):
+        return fa_link_types.get(self.link_type, 'exclamation-triangle')

     def _create_missing_build(self):
         """Create a build when the slot does not have one"""
View File

@@ -106,9 +106,6 @@ class Branch(models.Model):
                 break

         for branch in self:
-            #branch.target_branch_name = False
-            #branch.pull_head_name = False
-            #branch.pull_head_remote_id = False
             if branch.name:
                 pi = branch.is_pr and (pull_info or pull_info_dict.get((branch.remote_id, branch.name)) or branch._get_pull_info())
                 if pi:
@@ -153,7 +150,7 @@ class Branch(models.Model):
             project = branch.remote_id.repo_id.project_id or self.env.ref('runbot.main_project')
             project.ensure_one()
             bundle = self.env['runbot.bundle'].search([('name', '=', name), ('project_id', '=', project.id)])
-            need_new_base = not bundle and branch.match_is_base(name)
+            need_new_base = not bundle and branch._match_is_base(name)
             if (bundle.is_base or need_new_base) and branch.remote_id != branch.remote_id.repo_id.main_remote_id:
                 _logger.warning('Trying to add a dev branch to base bundle, falling back on dummy bundle')
                 bundle = dummy
@@ -203,18 +200,17 @@ class Branch(models.Model):
         remote = self.remote_id
         if self.is_pr:
             _logger.info('Getting info for %s', self.name)
-            return remote._github('/repos/:owner/:repo/pulls/%s' % self.name, ignore_errors=False) or {}  # TODO catch and send a managable exception
+            return remote._github('/repos/:owner/:repo/pulls/%s' % self.name, ignore_errors=False) or {}
         return {}

-    def ref(self):
+    def _ref(self):
         return 'refs/%s/%s/%s' % (
             self.remote_id.remote_name,
             'pull' if self.is_pr else 'heads',
             self.name
         )

-    def recompute_infos(self, payload=None):
-        """ public method to recompute infos on demand """
+    def _recompute_infos(self, payload=None):
         was_draft = self.draft
         was_alive = self.alive
         init_target_branch_name = self.target_branch_name
@@ -242,7 +238,7 @@ class Branch(models.Model):
             self.bundle_id._force()

     @api.model
-    def match_is_base(self, name):
+    def _match_is_base(self, name):
         """match against is_base_regex ir.config_parameter"""
         if not name:
             return False
@@ -250,6 +246,9 @@ class Branch(models.Model):
         regex = icp.get_param('runbot.runbot_is_base_regex', False)
         if regex:
             return re.match(regex, name)

+    def action_recompute_infos(self):
+        return self._recompute_infos()
+

 class RefLog(models.Model):

View File

@@ -1,27 +1,31 @@
 # -*- coding: utf-8 -*-
+import datetime
 import fnmatch
+import getpass
+import hashlib
 import logging
 import pwd
 import re
 import shutil
+import subprocess
 import time
-import datetime
-import hashlib
-from ..common import dt2time, fqdn, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall
-from ..container import docker_stop, docker_state, Command, docker_run
-from ..fields import JsonDictField
-from odoo import models, fields, api
-from odoo.exceptions import UserError, ValidationError
-from odoo.http import request
-from odoo.tools import appdirs
-from odoo.tools.safe_eval import safe_eval
+import uuid

 from collections import defaultdict
 from pathlib import Path
 from psycopg2 import sql
 from psycopg2.extensions import TransactionRollbackError
-import getpass
-import uuid
+
+from ..common import dt2time, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException, findall, sanitize
+from ..container import docker_stop, docker_state, Command, docker_run
+from ..fields import JsonDictField
+from odoo import models, fields, api
+from odoo.exceptions import ValidationError
+from odoo.tools import file_open, file_path
+from odoo.tools.safe_eval import safe_eval

 _logger = logging.getLogger(__name__)
@@ -123,12 +127,6 @@ class BuildParameters(models.Model):
     def _find_existing(self, fingerprint):
         return self.env['runbot.build.params'].search([('fingerprint', '=', fingerprint)], limit=1)

-    def write(self, vals):
-        if not self.env.registry.loaded:
-            return
-        raise UserError('Params cannot be modified')

 class BuildResult(models.Model):
     # remove duplicate management
     # instead, link between bundle_batch and build
@@ -209,10 +207,6 @@ class BuildResult(models.Model):
         default='normal',
         string='Build type')

-    # what about parent_id and duplmicates?
-    # -> always create build, no duplicate? (make sence since duplicate should be the parent and params should be inherited)
-    # -> build_link ?
     parent_id = fields.Many2one('runbot.build', 'Parent Build', index=True)
     parent_path = fields.Char('Parent path', index=True, unaccent=False)
     top_parent = fields.Many2one('runbot.build', compute='_compute_top_parent')
@@ -253,7 +247,7 @@ class BuildResult(models.Model):
     @api.depends('params_id.config_id')
     def _compute_log_list(self):  # storing this field because it will be access trhoug repo viewn and keep track of the list at create
         for build in self:
-            build.log_list = ','.join({step.name for step in build.params_id.config_id.step_ids() if step._has_log()})
+            build.log_list = ','.join({step.name for step in build.params_id.config_id.step_ids if step._has_log()})
         # TODO replace logic, add log file to list when executed (avoid 404, link log on docker start, avoid fake is_docker_step)

     @api.depends('children_ids.global_state', 'local_state')
@@ -335,7 +329,7 @@ class BuildResult(models.Model):
     def _get_run_url(self, db_suffix=None):
         if db_suffix is None:
             db_suffix = self.mapped('database_ids')[0].db_suffix
-        if request.env.user._is_internal():
+        if self.env.user._is_internal():
             token, token_info = self._get_run_token()
             db_suffix = f'{db_suffix}-{token}-{token_info}'
         use_ssl = self.env['ir.config_parameter'].sudo().get_param('runbot.use_ssl', default=True)
@@ -410,7 +404,7 @@ class BuildResult(models.Model):
             'host': self.host if self.keep_host else False,
         })

-    def result_multi(self):
+    def _result_multi(self):
         if all(build.global_result == 'ok' or not build.global_result for build in self):
             return 'ok'
         if any(build.global_result in ('skipped', 'killed', 'manually_killed') for build in self):
@@ -495,7 +489,7 @@ class BuildResult(models.Model):
         new_build = self.create(values)
         if self.parent_id:
             new_build._github_status()
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         new_build._log('rebuild', 'Rebuild initiated by %s%s' % (user.name, (' :%s' % message) if message else ''))

         if self.local_state != 'done':
@@ -688,7 +682,7 @@ class BuildResult(models.Model):
                 'port': port,
             })
             build._log('wake_up', '**Waking up build**', log_type='markdown', level='SEPARATOR')
-            step_ids = build.params_id.config_id.step_ids()
+            step_ids = build.params_id.config_id.step_ids
             if step_ids and step_ids[-1]._step_state() == 'running':
                 run_step = step_ids[-1]
             else:
@@ -748,9 +742,9 @@ class BuildResult(models.Model):
             # compute statistics before starting next job
             build.active_step._make_stats(build)

-        build.active_step.log_end(build)
+        build.active_step._log_end(build)

-        step_ids = self.params_id.config_id.step_ids()
+        step_ids = self.params_id.config_id.step_ids
         if not step_ids:  # no job to do, build is done
             self.active_step = False
             self.local_state = 'done'
@@ -772,7 +766,7 @@ class BuildResult(models.Model):
             self.local_state = 'done'
             self.local_result = 'ko'
             return False
-        next_index = step_ids.index(self.active_step) + 1
+        next_index = list(step_ids).index(self.active_step) + 1

         while True:
             if next_index >= len(step_ids):  # final job, build is done
@@ -810,7 +804,7 @@ class BuildResult(models.Model):
                 build._log("run", message, level='ERROR')
                 build._kill(result='ko')

-    def _docker_run(self, cmd=None, ro_volumes=None, **kwargs):
+    def _docker_run(self, step, cmd=None, ro_volumes=None, **kwargs):
         self.ensure_one()
         _ro_volumes = ro_volumes or {}
         ro_volumes = {}
@@ -834,24 +828,24 @@ class BuildResult(models.Model):
             rc_content = cmd.get_config(starting_config=starting_config)
         else:
             rc_content = starting_config
-        self.write_file('.odoorc', rc_content)
+        self._write_file('.odoorc', rc_content)
         user = getpass.getuser()
         ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc')
-        kwargs.pop('build_dir', False)  # todo check python steps
+        kwargs.pop('build_dir', False)
+        kwargs.pop('log_path', False)
+        log_path = self._path('logs', '%s.txt' % step.name)
         build_dir = self._path()
         self.env.flush_all()

         def start_docker():
-            docker_run(cmd=cmd, build_dir=build_dir, ro_volumes=ro_volumes, **kwargs)
+            docker_run(cmd=cmd, build_dir=build_dir, log_path=log_path, ro_volumes=ro_volumes, **kwargs)
         return start_docker

-    def _path(self, *l, **kw):
+    def _path(self, *paths):
         """Return the repo build path"""
         self.ensure_one()
-        build = self
-        root = self.env['runbot.runbot']._root()
-        return os.path.join(root, 'build', build.dest, *l)
+        return self.env['runbot.runbot']._path('build', self.dest, *paths)

-    def http_log_url(self):
+    def _http_log_url(self):
         use_ssl = self.env['ir.config_parameter'].get_param('runbot.use_ssl', default=True)
         return '%s://%s/runbot/static/build/%s/logs/' % ('https' if use_ssl else 'http', self.host, self.dest)
@@ -859,12 +853,10 @@ class BuildResult(models.Model):
         """Return the absolute path to the direcory containing the server file, adding optional *path"""
         self.ensure_one()
         commit = self._get_server_commit()
-        if os.path.exists(commit._source_path('odoo')):
-            return commit._source_path('odoo', *path)
-        return commit._source_path('openerp', *path)
+        return commit._source_path('odoo', *path)

     def _docker_source_folder(self, commit):
-        return commit.repo_id.name
+        return sanitize(commit.repo_id.name)

     def _checkout(self):
         self.ensure_one()  # will raise exception if hash not found, we don't want to fail for all build.
@@ -876,7 +868,7 @@ class BuildResult(models.Model):
                 if build_export_path in exports:
                     self._log('_checkout', 'Multiple repo have same export path in build, some source may be missing for %s' % build_export_path, level='ERROR')
                     self._kill(result='ko')
-                exports[build_export_path] = commit.export(self)
+                exports[build_export_path] = commit._export(self)

             checkout_time = time.time() - start
             if checkout_time > 60:
@@ -908,7 +900,7 @@ class BuildResult(models.Model):
     def _get_modules_to_test(self, modules_patterns=''):
         self.ensure_one()

-        def filter_patterns(patterns, default, all):
+        def _filter_patterns(patterns, default, all):
             default = set(default)
             patterns_list = (patterns or '').split(',')
             patterns_list = [p.strip() for p in patterns_list]
@@ -924,10 +916,10 @@ class BuildResult(models.Model):
         modules_to_install = set()
         for repo, module_list in self._get_available_modules().items():
             available_modules += module_list
-            modules_to_install |= filter_patterns(repo.modules, module_list, module_list)
+            modules_to_install |= _filter_patterns(repo.modules, module_list, module_list)

-        modules_to_install = filter_patterns(self.params_id.modules, modules_to_install, available_modules)
-        modules_to_install = filter_patterns(modules_patterns, modules_to_install, available_modules)
+        modules_to_install = _filter_patterns(self.params_id.modules, modules_to_install, available_modules)
+        modules_to_install = _filter_patterns(modules_patterns, modules_to_install, available_modules)

         return sorted(modules_to_install)
@@ -942,7 +934,7 @@ class BuildResult(models.Model):
             msg = f"Failed to drop local logs database : {dbname} with exception: {e}"
             _logger.exception(msg)
             host_name = self.env['runbot.host']._get_current_name()
-            self.env['runbot.runbot'].warning(f'Host {host_name}: {msg}')
+            self.env['runbot.runbot']._warning(f'Host {host_name}: {msg}')

     def _local_pg_createdb(self, dbname):
         icp = self.env['ir.config_parameter']
@@ -992,7 +984,7 @@ class BuildResult(models.Model):
         if lock:
             self.env.cr.execute("""SELECT id FROM runbot_build WHERE parent_path like %s FOR UPDATE""", ['%s%%' % self.parent_path])
         self.ensure_one()
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         uid = user.id
         build = self
         message = message or 'Killing build %s, requested by %s (user #%s)' % (build.dest, user.name, uid)
@@ -1005,7 +997,7 @@ class BuildResult(models.Model):
                 child._ask_kill(lock=False)

     def _wake_up(self):
-        user = request.env.user if request else self.env.user
+        user = self.env.user
         self._log('wake_up', f'Wake up initiated by {user.name}')
         if self.local_state != 'done':
             self._log('wake_up', 'Impossibe to wake up, state is not done')
@@ -1031,7 +1023,7 @@ class BuildResult(models.Model):
             source_path = self._docker_source_folder(commit)
             for addons_path in (commit.repo_id.addons_paths or '').split(','):
                 if os.path.isdir(commit._source_path(addons_path)):
-                    yield os.path.join(source_path, addons_path).strip(os.sep)
+                    yield os.sep.join([source_path, addons_path]).strip(os.sep)

     def _get_server_info(self, commit=None):
         commit = commit or self._get_server_commit()
@@ -1052,7 +1044,7 @@ class BuildResult(models.Model):
         for commit_id in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
             if not self.params_id.skip_requirements and os.path.isfile(commit_id._source_path('requirements.txt')):
                 repo_dir = self._docker_source_folder(commit_id)
-                requirement_path = os.path.join(repo_dir, 'requirements.txt')
+                requirement_path = os.sep.join([repo_dir, 'requirements.txt'])
                 pres.append([f'python{py_version}', '-m', 'pip', 'install','--user', '--progress-bar', 'off', '-r', f'{requirement_path}'])

         addons_paths = self._get_addons_path()
@@ -1060,7 +1052,7 @@ class BuildResult(models.Model):
         server_dir = self._docker_source_folder(server_commit)

         # commandline
-        cmd = ['python%s' % py_version] + python_params + [os.path.join(server_dir, server_file)]
+        cmd = ['python%s' % py_version] + python_params + [os.sep.join([server_dir, server_file])]
         if sub_command:
             cmd += [sub_command]
@@ -1118,7 +1110,7 @@ class BuildResult(models.Model):
         """return the python name to use from build batch"""
         (server_commit, server_file) = self._get_server_info()
         server_path = server_commit._source_path(server_file)
-        with open(server_path, 'r') as f:
+        with file_open(server_path, 'r') as f:
             if f.readline().strip().endswith('python3'):
                 return '3'
         return ''
@@ -1131,52 +1123,35 @@ class BuildResult(models.Model):
         ir_logs = self.env['ir.logging'].search([('level', 'in', ('ERROR', 'WARNING', 'CRITICAL')), ('type', '=', 'server'), ('build_id', 'in', builds_to_scan.ids)])
         return BuildError._parse_logs(ir_logs)

-    def is_file(self, file, mode='r'):
+    def _is_file(self, file, mode='r'):
         file_path = self._path(file)
         return os.path.exists(file_path)

-    def read_file(self, file, mode='r'):
+    def _read_file(self, file, mode='r'):
         file_path = self._path(file)
         try:
-            with open(file_path, mode) as f:
+            with file_open(file_path, mode) as f:
                 return f.read()
         except Exception as e:
             self._log('readfile', 'exception: %s' % e)
             return False

-    def write_file(self, file, data, mode='w'):
-        file_path = self._path(file)
-        file_dir = os.path.split(file_path)[0]
+    def _write_file(self, file, data, mode='w'):
+        _file_path = self._path(file)
+        file_dir = os.path.dirname(_file_path)
         os.makedirs(file_dir, exist_ok=True)
+        file_path(os.path.dirname(_file_path))
         try:
-            with open(file_path, mode) as f:
+            with open(_file_path, mode) as f:
                 f.write(data)
         except Exception as e:
             self._log('write_file', 'exception: %s' % e)
             return False

-    def make_dirs(self, dir_path):
-        full_path = self._path(dir_path)
-        try:
-            os.makedirs(full_path, exist_ok=True)
-        except Exception as e:
-            self._log('make_dirs', 'exception: %s' % e)
-            return False
-
-    def build_type_label(self):
-        self.ensure_one()
-        return dict(self.fields_get('build_type', 'selection')['build_type']['selection']).get(self.build_type, self.build_type)
-
-    def get_formated_job_time(self):
-        return s2human(self.job_time)
-
-    def get_formated_build_time(self):
+    def _get_formated_build_time(self):
         return s2human(self.build_time)

-    def get_formated_build_age(self):
-        return s2human(self.build_age)
-
-    def get_color_class(self):
+    def _get_color_class(self):
         if self.global_result == 'ko':
             return 'danger'
@@ -1230,5 +1205,5 @@ class BuildResult(models.Model):
             if 'base_' not in build_commit.match_type and commit.repo_id in trigger.repo_ids:
                 commit._github_status(build, trigger.ci_context, state, target_url, desc)

-    def parse_config(self):
+    def _parse_config(self):
         return set(findall(self._server("tools/config.py"), '--[\w-]+', ))
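One detail worth flagging in build.py: several os.path.join calls on container-side paths became os.sep.join. A plausible reading (not stated in the commit message): os.path.join silently discards every component before an absolute one, so a crafted addons_path or repo name could escape the intended prefix, while os.sep.join is a dumb concatenation that always keeps it. Verifiable in two lines (on a POSIX system):

import os

assert os.path.join('repo_dir', '/etc') == '/etc'            # prefix silently dropped
assert os.sep.join(['repo_dir', '/etc']) == 'repo_dir//etc'  # prefix always kept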

View File

@@ -7,10 +7,11 @@ import re
 import shlex
 import time

 from unidiff import PatchSet
-from ..common import now, grep, time2str, rfind, s2human, os, RunbotException
+from ..common import now, grep, time2str, rfind, s2human, os, RunbotException, ReProxy
 from ..container import docker_get_gateway_ip, Command
 from odoo import models, fields, api
 from odoo.exceptions import UserError, ValidationError
+from odoo.tools.misc import file_open
 from odoo.tools.safe_eval import safe_eval, test_python_expr, _SAFE_OPCODES, to_opcodes

 # adding some additionnal optcode to safe_eval. This is not 100% needed and won't be done in standard but will help
@@ -26,26 +27,6 @@ _re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING '
 PYTHON_DEFAULT = "# type python code here\n\n\n\n\n\n"

-class ReProxy():
-    @classmethod
-    def match(cls, *args, **kwrags):
-        return re.match(*args, **kwrags)
-
-    @classmethod
-    def search(cls, *args, **kwrags):
-        return re.search(*args, **kwrags)
-
-    @classmethod
-    def compile(cls, *args, **kwrags):
-        return re.compile(*args, **kwrags)
-
-    @classmethod
-    def findall(cls, *args, **kwrags):
-        return re.findall(*args, **kwrags)
-
-    VERBOSE = re.VERBOSE
-    MULTILINE = re.MULTILINE

 class Config(models.Model):
     _name = 'runbot.build.config'
     _description = "Build config"
@@ -58,6 +39,7 @@ class Config(models.Model):
     protected = fields.Boolean('Protected', default=False, tracking=True)
     group = fields.Many2one('runbot.build.config', 'Configuration group', help="Group of config's and config steps")
     group_name = fields.Char('Group name', related='group.name')
+    step_ids = fields.Many2many('runbot.build.config.step', compute='_compute_step_ids')

     @api.model_create_multi
     def create(self, vals_list):
@@ -76,29 +58,25 @@ class Config(models.Model):
             copy.sudo().write({'protected': False})
         return copy

-    def unlink(self):
-        super(Config, self).unlink()
-
-    def step_ids(self):
-        if self:
-            self.ensure_one()
-        return [ordered_step.step_id for ordered_step in self.step_order_ids.sorted('sequence')]
+    @api.depends('step_order_ids.sequence', 'step_order_ids.step_id')
+    def _compute_step_ids(self):
+        for config in self:
+            config.step_ids = config.step_order_ids.sorted('sequence').mapped('step_id')

     def _check_step_ids_order(self):
         for record in self:
             install_job = False
-            step_ids = record.step_ids()
-            for step in step_ids:
+            for step in record.step_ids:
                 if step.job_type == 'install_odoo':
                     install_job = True
                 if step.job_type == 'run_odoo':
-                    if step != step_ids[-1]:
+                    if step != record.step_ids[-1]:
                         raise UserError('Jobs of type run_odoo should be the last one')
             if not install_job:
                 raise UserError('Jobs of type run_odoo should be preceded by a job of type install_odoo')
-            record._check_recustion()
+            record._check_recursion()

-    def _check_recustion(self, visited=None):
+    def _check_recursion(self, visited=None):
         self.ensure_one()
         visited = visited or []
         recursion = False
@@ -107,10 +85,10 @@ class Config(models.Model):
         visited.append(self)
         if recursion:
             raise UserError('Impossible to save config, recursion detected with path: %s' % ">".join([v.name for v in visited]))
-        for step in self.step_ids():
+        for step in self.step_ids:
             if step.job_type == 'create_build':
                 for create_config in step.create_config_ids:
-                    create_config._check_recustion(visited[:])
+                    create_config._check_recursion(visited[:])


 class ConfigStepUpgradeDb(models.Model):
@@ -270,7 +248,6 @@ class ConfigStep(models.Model):
             raise UserError('Invalid extra_params on config step')

     def _run(self, build):
-        log_path = build._path('logs', '%s.txt' % self.name)
         build.write({'job_start': now(), 'job_end': False})  # state, ...
         log_link = ''
         if self._has_log():
@@ -278,17 +255,17 @@ class ConfigStep(models.Model):
             url = f"{log_url}/runbot/static/build/{build.dest}/logs/{self.name}.txt"
             log_link = f'[@icon-file-text]({url})'
         build._log('run', 'Starting step **%s** from config **%s** %s' % (self.name, build.params_id.config_id.name, log_link), log_type='markdown', level='SEPARATOR')
-        return self._run_step(build, log_path)
+        return self._run_step(build)

-    def _run_step(self, build, log_path, **kwargs):
+    def _run_step(self, build, **kwargs):
         build.log_counter = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_maxlogs', 100)
         run_method = getattr(self, '_run_%s' % self.job_type)
-        docker_params = run_method(build, log_path, **kwargs)
+        docker_params = run_method(build, **kwargs)
         if docker_params:
-            return build._docker_run(**docker_params)
+            return build._docker_run(self, **docker_params)
         return True

-    def _run_create_build(self, build, log_path):
+    def _run_create_build(self, build):
         count = 0
         config_data = build.params_id.config_data
         config_ids = config_data.get('create_config_ids', self.create_config_ids)
@@ -308,7 +285,7 @@ class ConfigStep(models.Model):
                 child = build._add_child(_child_data, orphan=self.make_orphan)
                 build._log('create_build', 'created with config %s' % create_config.name, log_type='subbuild', path=str(child.id))

-    def make_python_ctx(self, build):
+    def _make_python_ctx(self, build):
         return {
             'self': self,
             # 'fields': fields,
@@ -325,8 +302,8 @@ class ConfigStep(models.Model):
             'PatchSet': PatchSet,
         }

-    def _run_python(self, build, log_path, force=False):
-        eval_ctx = self.make_python_ctx(build)
+    def _run_python(self, build, force=False):
+        eval_ctx = self._make_python_ctx(build)
         eval_ctx['force'] = force
         try:
             safe_eval(self.python_code.strip(), eval_ctx, mode="exec", nocopy=True)
@@ -350,7 +327,7 @@ class ConfigStep(models.Model):
 self.ensure_one()
 return self.job_type in ('install_odoo', 'run_odoo', 'restore', 'test_upgrade') or (self.job_type == 'python' and ('docker_params =' in self.python_code or '_run_' in self.python_code))
-def _run_run_odoo(self, build, log_path, force=False):
+def _run_run_odoo(self, build, force=False):
 if not force:
 if build.parent_id:
 build._log('_run_run_odoo', 'build has a parent, skip run')
@@ -365,7 +342,7 @@ class ConfigStep(models.Model):
 # run server
 cmd = build._cmd(local_only=False, enable_log_db=self.enable_log_db)
-available_options = build.parse_config()
+available_options = build._parse_config()
 if "--workers" in available_options:
 cmd += ["--workers", "2"]
@@ -406,9 +383,9 @@ class ConfigStep(models.Model):
 except Exception:
 _logger.exception('An error occured while reloading nginx')
 build._log('', "An error occured while reloading nginx, skipping")
-return dict(cmd=cmd, log_path=log_path, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables)
+return dict(cmd=cmd, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables)
-def _run_install_odoo(self, build, log_path):
+def _run_install_odoo(self, build):
 exports = build._checkout()
 modules_to_install = self._modules_to_install(build)
@@ -434,7 +411,7 @@ class ConfigStep(models.Model):
 cmd += ['-i', mods]
 config_path = build._server("tools/config.py")
-available_options = build.parse_config()
+available_options = build._parse_config()
 if self.test_enable:
 if "--test-enable" in available_options:
 cmd.extend(['--test-enable'])
@@ -452,7 +429,7 @@ class ConfigStep(models.Model):
 test_tags += self.test_tags.replace(' ', '').split(',')
 if self.enable_auto_tags and not build.params_id.config_data.get('disable_auto_tags', False):
 if grep(config_path, "[/module][:class]"):
-auto_tags = self.env['runbot.build.error'].disabling_tags()
+auto_tags = self.env['runbot.build.error']._disabling_tags()
 if auto_tags:
 test_tags += auto_tags
@@ -486,7 +463,7 @@ class ConfigStep(models.Model):
 cmd.finals.append(['cp', '-r', filestore_path, filestore_dest])
 cmd.finals.append(['cd', dump_dir, '&&', 'zip', '-rmq9', zip_path, '*'])
 infos = '{\n "db_name": "%s",\n "build_id": %s,\n "shas": [%s]\n}' % (db_name, build.id, ', '.join(['"%s"' % build_commit.commit_id.dname for build_commit in build.params_id.commit_link_ids]))
-build.write_file('logs/%s/info.json' % db_name, infos)
+build._write_file('logs/%s/info.json' % db_name, infos)
 if self.flamegraph:
 cmd.finals.append(['flamegraph.pl', '--title', 'Flamegraph %s for build %s' % (self.name, build.id), self._perfs_data_path(), '>', self._perfs_data_path(ext='svg')])
@@ -494,12 +471,12 @@ class ConfigStep(models.Model):
 max_timeout = int(self.env['ir.config_parameter'].get_param('runbot.runbot_timeout', default=10000))
 timeout = min(self.cpu_limit, max_timeout)
 env_variables = self.additionnal_env.split(';') if self.additionnal_env else []
-return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables)
+return dict(cmd=cmd, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables)
 def _upgrade_create_childs(self):
 pass
-def _run_configure_upgrade_complement(self, build, *args):
+def _run_configure_upgrade_complement(self, build):
 """
 Parameters:
 - upgrade_dumps_trigger_id: a configure_upgradestep
@@ -512,7 +489,7 @@ class ConfigStep(models.Model):
 builds_references = param.builds_reference_ids
 builds_references_by_version_id = {b.params_id.version_id.id: b for b in builds_references}
 upgrade_complement_step = build.params_id.trigger_id.upgrade_dumps_trigger_id.upgrade_step_id
-version_domain = build.params_id.trigger_id.upgrade_dumps_trigger_id.get_version_domain()
+version_domain = build.params_id.trigger_id.upgrade_dumps_trigger_id._get_version_domain()
 valid_targets = build.browse()
 next_versions = version.next_major_version_id | version.next_intermediate_version_ids
 if version_domain: # filter only on version where trigger is enabled
@@ -542,7 +519,7 @@ class ConfigStep(models.Model):
 )
 child._log('', 'This build tests change of schema in stable version testing upgrade to %s' % target.params_id.version_id.name)
-def _run_configure_upgrade(self, build, log_path):
+def _run_configure_upgrade(self, build):
 """
 Source/target parameters:
 - upgrade_to_current | (upgrade_to_master + (upgrade_to_major_versions | upgrade_to_all_versions))
@@ -704,7 +681,7 @@ class ConfigStep(models.Model):
 if any(fnmatch.fnmatch(db.db_suffix, pat) for pat in pat_list):
 yield db
-def _run_test_upgrade(self, build, log_path):
+def _run_test_upgrade(self, build):
 target = build.params_id.upgrade_to_build_id
 commit_ids = build.params_id.commit_ids
 target_commit_ids = target.params_id.commit_ids
@@ -742,9 +719,9 @@ class ConfigStep(models.Model):
 exception_env = self.env['runbot.upgrade.exception']._generate()
 if exception_env:
 env_variables.append(exception_env)
-return dict(cmd=migrate_cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag)
+return dict(cmd=migrate_cmd, container_name=build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag)
-def _run_restore(self, build, log_path):
+def _run_restore(self, build):
 # exports = build._checkout()
 params = build.params_id
 dump_db = params.dump_db
@@ -776,7 +753,7 @@ class ConfigStep(models.Model):
 assert download_db_suffix and dump_build
 download_db_name = '%s-%s' % (dump_build.dest, download_db_suffix)
 zip_name = '%s.zip' % download_db_name
-dump_url = '%s%s' % (dump_build.http_log_url(), zip_name)
+dump_url = '%s%s' % (dump_build._http_log_url(), zip_name)
 build._log('test-migration', 'Restoring dump [%s](%s) from build [%s](%s)' % (zip_name, dump_url, dump_build.id, dump_build.build_url), log_type='markdown')
 restore_suffix = self.restore_rename_db_suffix or dump_db.db_suffix or suffix
 assert restore_suffix
@@ -802,7 +779,7 @@ class ConfigStep(models.Model):
 ])
-return dict(cmd=cmd, log_path=log_path, container_name=build._get_docker_name(), cpu_limit=self.cpu_limit)
+return dict(cmd=cmd, container_name=build._get_docker_name(), cpu_limit=self.cpu_limit)
 def _reference_builds(self, bundle, trigger):
 upgrade_dumps_trigger_id = trigger.upgrade_dumps_trigger_id
@@ -879,7 +856,7 @@ class ConfigStep(models.Model):
 category_id=category_id
 ).mapped('last_done_batch')
-def log_end(self, build):
+def _log_end(self, build):
 if self.job_type == 'create_build':
 build._logger('Step %s finished in %s' % (self.name, s2human(build.job_time)))
 return
@@ -888,19 +865,19 @@ class ConfigStep(models.Model):
 if self.job_type == 'install_odoo':
 kwargs['message'] += ' $$fa-download$$'
 db_suffix = build.params_id.config_data.get('db_name') or (build.params_id.dump_db.db_suffix if not self.create_db else False) or self.db_name
-kwargs['path'] = '%s%s-%s.zip' % (build.http_log_url(), build.dest, db_suffix)
+kwargs['path'] = '%s%s-%s.zip' % (build._http_log_url(), build.dest, db_suffix)
 kwargs['log_type'] = 'link'
 build._log('', **kwargs)
 if self.coverage:
-xml_url = '%scoverage.xml' % build.http_log_url()
+xml_url = '%scoverage.xml' % build._http_log_url()
 html_url = 'http://%s/runbot/static/build/%s/coverage/index.html' % (build.host, build.dest)
 message = 'Coverage report: [xml @icon-download](%s), [html @icon-eye](%s)' % (xml_url, html_url)
 build._log('end_job', message, log_type='markdown')
 if self.flamegraph:
-dat_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'log.gz')
-svg_url = '%sflame_%s.%s' % (build.http_log_url(), self.name, 'svg')
+dat_url = '%sflame_%s.%s' % (build._http_log_url(), self.name, 'log.gz')
+svg_url = '%sflame_%s.%s' % (build._http_log_url(), self.name, 'svg')
 message = 'Flamegraph report: [data @icon-download](%s), [svg @icon-eye](%s)' % (dat_url, svg_url)
 build._log('end_job', message, log_type='markdown')
@@ -932,7 +909,7 @@ class ConfigStep(models.Model):
 for (addons_path, module, _) in commit._get_available_modules():
 if module not in modules_to_install:
 # we want to omit docker_source_folder/[addons/path/]module/*
-module_path_in_docker = os.path.join(docker_source_folder, addons_path, module)
+module_path_in_docker = os.sep.join([docker_source_folder, addons_path, module])
 pattern_to_omit.add('%s/*' % (module_path_in_docker))
 return ['--omit', ','.join(pattern_to_omit)]
@@ -953,7 +930,7 @@ class ConfigStep(models.Model):
 build.write(self._make_restore_results(build))
 def _make_python_results(self, build):
-eval_ctx = self.make_python_ctx(build)
+eval_ctx = self._make_python_ctx(build)
 safe_eval(self.python_result_code.strip(), eval_ctx, mode="exec", nocopy=True)
 return_value = eval_ctx.get('return_value', {})
 # todo check return_value or write in try except. Example: local result setted to wrong value
@@ -966,7 +943,7 @@ class ConfigStep(models.Model):
 build._log('coverage_result', 'Start getting coverage result')
 cov_path = build._path('coverage/index.html')
 if os.path.exists(cov_path):
-with open(cov_path, 'r') as f:
+with file_open(cov_path, 'r') as f:
 data = f.read()
 covgrep = re.search(r'pc_cov.>(?P<coverage>\d+)%', data)
 build_values['coverage_result'] = covgrep and covgrep.group('coverage') or False
@@ -997,11 +974,11 @@ class ConfigStep(models.Model):
 return build_values
 def _check_module_states(self, build):
-if not build.is_file('logs/modules_states.txt'):
+if not build._is_file('logs/modules_states.txt'):
 build._log('', '"logs/modules_states.txt" file not found.', level='ERROR')
 return 'ko'
-content = build.read_file('logs/modules_states.txt') or ''
+content = build._read_file('logs/modules_states.txt') or ''
 if '(0 rows)' not in content:
 build._log('', 'Some modules are not in installed/uninstalled/uninstallable state after migration. \n %s' % content)
 return 'ko'
@@ -1155,7 +1132,7 @@ class ConfigStep(models.Model):
 commit = commit_link.commit_id
 modified = commit.repo_id._git(['diff', '--name-only', '%s..%s' % (commit_link.merge_base_commit_id.name, commit.name)])
 if modified:
-files = [('%s/%s' % (build._docker_source_folder(commit), file)) for file in modified.split('\n') if file]
+files = [os.sep.join([build._docker_source_folder(commit), file]) for file in modified.split('\n') if file]
 modified_files[commit_link] = files
 return modified_files


@@ -60,7 +60,7 @@ class ConfigStep(models.Model):
 reviewer_per_file[file] = file_reviewers
 return reviewer_per_file
-def _run_codeowner(self, build, log_path):
+def _run_codeowner(self, build):
 bundle = build.params_id.create_batch_id.bundle_id
 if bundle.is_base:
 build._log('', 'Skipping base bundle')
@@ -133,7 +133,6 @@ class ConfigStep(models.Model):
 pr = pr_by_commit[commit_link]
 new_reviewers = reviewers - set((pr.reviewers or '').split(','))
 if new_reviewers:
-# todo replace all team by a runbot team and simplify this logic to remove search
 author_skippable_teams = skippable_teams.filtered(lambda team: team.skip_team_pr and team.github_team in new_reviewers and pr.pr_author in team._get_members_logins())
 author_skipped_teams = set(author_skippable_teams.mapped('github_team'))
 if author_skipped_teams:


@@ -63,26 +63,15 @@ class BuildError(models.Model):
 cleaners = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')])
 for vals in vals_list:
 content = vals.get('content')
-cleaned_content = cleaners.r_sub('%', content)
+cleaned_content = cleaners._r_sub('%', content)
 vals.update({
 'cleaned_content': cleaned_content,
 'fingerprint': self._digest(cleaned_content)
 })
 records = super().create(vals_list)
-records.assign()
+records.action_assign()
 return records
-def assign(self):
-if not any((not record.responsible and not record.team_id and record.file_path and not record.parent_id) for record in self):
-return
-teams = self.env['runbot.team'].search(['|', ('path_glob', '!=', False), ('module_ownership_ids', '!=', False)])
-repos = self.env['runbot.repo'].search([])
-for record in self:
-if not record.responsible and not record.team_id and record.file_path and not record.parent_id:
-team = teams._get_team(record.file_path, repos)
-if team:
-record.team_id = team
 def write(self, vals):
 if 'active' in vals:
 for build_error in self:
@@ -177,9 +166,9 @@ class BuildError(models.Model):
 hash_dict = defaultdict(self.env['ir.logging'].browse)
 for log in ir_logs:
-if search_regs.r_search(log.message):
+if search_regs._r_search(log.message):
 continue
-fingerprint = self._digest(cleaning_regs.r_sub('%', log.message))
+fingerprint = self._digest(cleaning_regs._r_sub('%', log.message))
 hash_dict[fingerprint] |= log
 build_errors = self.env['runbot.build.error']
@@ -220,7 +209,27 @@ class BuildError(models.Model):
 window_action["res_id"] = build_errors.id
 return window_action
-def link_errors(self):
+@api.model
+def _test_tags_list(self):
+active_errors = self.search([('test_tags', '!=', False)])
+test_tag_list = active_errors.mapped('test_tags')
+return [test_tag for error_tags in test_tag_list for test_tag in (error_tags).split(',')]
+@api.model
+def _disabling_tags(self):
+return ['-%s' % tag for tag in self._test_tags_list()]
+def _search_version(self, operator, value):
+return [('build_ids.version_id', operator, value)]
+def _search_trigger_ids(self, operator, value):
+return [('build_ids.trigger_id', operator, value)]
+####################
+# Actions
+####################
+def action_link_errors(self):
 """ Link errors with the first one of the recordset
 choosing parent in error with responsible, random bug and finally fisrt seen
 """
@@ -230,26 +239,22 @@ class BuildError(models.Model):
 build_errors = self.search([('id', 'in', self.ids)], order='responsible asc, random desc, id asc')
 build_errors[1:].write({'parent_id': build_errors[0].id})
-def clean_content(self):
+def action_clean_content(self):
 cleaning_regs = self.env['runbot.error.regex'].search([('re_type', '=', 'cleaning')])
 for build_error in self:
-build_error.cleaned_content = cleaning_regs.r_sub('%', build_error.content)
+build_error.cleaned_content = cleaning_regs._r_sub('%', build_error.content)
-@api.model
-def test_tags_list(self):
-active_errors = self.search([('test_tags', '!=', False)])
-test_tag_list = active_errors.mapped('test_tags')
-return [test_tag for error_tags in test_tag_list for test_tag in (error_tags).split(',')]
-@api.model
-def disabling_tags(self):
-return ['-%s' % tag for tag in self.test_tags_list()]
-def _search_version(self, operator, value):
-return [('build_ids.version_id', operator, value)]
-def _search_trigger_ids(self, operator, value):
-return [('build_ids.trigger_id', operator, value)]
+def action_assign(self):
+if not any((not record.responsible and not record.team_id and record.file_path and not record.parent_id) for record in self):
+return
+teams = self.env['runbot.team'].search(['|', ('path_glob', '!=', False), ('module_ownership_ids', '!=', False)])
+repos = self.env['runbot.repo'].search([])
+for record in self:
+if not record.responsible and not record.team_id and record.file_path and not record.parent_id:
+team = teams._get_team(record.file_path, repos)
+if team:
+record.team_id = team
 class BuildErrorTag(models.Model):
@@ -272,13 +277,13 @@ class ErrorRegex(models.Model):
 re_type = fields.Selection([('filter', 'Filter out'), ('cleaning', 'Cleaning')], string="Regex type")
 sequence = fields.Integer('Sequence', default=100)
-def r_sub(self, replace, s):
+def _r_sub(self, replace, s):
 """ replaces patterns from the recordset by replace in the given string """
 for c in self:
 s = re.sub(c.regex, '%', s)
 return s
-def r_search(self, s):
+def _r_search(self, s):
 """ Return True if one of the regex is found in s """
 for filter in self:
 if re.search(filter.regex, s):
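`_r_sub` applies every cleaning regex of the recordset in sequence (note it currently substitutes the literal '%' regardless of its `replace` argument), and the cleaned text is then digested so equivalent messages share a fingerprint. A self-contained sketch of that pipeline; the hashing choice here is illustrative:

```python
import hashlib
import re

def r_sub(regexes, s):
    # apply every cleaning regex in sequence, like the recordset loop above
    for rx in regexes:
        s = re.sub(rx, '%', s)
    return s

def digest(s):
    # illustrative hash; runbot's _digest may differ
    return hashlib.sha256(s.encode()).hexdigest()

cleaners = [r'0x[0-9a-f]+', r'\d+']
a = digest(r_sub(cleaners, 'worker 12 died at 0xdeadbeef'))
b = digest(r_sub(cleaners, 'worker 99 died at 0xcafebabe'))
assert a == b  # same error class, same fingerprint
```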
@@ -297,7 +302,13 @@ class ErrorBulkWizard(models.TransientModel):
 archive = fields.Boolean('Close error (archive)', default=False)
 chatter_comment = fields.Text('Chatter Comment')
-def submit(self):
+@api.onchange('fixing_commit', 'chatter_comment')
+def _onchange_commit_comment(self):
+for record in self:
+if record.fixing_commit or record.chatter_comment:
+record.archive = True
+def action_submit(self):
 error_ids = self.env['runbot.build.error'].browse(self.env.context.get('active_ids'))
 if error_ids:
 if self.team_id:
@@ -313,9 +324,3 @@ class ErrorBulkWizard(models.TransientModel):
 if self.chatter_comment:
 for build_error in error_ids:
 build_error.message_post(body=self.chatter_comment, subject="Bullk Wizard Comment")
-@api.onchange('fixing_commit', 'chatter_comment')
-def _onchange_commit_comment(self):
-for record in self:
-if record.fixing_commit or record.chatter_comment:
-record.archive = True


@@ -6,6 +6,7 @@ import re
 from odoo import models, fields, api
 from odoo.exceptions import ValidationError
+from odoo.tools import file_open
 VALUE_PATTERN = r"\(\?P\<value\>.+\)" # used to verify value group pattern
@@ -53,7 +54,7 @@ class BuildStatRegex(models.Model):
 if not os.path.exists(file_path):
 return {}
 stats_matches = {}
-with open(file_path, "r") as log_file:
+with file_open(file_path, "r") as log_file:
 data = log_file.read()
 for build_stat_regex in self:
 current_stat_matches = {}
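Several `open()` calls on runbot-managed files move to `odoo.tools.file_open`, which resolves the path against the directories Odoo considers safe and raises when it falls outside them, where a bare `open()` would follow any '../'. A hedged sketch of the calling pattern (the exact exception behaviour depends on the Odoo version):

```python
from odoo.tools import file_open

def read_build_log(path):
    # file_open refuses paths outside the allowed roots; a bare open()
    # would happily follow '../' out of the build directory
    try:
        with file_open(path, 'r') as log_file:
            return log_file.read()
    except (FileNotFoundError, ValueError):
        # missing file, or a path odoo rejected (assumption: recent
        # odoo versions signal both through these exceptions)
        return ''
```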


@@ -225,7 +225,7 @@ class Bundle(models.Model):
 self.last_batch = new
 return new
-def consistency_warning(self):
+def _consistency_warning(self):
 if self.defined_base_id:
 return [('info', 'This bundle has a forced base: %s' % self.defined_base_id.name)]
 warnings = []
@@ -242,34 +242,13 @@ class Bundle(models.Model):
 warnings.append(('warning', 'Branch %s not starting with version name (%s)' % (branch.dname, self.base_id.name)))
 return warnings
-def branch_groups(self):
+def _branch_groups(self):
 self.branch_ids.sorted(key=lambda b: (b.remote_id.repo_id.sequence, b.remote_id.repo_id.id, b.is_pr))
 branch_groups = {repo: [] for repo in self.branch_ids.mapped('remote_id.repo_id').sorted('sequence')}
 for branch in self.branch_ids.sorted(key=lambda b: (b.is_pr)):
 branch_groups[branch.remote_id.repo_id].append(branch)
 return branch_groups
-def generate_custom_trigger_multi_action(self):
-context = {
-'default_bundle_id': self.id,
-'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id,
-'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
-'default_extra_params': False,
-'default_child_extra_params': '--test-tags /module.test_method',
-'default_number_build': 10,
-}
-return self._generate_custom_trigger_action(context)
-def generate_custom_trigger_restore_action(self):
-context = {
-'default_bundle_id': self.id,
-'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
-'default_child_config_id': False,
-'default_extra_params': '--test-tags /module.test_method',
-'default_child_extra_params': False,
-'default_number_build': 0,
-}
-return self._generate_custom_trigger_action(context)
 def _generate_custom_trigger_action(self, context):
 return {
@@ -280,3 +259,25 @@
 'target': 'new',
 'context': context,
 }
+def action_generate_custom_trigger_multi_action(self):
+context = {
+'default_bundle_id': self.id,
+'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id,
+'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
+'default_extra_params': False,
+'default_child_extra_params': '--test-tags /module.test_method',
+'default_number_build': 10,
+}
+return self._generate_custom_trigger_action(context)
+def action_generate_custom_trigger_restore_action(self):
+context = {
+'default_bundle_id': self.id,
+'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
+'default_child_config_id': False,
+'default_extra_params': '--test-tags /module.test_method',
+'default_child_extra_params': False,
+'default_number_build': 0,
+}
+return self._generate_custom_trigger_action(context)
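The relocated `action_generate_custom_trigger_*` methods only build an `ir.actions.act_window` whose context pre-fills the wizard: in Odoo, `default_<field>` keys in an action context become default values of the record the window creates. A sketch with an illustrative wizard model:

```python
def open_custom_trigger_wizard(bundle_id):
    # 'default_<field>' context keys become defaults of the new record
    return {
        'name': 'Custom trigger',
        'type': 'ir.actions.act_window',
        'res_model': 'my.custom.trigger.wizard',  # hypothetical model name
        'view_mode': 'form',
        'target': 'new',
        'context': {
            'default_bundle_id': bundle_id,
            'default_number_build': 10,
        },
    }
```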


@@ -1,11 +1,12 @@
 import subprocess
-from ..common import os, RunbotException, _make_github_session
+from odoo.tools import file_open
+from ..common import os, RunbotException, make_github_session
 import glob
 import shutil
 from odoo import models, fields, api, registry
 import logging
 _logger = logging.getLogger(__name__)
@@ -52,7 +53,7 @@ class Commit(models.Model):
 module = os.path.basename(os.path.dirname(manifest_path))
 yield (addons_path, module, manifest_file_name)
-def export(self, build):
+def _export(self, build):
 """Export a git repo into a sources"""
 # TODO add automated tests
 self.ensure_one()
@@ -106,19 +107,19 @@ class Commit(models.Model):
 return export_path
-def read_source(self, file, mode='r'):
+def _read_source(self, file, mode='r'):
 file_path = self._source_path(file)
 try:
-with open(file_path, mode) as f:
+with file_open(file_path, mode) as f:
 return f.read()
 except:
 return False
-def _source_path(self, *path):
+def _source_path(self, *paths):
 export_name = self.name
 if self.rebase_on_id:
 export_name = '%s_%s' % (self.name, self.rebase_on_id.name)
-return os.path.join(self.env['runbot.runbot']._root(), 'sources', self.repo_id.name, export_name, *path)
+return self.repo_id._source_path(export_name, *paths)
 @api.depends('name', 'repo_id.name')
 def _compute_dname(self):
@@ -201,7 +202,7 @@ class CommitStatus(models.Model):
 _logger.warning('No token on remote %s, skipping status', remote.mapped("name"))
 else:
 if remote.token not in session_cache:
-session_cache[remote.token] = _make_github_session(remote.token)
+session_cache[remote.token] = make_github_session(remote.token)
 session = session_cache[remote.token]
 _logger.info(
 "github updating %s status %s to %s in repo %s",


@@ -56,17 +56,17 @@ class CustomTriggerWizard(models.TransientModel):
 @api.depends('config_id')
 def _compute_has_create_step(self):
 for record in self:
-record.has_create_step = any(step.job_type == 'create_build' for step in self.config_id.step_ids())
+record.has_create_step = any(step.job_type == 'create_build' for step in self.config_id.step_ids)
 @api.depends('config_id')
 def _compute_has_restore_step(self):
 for record in self:
-record.has_restore_step = any(step.job_type == 'restore' for step in self.config_id.step_ids())
+record.has_restore_step = any(step.job_type == 'restore' for step in self.config_id.step_ids)
 @api.depends('child_config_id')
 def _compute_has_child_with_restore_step(self):
 for record in self:
-record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in self.child_config_id.step_ids())
+record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in self.child_config_id.step_ids)
 @api.onchange('extra_params', 'child_extra_params', 'restore_dump_url', 'config_id', 'child_config_id', 'number_build', 'config_id', 'restore_mode', 'restore_database_suffix', 'restore_trigger_id')
 def _onchange_warnings(self):
@@ -164,7 +164,7 @@ class CustomTriggerWizard(models.TransientModel):
 def _get_existing_trigger(self):
 return self.env['runbot.bundle.trigger.custom'].search([('bundle_id', '=', self.bundle_id.id), ('trigger_id', '=', self.trigger_id.id)])
-def submit(self):
+def action_submit(self):
 self.ensure_one()
 self._get_existing_trigger().unlink()
 self.env['runbot.bundle.trigger.custom'].create({


@@ -5,7 +5,7 @@ import time
 from collections import defaultdict
 from odoo import models, fields, api
-from odoo.tools import config, ormcache
+from odoo.tools import config, ormcache, file_open
 from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor
 from ..container import docker_build
@@ -106,8 +106,8 @@ class Host(models.Model):
 def _bootstrap(self):
 """ Create needed directories in static """
 dirs = ['build', 'nginx', 'repo', 'sources', 'src', 'docker']
-static_path = self._get_work_path()
-static_dirs = {d: os.path.join(static_path, d) for d in dirs}
+static_path = self.env['runbot.runbot']._root()
+static_dirs = {d: self.env['runbot.runbot']._path(d) for d in dirs}
 for dir, path in static_dirs.items():
 os.makedirs(path, exist_ok=True)
 self._bootstrap_db_template()
@@ -117,16 +117,14 @@ class Host(models.Model):
 """ build docker images needed by locally pending builds"""
 _logger.info('Building docker images...')
 self.ensure_one()
-static_path = self._get_work_path()
 self.clear_caches() # needed to ensure that content is updated on all hosts
 for dockerfile in self.env['runbot.dockerfile'].search([('to_build', '=', True)]):
-self._docker_build_dockerfile(dockerfile, static_path)
+self._docker_build_dockerfile(dockerfile)
 _logger.info('Done...')
-def _docker_build_dockerfile(self, dockerfile, workdir):
+def _docker_build_dockerfile(self, dockerfile):
 start = time.time()
-# _logger.info('Building %s, %s', dockerfile.name, hash(str(dockerfile.dockerfile)))
-docker_build_path = os.path.join(workdir, 'docker', dockerfile.image_tag)
+docker_build_path = self.env['runbot.runbot']._path('docker', dockerfile.image_tag)
 os.makedirs(docker_build_path, exist_ok=True)
 user = getpass.getuser()
@@ -139,22 +137,18 @@ class Host(models.Model):
 USER {user}
 ENV COVERAGE_FILE /data/build/.coverage
 """
-with open(os.path.join(docker_build_path, 'Dockerfile'), 'w') as Dockerfile:
+with open(self.env['runbot.runbot']._path('docker', dockerfile.image_tag, 'Dockerfile'), 'w') as Dockerfile:
 Dockerfile.write(dockerfile.dockerfile + docker_append)
 docker_build_success, msg = docker_build(docker_build_path, dockerfile.image_tag)
 if not docker_build_success:
 dockerfile.to_build = False
 dockerfile.message_post(body=f'Build failure:\n{msg}')
-# self.env['runbot.runbot'].warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}')
+# self.env['runbot.runbot']._warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}')
 else:
 duration = time.time() - start
 if duration > 1:
 _logger.info('Dockerfile %s finished build in %s', dockerfile.image_tag, duration)
-def _get_work_path(self):
-return os.path.abspath(os.path.join(os.path.dirname(__file__), '../static'))
 @ormcache()
 def _host_list(self):
@@ -172,11 +166,11 @@ class Host(models.Model):
 def _get_current_name(self):
 return config.get('forced_host_name') or fqdn()
-def get_running_max(self):
+def _get_running_max(self):
 icp = self.env['ir.config_parameter']
 return int(icp.get_param('runbot.runbot_running_max', default=5))
-def set_psql_conn_count(self):
+def _set_psql_conn_count(self):
 _logger.info('Updating psql connection count...')
 self.ensure_one()
 with local_pgadmin_cursor() as local_cr:
@@ -190,7 +184,7 @@ class Host(models.Model):
 def _total_workers(self):
 return sum(host.nb_worker for host in self)
-def disable(self):
+def _disable(self):
 """ Reserve host if possible """
 self.ensure_one()
 nb_hosts = self.env['runbot.host'].search_count([])
@@ -271,12 +265,12 @@ class Host(models.Model):
 with local_pg_cursor(logs_db_name) as local_cr:
 local_cr.execute("DELETE FROM ir_logging WHERE id in %s", [tuple(local_log_ids)])
-def get_build_domain(self, domain=None):
+def _get_build_domain(self, domain=None):
 domain = domain or []
 return [('host', '=', self.name)] + domain
-def get_builds(self, domain, order=None):
-return self.env['runbot.build'].search(self.get_build_domain(domain), order=order)
+def _get_builds(self, domain, order=None):
+return self.env['runbot.build'].search(self._get_build_domain(domain), order=order)
 def _process_messages(self):
 self.host_message_ids._process()
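`_get_build_domain` gives host-scoped queries a single construction point: the host clause is prepended to whatever extra filter the caller passes. A standalone sketch:

```python
def get_build_domain(host_name, domain=None):
    # host clause first, caller's filter appended
    domain = domain or []
    return [('host', '=', host_name)] + domain

print(get_build_domain('runbot7', [('local_state', '=', 'testing')]))
# [('host', '=', 'runbot7'), ('local_state', '=', 'testing')]
```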
@@ -298,5 +292,5 @@ class MessageQueue(models.Model):
 # todo consume messages here
 if records:
 for record in records:
-self.env['runbot.runbot'].warning(f'Host {record.host_id.name} got an unexpected message {record.message}')
+self.env['runbot.runbot']._warning(f'Host {record.host_id.name} got an unexpected message {record.message}')
 self.unlink()


@@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__)
 TYPES = [(t, t.capitalize()) for t in 'client server runbot subbuild link markdown'.split()]
-class runbot_event(models.Model):
+class IrLogging(models.Model):
 _inherit = "ir.logging"
 _order = 'id'
@@ -55,7 +55,7 @@
 for ir_logging in self:
 ir_logging.error_id = False
 if ir_logging.level in ('ERROR', 'CRITICAL', 'WARNING') and ir_logging.type == 'server':
-fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes.r_sub('%', ir_logging.message))].append(ir_logging)
+fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes._r_sub('%', ir_logging.message))].append(ir_logging)
 for build_error in self.env['runbot.build.error'].search([('fingerprint', 'in', list(fingerprints.keys()))]):
 for ir_logging in fingerprints[build_error.fingerprint]:
 ir_logging.error_id = build_error.id
@@ -106,14 +106,6 @@ class RunbotErrorLog(models.Model):
 for l in self:
 l.build_url = '/runbot/build/%s' % l.build_id.id
-def action_goto_build(self):
-self.ensure_one()
-return {
-"type": "ir.actions.act_url",
-"url": "runbot/build/%s" % self.build_id.id,
-"target": "new",
-}
 def _compute_bundle_id(self):
 slots = self.env['runbot.batch.slot'].search([('build_id', 'in', self.mapped('top_parent_id').ids)])
 for l in self:
@@ -198,3 +190,11 @@
 WHERE
 l.level = 'ERROR'
 )""")
+def action_goto_build(self):
+self.ensure_one()
+return {
+"type": "ir.actions.act_url",
+"url": "runbot/build/%s" % self.build_id.id,
+"target": "new",
+}

runbot/models/module.py (new file)


@@ -11,18 +11,14 @@ import requests
 from pathlib import Path
 from odoo import models, fields, api
-from ..common import os, RunbotException, _make_github_session
+from odoo.tools import file_open
+from ..common import os, RunbotException, make_github_session, sanitize
 from odoo.exceptions import UserError
 from odoo.tools.safe_eval import safe_eval
 _logger = logging.getLogger(__name__)
-def _sanitize(name):
-for i in '@:/':
-name = name.replace(i, '_')
-return name
 class Trigger(models.Model):
 """
@@ -89,7 +85,7 @@ class Trigger(models.Model):
 return [(4, b.id) for b in refs_builds]
 return []
-def get_version_domain(self):
+def _get_version_domain(self):
 if self.version_domain:
 return safe_eval(self.version_domain)
 return []
@@ -147,7 +143,7 @@ class Remote(models.Model):
 def _compute_remote_name(self):
 for remote in self:
-remote.remote_name = _sanitize(remote.short_name)
+remote.remote_name = sanitize(remote.short_name)
 def create(self, values_list):
 remote = super().create(values_list)
@@ -175,7 +171,7 @@
 url = url.replace(':owner', remote.owner)
 url = url.replace(':repo', remote.repo_name)
 url = 'https://api.%s%s' % (remote.repo_domain, url)
-session = session or _make_github_session(remote.token)
+session = session or make_github_session(remote.token)
 while url:
 if recursive:
 _logger.info('Getting page %s', url)
@@ -212,12 +208,12 @@
 else:
 raise
-def check_token(self):
+def action_check_token(self):
 if not self.user_has_groups('runbot.group_runbot_admin'):
 raise UserError('This action is restricted to admin users')
 token_results = {}
 for repo in self:
-session = _make_github_session(repo.token)
+session = make_github_session(repo.token)
 if repo.token not in token_results:
 token_results[repo.token] = session.get("https://api.github.com/user")
 response = token_results[repo.token]
@@ -287,7 +283,7 @@ class Repo(models.Model):
 upgrade_paths = fields.Char('Upgrade paths', help='Comma separated list of possible upgrade path', default='', tracking=True)
 sequence = fields.Integer('Sequence', tracking=True)
-path = fields.Char(compute='_get_path', string='Directory', readonly=True)
+path = fields.Char(compute='_compute_path', string='Directory', readonly=True)
 mode = fields.Selection([('disabled', 'Disabled'),
 ('poll', 'Poll'),
 ('hook', 'Hook')],
@@ -326,12 +322,12 @@
 for repo in self:
 repo.hook_time = times.get(repo.id, 0)
-def set_hook_time(self, value):
+def _set_hook_time(self, value):
 for repo in self:
 self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id})
 self.invalidate_recordset(['hook_time'])
-def set_ref_time(self, value):
+def _set_ref_time(self, value):
 for repo in self:
 self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id})
 self.invalidate_recordset(['get_ref_time'])
@@ -349,11 +345,16 @@
 """)
 @api.depends('name')
-def _get_path(self):
+def _compute_path(self):
 """compute the server path of repo from the name"""
-root = self.env['runbot.runbot']._root()
 for repo in self:
-repo.path = os.path.join(root, 'repo', _sanitize(repo.name))
+repo.path = repo._path()
+def _path(self, *path_parts):
+return self.env['runbot.runbot']._path('repo', sanitize(self.name), *path_parts)
+def _source_path(self, *path_parts):
+return self.env['runbot.runbot']._path('sources', sanitize(self.name), *path_parts)
 def _git(self, cmd, errors='strict'):
 """Execute a git command 'cmd'"""
@@ -396,7 +397,7 @@ class Repo(models.Model):
 def _get_fetch_head_time(self):
 self.ensure_one()
-fname_fetch_head = os.path.join(self.path, 'FETCH_HEAD')
+fname_fetch_head = self._path('FETCH_HEAD')
 if os.path.exists(fname_fetch_head):
 return os.path.getmtime(fname_fetch_head)
 return 0
@@ -411,7 +412,7 @@
 commit_limit = time.time() - (60 * 60 * 24 * max_age)
 if not self.get_ref_time or get_ref_time > self.get_ref_time:
 try:
-self.set_ref_time(get_ref_time)
+self._set_ref_time(get_ref_time)
 fields = ['refname', 'objectname', 'committerdate:unix', 'authorname', 'authoremail', 'subject', 'committername', 'committeremail']
 fmt = "%00".join(["%(" + field + ")" for field in fields])
 cmd = ['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/*/heads/*']
@@ -423,7 +424,7 @@
 return []
 refs = [tuple(field for field in line.split('\x00')) for line in git_refs.split('\n')]
 refs = [r for r in refs if not re.match(r'^refs/[\w-]+/heads/\d+$', r[0])] # remove branches with interger names to avoid confusion with pr names
-refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch'].match_is_base(r[0].split('/')[-1])]
+refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch']._match_is_base(r[0].split('/')[-1])]
 if ignore:
 refs = [r for r in refs if r[0].split('/')[-1] not in ignore]
 return refs
@@ -443,7 +444,7 @@
 # FIXME WIP
 names = [r[0].split('/')[-1] for r in refs]
 branches = self.env['runbot.branch'].search([('name', 'in', names), ('remote_id', 'in', self.remote_ids.ids)])
-ref_branches = {branch.ref(): branch for branch in branches}
+ref_branches = {branch._ref(): branch for branch in branches}
 new_branch_values = []
 for ref_name, sha, date, author, author_email, subject, committer, committer_email in refs:
 if not ref_branches.get(ref_name):
@@ -462,7 +463,7 @@
 _logger.info('Creating new branches')
 new_branches = self.env['runbot.branch'].create(new_branch_values)
 for branch in new_branches:
-ref_branches[branch.ref()] = branch
+ref_branches[branch._ref()] = branch
 return ref_branches
 def _find_new_commits(self, refs, ref_branches):
@@ -532,11 +533,11 @@
 if repo.mode == 'disabled':
 _logger.info(f'skipping disabled repo {repo.name}')
 continue
-if os.path.isdir(os.path.join(repo.path, 'refs')):
-git_config_path = os.path.join(repo.path, 'config')
+if os.path.isdir(repo._path('refs')):
+git_config_path = repo._path('config')
 template_params = {'repo': repo}
 git_config = self.env['ir.ui.view']._render_template("runbot.git_config", template_params)
-with open(git_config_path, 'w') as config_file:
+with file_open(git_config_path, 'w') as config_file:
 config_file.write(str(git_config))
 _logger.info('Config updated for repo %s' % repo.name)
 else:
@@ -546,7 +547,7 @@
 """ Clone the remote repo if needed """
 self.ensure_one()
 repo = self
-if not os.path.isdir(os.path.join(repo.path, 'refs')):
+if not os.path.isdir(repo._path('refs')):
 _logger.info("Initiating repository '%s' in '%s'" % (repo.name, repo.path))
 git_init = subprocess.run(['git', 'init', '--bare', repo.path], stderr=subprocess.PIPE)
 if git_init.returncode:
@@ -561,11 +562,11 @@
 repo = self
 if not repo.remote_ids:
 return False
-if not os.path.isdir(os.path.join(repo.path)):
+if not os.path.isdir(repo.path):
 os.makedirs(repo.path)
 force = self._git_init() or force
-fname_fetch_head = os.path.join(repo.path, 'FETCH_HEAD')
+fname_fetch_head = repo._path('FETCH_HEAD')
 if not force and os.path.isfile(fname_fetch_head):
 fetch_time = os.path.getmtime(fname_fetch_head)
 if repo.mode == 'hook':
@@ -599,9 +600,9 @@
 host.message_post(body=message)
 icp = self.env['ir.config_parameter'].sudo()
 if icp.get_param('runbot.runbot_disable_host_on_fetch_failure'):
-self.env['runbot.runbot'].warning('Host %s got reserved because of fetch failure' % host.name)
+self.env['runbot.runbot']._warning('Host %s got reserved because of fetch failure' % host.name)
 _logger.exception(message)
-host.disable()
+host._disable()
 return success
 def _update(self, force=False, poll_delay=5*60):


@@ -11,13 +11,13 @@ from contextlib import contextmanager
 from requests.exceptions import HTTPError
 from subprocess import CalledProcessError
-from ..common import fqdn, dest_reg, os
+from ..common import dest_reg, os, sanitize
 from ..container import docker_ps, docker_stop
 from odoo import models, fields
+from odoo.exceptions import UserError
 from odoo.osv import expression
-from odoo.tools import config
-from odoo.modules.module import get_module_resource
+from odoo.tools import config, file_open
 _logger = logging.getLogger(__name__)
@@ -33,14 +33,21 @@ class Runbot(models.AbstractModel):
 def _root(self):
 """Return root directory of repository"""
-default = os.path.join(os.path.dirname(__file__), '../static')
-return os.path.abspath(default)
+return os.path.abspath(os.sep.join([os.path.dirname(__file__), '../static']))
+def _path(self, *path_parts):
+"""Return the repo build path"""
+root = self.env['runbot.runbot']._root()
+file_path = os.path.normpath(os.sep.join([root] + [sanitize(path) for path_part in path_parts for path in path_part.split(os.sep) if path]))
+if not file_path.startswith(root):
+raise UserError('Invalid path')
+return file_path
 def _scheduler(self, host):
 self._gc_testing(host)
 self._commit()
 processed = 0
-for build in host.get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
+for build in host._get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
 build = build.browse(build.id)
 processed += 1
 build._process_requested_actions()
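The new `_path` helper is the security core of this refactoring: each component is sanitized, the joined path is normalized, and anything that still escapes the static root is rejected. A self-contained sketch of that containment check (root, sanitizer, and error type are illustrative):

```python
import os
import re

ROOT = os.path.abspath('/tmp/runbot_static')  # illustrative root

def safe_path(*parts):
    # sanitize each component, normalize, then verify containment
    clean = [re.sub(r'[^\w.-]', '_', p) for part in parts for p in part.split(os.sep) if p]
    path = os.path.normpath(os.sep.join([ROOT] + clean))
    if not path.startswith(ROOT):
        raise ValueError('Invalid path')  # UserError in the odoo code
    return path

print(safe_path('repo', 'odoo_odoo', 'FETCH_HEAD'))
try:
    safe_path('..', '..', 'etc', 'passwd')
except ValueError as exc:
    print(exc)  # '..' survives this sanitizer but not the normpath check
```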
@@ -49,7 +56,7 @@ class Runbot(models.AbstractModel):
 self._commit()
 host._process_messages()
 self._commit()
-for build in host.get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host):
+for build in host._get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host):
 build = build.browse(build.id) # remove preftech ids, manage build one by one
 result = build._schedule()
 if result:
@@ -73,7 +80,7 @@
 def _assign_pending_builds(self, host, nb_worker, domain=None):
 if host.assigned_only or nb_worker <= 0:
 return 0
-reserved_slots = len(host.get_builds([('local_state', 'in', ('testing', 'pending'))]))
+reserved_slots = len(host._get_builds([('local_state', 'in', ('testing', 'pending'))]))
 assignable_slots = (nb_worker - reserved_slots)
 if assignable_slots > 0:
 allocated = self._allocate_builds(host, assignable_slots, domain)
@@ -83,8 +90,8 @@
 return 0
 def _get_builds_to_init(self, host):
-domain_host = host.get_build_domain()
-used_slots = len(host.get_builds([('local_state', '=', 'testing')]))
+domain_host = host._get_build_domain()
+used_slots = len(host._get_builds([('local_state', '=', 'testing')]))
 available_slots = host.nb_worker - used_slots
 build_to_init = self.env['runbot.build']
 if available_slots > 0:
@@ -94,16 +101,16 @@
 return build_to_init
 def _gc_running(self, host):
-running_max = host.get_running_max()
+running_max = host._get_running_max()
 Build = self.env['runbot.build']
-cannot_be_killed_ids = host.get_builds([('keep_running', '=', True)]).ids
+cannot_be_killed_ids = host._get_builds([('keep_running', '=', True)]).ids
 sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True), ('project_id.keep_sticky_running', '=', True)])
 cannot_be_killed_ids += [
 build.id
 for build in sticky_bundles.mapped('last_batchs.slot_ids.build_id')
 if build.host == host.name
 ][:running_max]
-build_ids = host.get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
+build_ids = host._get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
 for build in Build.browse(build_ids)[running_max:]:
 build._kill()
@@ -112,7 +119,7 @@
 """garbage collect builds that could be killed"""
 # decide if we need room
 Build = self.env['runbot.build']
-domain_host = host.get_build_domain()
+domain_host = host._get_build_domain()
 testing_builds = Build.search(domain_host + [('local_state', 'in', ['testing', 'pending']), ('requested_action', '!=', 'deathrow')])
 used_slots = len(testing_builds)
 available_slots = host.nb_worker - used_slots
@@ -153,9 +160,9 @@
 env = self.env
 settings = {}
 settings['port'] = config.get('http_port')
-settings['runbot_static'] = os.path.join(get_module_resource('runbot', 'static'), '')
+settings['runbot_static'] = self.env['runbot.runbot']._root() + os.sep
 settings['base_url'] = self.get_base_url()
-nginx_dir = os.path.join(self._root(), 'nginx')
+nginx_dir = self.env['runbot.runbot']._path('nginx')
 settings['nginx_dir'] = nginx_dir
 settings['re_escape'] = re.escape
 host_name = self.env['runbot.host']._get_current_name()
@@ -166,17 +173,17 @@
nginx_config = env['ir.ui.view']._render_template("runbot.nginx_config", settings) nginx_config = env['ir.ui.view']._render_template("runbot.nginx_config", settings)
os.makedirs(nginx_dir, exist_ok=True) os.makedirs(nginx_dir, exist_ok=True)
content = None content = None
nginx_conf_path = os.path.join(nginx_dir, 'nginx.conf') nginx_conf_path = self.env['runbot.runbot']._path('nginx', 'nginx.conf')
content = '' content = ''
if os.path.isfile(nginx_conf_path): if os.path.isfile(nginx_conf_path):
with open(nginx_conf_path, 'r') as f: with file_open(nginx_conf_path, 'r') as f:
content = f.read() content = f.read()
if content != nginx_config: if content != nginx_config:
_logger.info('reload nginx') _logger.info('reload nginx')
with open(nginx_conf_path, 'w') as f: with open(nginx_conf_path, 'w') as f:
f.write(str(nginx_config)) f.write(str(nginx_config))
try: try:
pid = int(open(os.path.join(nginx_dir, 'nginx.pid')).read().strip(' \n')) pid = int(file_open(self.env['runbot.runbot']._path('nginx', 'nginx.pid')).read().strip(' \n'))
os.kill(pid, signal.SIGHUP) os.kill(pid, signal.SIGHUP)
except Exception: except Exception:
_logger.info('start nginx') _logger.info('start nginx')
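
For context on the reload branch above: nginx re-reads its configuration when its master process receives SIGHUP, so writing the new config and signalling the pid read from nginx.pid is enough; if that fails, runbot starts nginx instead. A minimal sketch of that mechanism (standalone, the pid file path is illustrative):

import os
import signal

def reload_nginx(pid_file='/tmp/static/nginx/nginx.pid'):
    # SIGHUP asks the nginx master to reload its config without dropping connections
    with open(pid_file) as f:
        pid = int(f.read().strip())
    os.kill(pid, signal.SIGHUP)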
@@ -210,7 +217,7 @@
runbot_do_fetch = get_param('runbot.runbot_do_fetch')
runbot_do_schedule = get_param('runbot.runbot_do_schedule')
host = self.env['runbot.host']._get_current()
-host.set_psql_conn_count()
+host._set_psql_conn_count()
host.last_start_loop = fields.Datetime.now()
self._commit()
# Bootstrap
@@ -227,18 +234,16 @@
self._fetch_loop_turn(host, pull_info_failures)
if runbot_do_schedule:
sleep_time = self._scheduler_loop_turn(host, update_frequency)
-self.sleep(sleep_time)
+time.sleep(sleep_time)
else:
-self.sleep(update_frequency)
+time.sleep(update_frequency)
self._commit()
host.last_end_loop = fields.Datetime.now()
-def sleep(self, t):
-time.sleep(t)
def _fetch_loop_turn(self, host, pull_info_failures, default_sleep=1):
-with self.manage_host_exception(host) as manager:
+with self._manage_host_exception(host) as manager:
repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')])
processing_batch = self.env['runbot.batch'].search([('state', 'in', ('preparing', 'ready'))], order='id asc')
preparing_batch = processing_batch.filtered(lambda b: b.state == 'preparing')
@@ -261,7 +266,7 @@
self.env.clear()
pull_number = e.response.url.split('/')[-1]
pull_info_failures[pull_number] = time.time()
-self.warning('Pr pull info failed for %s', pull_number)
+self._warning('Pr pull info failed for %s', pull_number)
self._commit()
if processing_batch:
@@ -283,13 +288,13 @@
return manager.get('sleep', default_sleep)
def _scheduler_loop_turn(self, host, sleep=5):
-with self.manage_host_exception(host) as manager:
+with self._manage_host_exception(host) as manager:
if self._scheduler(host):
sleep = 0.1
return manager.get('sleep', sleep)
@contextmanager
-def manage_host_exception(self, host):
+def _manage_host_exception(self, host):
res = {}
try:
yield res
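
The rename to `_manage_host_exception` keeps a pattern worth spelling out: the context manager yields a mutable dict, and the caller reads it after the `with` block, so the exception path can suggest a longer sleep without raising. A hedged, self-contained sketch of that pattern (the names and the 60-second back-off are illustrative, not runbot's actual values):

import logging
from contextlib import contextmanager

_logger = logging.getLogger(__name__)

@contextmanager
def manage_exception():
    res = {}
    try:
        yield res  # the caller keeps a reference to this dict
    except Exception:
        _logger.exception('loop turn failed')
        res['sleep'] = 60  # back off after a failure

def loop_turn():
    with manage_exception() as manager:
        pass  # one unit of work goes here
    # the name bound by `as` survives the block, so overrides are visible
    return manager.get('sleep', 5)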
@@ -335,7 +340,7 @@
to_keep = set()
repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')])
for repo in repos:
-repo_source = os.path.join(self._root(), 'sources', repo.name, '*')
+repo_source = repo._source_path('*')
for source_dir in glob.glob(repo_source):
if source_dir not in cannot_be_deleted_path:
to_delete.add(source_dir)
@@ -387,9 +392,9 @@
repo._git(['gc', '--prune=all', '--quiet'])
except CalledProcessError as e:
message = f'git gc failed for {repo.name} on {host.name} with exit status {e.returncode} and message "{e.output[:60]} ..."'
-self.warning(message)
+self._warning(message)
-def warning(self, message, *args):
+def _warning(self, message, *args):
if args:
message = message % args
existing = self.env['runbot.warning'].search([('message', '=', message)], limit=1)

View File

@@ -4,7 +4,7 @@ import hashlib
import logging
import re
-from ..common import _make_github_session
+from ..common import make_github_session
from collections import defaultdict
from dateutil.relativedelta import relativedelta
from fnmatch import fnmatch
@@ -98,7 +98,7 @@ class RunbotTeam(models.Model):
for team in self:
if team.github_team:
url = f"https://api.github.com/orgs/{team.organisation}/teams/{team.github_team}"
-session = _make_github_session(team.project_id.sudo().token)
+session = make_github_session(team.project_id.sudo().token)
response = session.get(url)
if response.status_code != 200:
raise UserError(f'Cannot find team {team.github_team}')

View File

@@ -148,7 +148,7 @@
<t t-foreach="build.params_id.build_ids" t-as="simbuild">
<a t-if="simbuild.id != build.id" t-attf-href="/runbot/build/#{simbuild.id}">
<span
-t-attf-class="badge badge-{{simbuild.get_color_class()}}"
+t-attf-class="badge badge-{{simbuild._get_color_class()}}"
t-esc="simbuild.id"/>
</a>
</t>
@@ -159,7 +159,7 @@
<br/>
</t>
<b>Total time:</b>
-<t t-esc="build.get_formated_build_time()"/>
+<t t-esc="build._get_formated_build_time()"/>
<br/>
<t t-if="build.stat_ids">
<b>Stats:</b>
@@ -204,7 +204,7 @@
</t>
</td>
<td>
-<span t-attf-class="badge badge-info" t-esc="child.get_formated_build_time()"/>
+<span t-attf-class="badge badge-info" t-esc="child._get_formated_build_time()"/>
</td>
<td>
<t t-call="runbot.build_button">
@@ -378,7 +378,7 @@
<span t-esc="build.params_id.version_id.name"/>
</td>
<td>
-<span t-esc="build.get_formated_build_time()"/>
+<span t-esc="build._get_formated_build_time()"/>
</td>
<td>
<t t-call="runbot.build_button">

View File

@@ -39,7 +39,7 @@
<tr>
<td>Branches</td>
<td>
-<t t-foreach="bundle.branch_groups().items()" t-as="group">
+<t t-foreach="bundle._branch_groups().items()" t-as="group">
<t t-foreach="group[1]" t-as="branch">
<small>
<div class="btn-toolbar mb-1" role="toolbar">
@@ -75,7 +75,7 @@
</tr>
</table>
</div>
-<div t-foreach="bundle.consistency_warning()" t-as="warning" t-esc="warning[1]" t-attf-class="alert alert-{{warning[0]}}"/>
+<div t-foreach="bundle._consistency_warning()" t-as="warning" t-esc="warning[1]" t-attf-class="alert alert-{{warning[0]}}"/>
<div class="batch_row" t-foreach="batchs" t-as="batch">
<t t-call="runbot.batch_tile"/>
</div>

View File

@@ -26,7 +26,7 @@
<t t-esc="bundle.name"/>
</h4>
<t t-foreach="bundle.last_done_batch.slot_ids" t-as="slot">
-<span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}">
+<span t-attf-class="badge badge-{{slot.build_id._get_color_class()}}">
<t t-esc="slot.trigger_id.name"/>
</span>
</t>
@@ -163,12 +163,12 @@
</td>
<t t-set="build" t-value="slot.build_id"/>
<td>
-<span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}">
+<span t-attf-class="badge badge-{{slot.build_id._get_color_class()}}">
<i t-attf-class="fa fa-{{category.icon}}"/>
</span>
</td>
<td t-foreach="build.children_ids" t-as="child">
-<span t-attf-class="badge badge-{{slot.build_id.get_color_class()}}">
+<span t-attf-class="badge badge-{{slot.build_id._get_color_class()}}">
<t t-esc="child.params_id.config_id.name[:4]"/>
</span>
</td>

View File

@@ -87,7 +87,7 @@
<a t-attf-href="/runbot/batch/#{batch.id}" title="View Batch">
<div class="batch_header">
<span t-attf-class="badge badge-{{'warning' if batch.has_warning else 'light'}}">
-<t t-esc="batch.get_formated_age()"/>
+<t t-esc="batch._get_formated_age()"/>
<i class="fa fa-exclamation-triangle" t-if="batch.has_warning"/>
</span>
<span class="float-right header_hover">View batch...</span>

View File

@@ -190,10 +190,10 @@
<template id="runbot.slot_button">
<t t-set="bu" t-value="slot.build_id"/>
-<t t-set="color" t-value="bu.get_color_class()"/>
+<t t-set="color" t-value="bu._get_color_class()"/>
<div t-attf-class="btn-group btn-group-ssm slot_button_group">
<span t-attf-class="btn btn-{{color}} disabled" t-att-title="slot.link_type">
-<i t-attf-class="fa fa-{{slot.fa_link_type()}}"/>
+<i t-attf-class="fa fa-{{slot._fa_link_type()}}"/>
</span>
<a t-if="bu" t-attf-href="/runbot/batch/{{slot.batch_id.id}}/build/#{bu.id}" t-attf-class="btn btn-default slot_name">
<span t-esc="slot.trigger_id.name"/>

View File

@@ -168,7 +168,6 @@ class RunbotCase(TransactionCase):
self.start_patcher('git_patcher', 'odoo.addons.runbot.models.repo.Repo._git', new=self.mock_git_helper())
self.start_patcher('hostname_patcher', 'odoo.addons.runbot.common.socket.gethostname', 'host.runbot.com')
self.start_patcher('github_patcher', 'odoo.addons.runbot.models.repo.Remote._github', {})
-self.start_patcher('repo_root_patcher', 'odoo.addons.runbot.models.runbot.Runbot._root', '/tmp/runbot_test/static')
self.start_patcher('makedirs', 'odoo.addons.runbot.common.os.makedirs', True)
self.start_patcher('mkdir', 'odoo.addons.runbot.common.os.mkdir', True)
self.start_patcher('local_pgadmin_cursor', 'odoo.addons.runbot.common.local_pgadmin_cursor', False) # avoid to create databases
@@ -185,11 +184,13 @@ class RunbotCase(TransactionCase):
self.start_patcher('_local_cleanup_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_cleanup')
self.start_patcher('_local_pg_dropdb_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_pg_dropdb')
-self.start_patcher('set_psql_conn_count', 'odoo.addons.runbot.models.host.Host.set_psql_conn_count', None)
+self.start_patcher('set_psql_conn_count', 'odoo.addons.runbot.models.host.Host._set_psql_conn_count', None)
self.start_patcher('reload_nginx', 'odoo.addons.runbot.models.runbot.Runbot._reload_nginx', None)
self.start_patcher('update_commits_infos', 'odoo.addons.runbot.models.batch.Batch._update_commits_infos', None)
self.start_patcher('_local_pg_createdb', 'odoo.addons.runbot.models.build.BuildResult._local_pg_createdb', True)
self.start_patcher('getmtime', 'odoo.addons.runbot.common.os.path.getmtime', datetime.datetime.now().timestamp())
+self.start_patcher('file_exist', 'odoo.tools.misc.os.path.exists', True)
self.start_patcher('_get_py_version', 'odoo.addons.runbot.models.build.BuildResult._get_py_version', 3)

View File

@@ -57,10 +57,6 @@ class TestBuildParams(RunbotCaseMinimalSetup):
self.assertEqual(params.fingerprint, same_params.fingerprint)
self.assertEqual(params.id, same_params.id)
-# test that params cannot be overwitten
-with self.assertRaises(UserError):
-params.write({'modules': 'bar'})
# Test that a copied param without changes does not create a new record
copied_params = params.copy()
self.assertEqual(copied_params.id, params.id)
@@ -242,13 +238,13 @@ class TestBuildResult(RunbotCase):
def is_file(file):
self.assertIn(file, [
-'/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt',
+self.env['runbot.runbot']._path('sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt'),
-'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/requirements.txt',
+self.env['runbot.runbot']._path('sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/requirements.txt'),
-'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py',
+self.env['runbot.runbot']._path('sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py'),
-'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/tools/config.py',
+self.env['runbot.runbot']._path('sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/odoo/tools/config.py'),
-'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/sql_db.py'
+self.env['runbot.runbot']._path('sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/odoo/sql_db.py')
])
-if file == '/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt':
+if file == self.env['runbot.runbot']._path('static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt'):
return False
return True

View File

@@ -23,7 +23,7 @@ class TestBuildConfigStepCommon(RunbotCase):
'local_result': 'ok',
})
self.start_patcher('find_patcher', 'odoo.addons.runbot.common.find', 0)
-self.start_patcher('findall_patcher', 'odoo.addons.runbot.models.build.BuildResult.parse_config', {})
+self.start_patcher('findall_patcher', 'odoo.addons.runbot.models.build.BuildResult._parse_config', {})
class TestCodeowner(TestBuildConfigStepCommon):
@@ -44,7 +44,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_is_base(self):
self.dev_bundle.is_base = True
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
'Skipping base bundle',
])
@@ -53,7 +53,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_check_limits(self):
self.parent_build.params_id.commit_link_ids[0].file_changed = 451
self.parent_build.params_id.commit_link_ids[0].base_ahead = 51
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
'Limit reached: dfdfcfcf has more than 50 commit (51) and will be skipped. Contact runbot team to increase your limit if it was intended',
'Limit reached: dfdfcfcf has more than 450 modified files (451) and will be skipped. Contact runbot team to increase your limit if it was intended',
@@ -62,7 +62,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_draft(self):
self.dev_pr.draft = True
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
'Some pr are draft, skipping: 1234'
])
@@ -75,7 +75,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_forwardpot(self):
self.dev_pr.pr_author = 'fw-bot'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
'Ignoring forward port pull request: 1234'
])
@@ -83,7 +83,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_invalid_target(self):
self.dev_pr.target_branch_name = 'master-other-dev-branch'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
'Some pr have an invalid target: 1234'
])
@@ -99,7 +99,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
})
second_pr.pull_head_name = f'{self.remote_server.owner}:{self.dev_branch.name}'
second_pr.bundle_id = self.dev_bundle.id
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
self.assertEqual(self.parent_build.log_ids.mapped('message'), [
"More than one open pr in this bundle for server: ['1234', '1235']"
])
@@ -115,7 +115,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_regex_multiple(self):
self.diff = 'file.js\nfile.py\nfile.xml'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(messages[1], 'Checking 2 codeowner regexed on 3 files')
self.assertEqual(messages[2], 'Adding team_js to reviewers for file [server/file.js](https://False/blob/dfdfcfcf/file.js)')
@@ -127,14 +127,14 @@ class TestCodeowner(TestBuildConfigStepCommon):
def test_codeowner_regex_some_already_on(self):
self.diff = 'file.js\nfile.py\nfile.xml'
self.dev_pr.reviewers = 'codeowner-team,team_js'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(messages[5], 'Requesting review for pull request [base/server:1234](https://example.com/base/server/pull/1234): team_py')
def test_codeowner_regex_all_already_on(self):
self.diff = 'file.js\nfile.py\nfile.xml'
self.dev_pr.reviewers = 'codeowner-team,team_js,team_py'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(messages[5], 'All reviewers are already on pull request [base/server:1234](https://example.com/base/server/pull/1234)')
@@ -144,7 +144,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
self.team1.github_logins = 'some_member,another_member'
self.team1.skip_team_pr = True
self.dev_pr.pr_author = 'some_member'
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(messages[5], "Skipping teams ['team_py'] since author is part of the team members")
self.assertEqual(messages[6], 'Requesting review for pull request [base/server:1234](https://example.com/base/server/pull/1234): codeowner-team, team_js')
@@ -156,7 +156,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
self.diff = '\n'.join([
'core/addons/module1/some/file.py',
])
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(
messages[2],
@@ -169,7 +169,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
self.diff = '\n'.join([
'core/addons/module1/some/file.py',
])
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(
messages[2],
@@ -187,7 +187,7 @@ class TestCodeowner(TestBuildConfigStepCommon):
'core/addons/module3/some/file.js',
'core/addons/module4/some/file.txt',
])
-self.config_step._run_codeowner(self.parent_build, '/tmp/essai')
+self.config_step._run_codeowner(self.parent_build)
messages = self.parent_build.log_ids.mapped('message')
self.assertEqual(messages, [
'PR [base/server:1234](https://example.com/base/server/pull/1234) found for repo **server**',
@@ -251,7 +251,7 @@ class TestBuildConfigStepRestore(TestBuildConfigStepCommon):
dev_build = dev_batch.slot_ids.build_id
self.assertEqual(dev_build.params_id.config_data, config_data)
-docker_params = self.restore_config_step._run_restore(dev_build, '/tmp/logs')
+docker_params = self.restore_config_step._run_restore(dev_build)
cmds = docker_params['cmd'].split(' && ')
self.assertEqual(f'wget https://False/runbot/static/build/{reference_build.dest}/logs/{reference_build.dest}-suffix.zip', cmds[2])
self.assertEqual(f'psql -q {dev_build.dest}-suffix < dump.sql', cmds[8])
@@ -274,7 +274,7 @@ class TestBuildConfigStepCreate(TestBuildConfigStepCommon):
def test_config_step_create_results(self):
""" Test child builds are taken into account"""
-self.config_step._run_create_build(self.parent_build, '/tmp/essai')
+self.config_step._run_create_build(self.parent_build)
self.assertEqual(len(self.parent_build.children_ids), 2, 'Two sub-builds should have been generated')
# check that the result will be ignored by parent build
@@ -289,7 +289,7 @@
def test_config_step_create(self):
""" Test the config step of type create """
self.config_step.make_orphan = True
-self.config_step._run_create_build(self.parent_build, '/tmp/essai')
+self.config_step._run_create_build(self.parent_build)
self.assertEqual(len(self.parent_build.children_ids), 2, 'Two sub-builds should have been generated')
# check that the result will be ignored by parent build
@@ -313,7 +313,7 @@
}).id,
})
-self.config_step._run_create_build(self.parent_build, '/tmp/essai')
+self.config_step._run_create_build(self.parent_build)
self.assertEqual(len(self.parent_build.children_ids), 10, '10 build should have been generated')
# check that the result will be ignored by parent build
@@ -332,7 +332,7 @@
}).id,
})
-self.config_step._run_create_build(self.parent_build, '/tmp/essai')
+self.config_step._run_create_build(self.parent_build)
self.assertEqual(len(self.parent_build.children_ids), 5, '5 build should have been generated')
# check that the result will be ignored by parent build
@@ -356,7 +356,7 @@
}).id,
})
-self.config_step._run_create_build(self.parent_build, '/tmp/essai')
+self.config_step._run_create_build(self.parent_build)
self.assertEqual(len(self.parent_build.children_ids), 10, '10 build should have been generated')
self.assertEqual(len(self.parent_build.children_ids.filtered(lambda b: b.config_id == test_config_1)), 5)
self.assertEqual(len(self.parent_build.children_ids.filtered(lambda b: b.config_id == test_config_2)), 5)
@@ -429,6 +429,7 @@ class TestBuildConfigStep(TestBuildConfigStepCommon):
@patch('odoo.addons.runbot.models.build.BuildResult._checkout')
def test_coverage(self, mock_checkout):
config_step = self.ConfigStep.create({
'name': 'coverage',
'job_type': 'install_odoo',
@@ -443,7 +444,7 @@
self.assertEqual(log_path, 'dev/null/logpath')
self.patchers['docker_run'].side_effect = docker_run
-config_step._run_install_odoo(self.parent_build, 'dev/null/logpath')
+config_step._run_install_odoo(self.parent_build)
@patch('odoo.addons.runbot.models.build.BuildResult._checkout')
def test_dump(self, mock_checkout):
@@ -462,14 +463,14 @@
self.patchers['docker_run'].side_effect = docker_run
-config_step._run_install_odoo(self.parent_build, 'dev/null/logpath')
+config_step._run_install_odoo(self.parent_build)
def get_test_tags(self, params):
cmds = params['cmd'].build().split(' && ')
self.assertEqual(cmds[1].split(' server/server.py')[0], 'python3')
return cmds[1].split('--test-tags ')[1].split(' ')[0]
-@patch('odoo.addons.runbot.models.build.BuildResult.parse_config')
+@patch('odoo.addons.runbot.models.build.BuildResult._parse_config')
@patch('odoo.addons.runbot.models.build.BuildResult._checkout')
def test_install_tags(self, mock_checkout, parse_config):
parse_config.return_value = {'--test-enable', '--test-tags'}
@@ -484,16 +485,16 @@
'random': True,
'test_tags': ':otherclass.othertest'
})
-params = config_step._run_install_odoo(self.parent_build, 'dev/null/logpath')
+params = config_step._run_install_odoo(self.parent_build)
tags = self.get_test_tags(params)
self.assertEqual(tags, '/module,:class.method')
config_step.enable_auto_tags = True
-params = config_step._run_install_odoo(self.parent_build, 'dev/null/logpath')
+params = config_step._run_install_odoo(self.parent_build)
tags = self.get_test_tags(params)
self.assertEqual(tags, '/module,:class.method,-:otherclass.othertest')
-@patch('odoo.addons.runbot.models.build.BuildResult.parse_config')
+@patch('odoo.addons.runbot.models.build.BuildResult._parse_config')
@patch('odoo.addons.runbot.models.build.BuildResult._checkout')
def test_install_custom_tags(self, mock_checkout, parse_config):
parse_config.return_value = {'--test-enable', '--test-tags'}
@@ -510,7 +511,7 @@
child = self.parent_build._add_child({'config_data': {'test_tags': '-at_install,/module1,/module2'}})
-params = config_step._run_install_odoo(child, 'dev/null/logpath')
+params = config_step._run_install_odoo(child)
tags = self.get_test_tags(params)
self.assertEqual(tags, '-at_install,/module1,/module2,-:otherclass.othertest')
@@ -533,19 +534,19 @@
self.patchers['docker_run'].side_effect = docker_run
-config_step._run_step(self.parent_build, 'dev/null/logpath')()
+config_step._run_step(self.parent_build)()
assert_db_name = 'custom_build'
parent_build_params = self.parent_build.params_id.copy({'config_data': {'db_name': 'custom_build'}})
parent_build = self.parent_build.copy({'params_id': parent_build_params.id})
-config_step._run_step(parent_build, 'dev/null/logpath')()
+config_step._run_step(parent_build)()
config_step = self.ConfigStep.create({
'name': 'run_test',
'job_type': 'run_odoo',
'custom_db_name': 'custom',
})
-config_step._run_step(parent_build, 'dev/null/logpath')()
+config_step._run_step(parent_build)()
self.assertEqual(call_count, 3)
@@ -567,7 +568,7 @@ docker_params = dict(cmd=cmd)
self.assertIn('-d test_database', run_cmd)
self.patchers['docker_run'].side_effect = docker_run
-config_step._run_step(self.parent_build, 'dev/null/logpath')()
+config_step._run_step(self.parent_build)()
self.patchers['docker_run'].assert_called_once()
db = self.env['runbot.database'].search([('name', '=', 'test_database')])
self.assertEqual(db.build_id, self.parent_build)
@@ -584,7 +585,7 @@ def run():
'python_code': test_code,
})
-retult = config_step._run_python(self.parent_build, 'dev/null/logpath')
+retult = config_step._run_python(self.parent_build)
self.assertEqual(retult, {'a': 'b'})
@patch('odoo.addons.runbot.models.build.BuildResult._checkout')
@@ -603,7 +604,7 @@ def run():
call_count += 1
self.patchers['docker_run'].side_effect = docker_run
-config_step._run_step(self.parent_build, 'dev/null/logpath')()
+config_step._run_step(self.parent_build)()
self.assertEqual(call_count, 1)

View File

@@ -115,7 +115,7 @@ class TestBuildError(RunbotCase):
# test that the random bug is parent when linking errors
all_errors = error_a | error_b
-all_errors.link_errors()
+all_errors.action_link_errors()
self.assertEqual(error_b.child_ids, error_a, 'Random error should be the parent')
# Test that changing bug resolution is propagated to children
@@ -155,20 +155,20 @@
error_a.test_tags = 'foo,bar'
error_b.test_tags = 'blah'
-self.assertIn('foo', self.BuildError.test_tags_list())
+self.assertIn('foo', self.BuildError._test_tags_list())
-self.assertIn('bar', self.BuildError.test_tags_list())
+self.assertIn('bar', self.BuildError._test_tags_list())
-self.assertIn('-foo', self.BuildError.disabling_tags())
+self.assertIn('-foo', self.BuildError._disabling_tags())
-self.assertIn('-bar', self.BuildError.disabling_tags())
+self.assertIn('-bar', self.BuildError._disabling_tags())
# test that test tags on fixed errors are not taken into account
-self.assertNotIn('blah', self.BuildError.test_tags_list())
+self.assertNotIn('blah', self.BuildError._test_tags_list())
-self.assertNotIn('-blah', self.BuildError.disabling_tags())
+self.assertNotIn('-blah', self.BuildError._disabling_tags())
error_a.test_tags = False
error_b.active = True
error_b.parent_id = error_a.id
self.assertEqual(error_b.test_tags, False)
-self.assertEqual(self.BuildError.disabling_tags(), ['-blah',])
+self.assertEqual(self.BuildError._disabling_tags(), ['-blah',])
def test_build_error_team_wildcards(self):

View File

@@ -18,15 +18,12 @@ class TestRepo(RunbotCaseMinimalSetup):
def setUp(self):
super(TestRepo, self).setUp()
self.commit_list = {}
-self.mock_root = self.patchers['repo_root_patcher']
def test_base_fields(self):
-self.mock_root.return_value = '/tmp/static'
repo = self.repo_server
remote = self.remote_server
# name = 'bla@example.com:base/server'
-self.assertEqual(repo.path, '/tmp/static/repo/server')
+self.assertTrue(repo.path.endswith('static/repo/server'))
self.assertEqual(remote.base_url, 'example.com/base/server')
self.assertEqual(remote.short_name, 'base/server')
self.assertEqual(remote.owner, 'base')
@@ -253,7 +250,6 @@ class TestRepo(RunbotCaseMinimalSetup):
@skip('This test is for performances. It needs a lot of real branches in DB to mean something')
def test_repo_perf_find_new_commits(self):
-self.mock_root.return_value = '/tmp/static'
repo = self.env['runbot.repo'].search([('name', '=', 'blabla')])
self.commit_list[self.repo_server.id] = []
@@ -307,8 +303,8 @@
self.assertEqual(repo1[field_name], 1.3)
self.assertEqual(repo2[field_name], 1.4)
-_test_times('runbot.repo.hooktime', 'set_hook_time', 'hook_time')
+_test_times('runbot.repo.hooktime', '_set_hook_time', 'hook_time')
-_test_times('runbot.repo.reftime', 'set_ref_time', 'get_ref_time')
+_test_times('runbot.repo.reftime', '_set_ref_time', 'get_ref_time')
class TestGithub(TransactionCase):
@@ -356,7 +352,6 @@ class TestFetch(RunbotCase):
def setUp(self):
super(TestFetch, self).setUp()
-self.mock_root = self.patchers['repo_root_patcher']
self.fetch_count = 0
self.force_failure = False
@@ -425,13 +420,6 @@ class TestIdentityFile(RunbotCase):
class TestRepoScheduler(RunbotCase):
-def setUp(self):
-# as the _scheduler method commits, we need to protect the database
-super(TestRepoScheduler, self).setUp()
-mock_root = self.patchers['repo_root_patcher']
-mock_root.return_value = '/tmp/static'
@patch('odoo.addons.runbot.models.build.BuildResult._kill')
@patch('odoo.addons.runbot.models.build.BuildResult._schedule')
@patch('odoo.addons.runbot.models.build.BuildResult._init_pendings')

View File

@@ -9,6 +9,6 @@ _logger = logging.getLogger(__name__)
class TestRunbot(RunbotCase):
def test_warning_from_runbot_abstract(self):
-warning = self.env['runbot.runbot'].warning('Test warning message')
+warning = self.env['runbot.runbot']._warning('Test warning message')
self.assertTrue(self.env['runbot.warning'].browse(warning.id).exists())

View File

@@ -451,7 +451,7 @@ class TestUpgradeFlow(RunbotCase):
def docker_run_upgrade(cmd, *args, ro_volumes=False, **kwargs):
user = getpass.getuser()
-self.assertTrue(ro_volumes.pop(f'/home/{user}/.odoorc').startswith('/tmp/runbot_test/static/build/'))
+self.assertTrue(ro_volumes.pop(f'/home/{user}/.odoorc').startswith(self.env['runbot.runbot']._path('build')))
self.assertEqual(
list(ro_volumes.keys()), [
'/data/build/addons',

View File

@@ -6,7 +6,7 @@
<field name="arch" type="xml">
<form>
<header>
-<button name="recompute_infos" string="Recompute Infos" type="object" class="oe_highlight"/>
+<button name="action_recompute_infos" string="Recompute Infos" type="object" class="oe_highlight"/>
</header>
<sheet>
<group name="branch_group">

View File

@@ -123,7 +123,7 @@
<field name="archive"/>
</group>
<footer>
-<button string="Submit" name="submit" type="object" class="btn-primary"/>
+<button string="Submit" name="action_submit" type="object" class="btn-primary"/>
<button string="Cancel" class="btn-secondary" special="cancel"/>
</footer>
</sheet>

View File

@@ -37,8 +37,8 @@
<field name="arch" type="xml">
<form string="Bundles">
<header>
-<button name="generate_custom_trigger_multi_action" string="New custom multi" type="object" class="oe_highlight"/>
+<button name="action_generate_custom_trigger_multi_action" string="New custom multi" type="object" class="oe_highlight"/>
-<button name="generate_custom_trigger_restore_action" string="New custom restore" type="object" class="oe_highlight"/>
+<button name="action_generate_custom_trigger_restore_action" string="New custom restore" type="object" class="oe_highlight"/>
</header>
<sheet>
<group>

View File

@@ -30,7 +30,7 @@
</group>
</group>
<footer>
-<button name="submit" string="Submit" type="object" class="btn-primary"/>
+<button name="action_submit" string="Submit" type="object" class="btn-primary"/>
<button string="Cancel" special="cancel" class="btn-default"/>
</footer>
</form>

View File

@@ -160,7 +160,7 @@
<field name="arch" type="xml">
<form>
<header>
-<button name="check_token" string="Check token" type="object" class="oe_highlight" groups="runbot.group_runbot_admin"/>
+<button name="action_check_token" string="Check token" type="object" class="oe_highlight" groups="runbot.group_runbot_admin"/>
</header>
<sheet>
<group name="repo_group">

View File

@@ -53,7 +53,7 @@ class StatRegexWizard(models.TransientModel):
self.key = key
self.value = value
-def save(self):
+def action_save(self):
if self.regex and self.test_text:
self._validate_regex()
stat_regex = self.env['runbot.build.stat.regex'].create({

View File

@@ -19,7 +19,7 @@
<field name="message" readonly="1"/>
</group>
<footer>
-<button name="save" string="Save" type="object" class="btn-primary"/>
+<button name="action_save" string="Save" type="object" class="btn-primary"/>
<button string="Cancel" special="cancel" class="btn-default"/>
</footer>
</form>

View File

@@ -12,7 +12,7 @@ _logger = logging.getLogger(__name__)
class BuilderClient(RunbotClient):
def on_start(self):
-builds_path = Path(self.env['runbot.runbot']._root()) / 'build'
+builds_path = self.env['runbot.runbot']._path('build')
monitoring_thread = threading.Thread(target=docker_monitoring_loop, args=(builds_path,), daemon=True)
monitoring_thread.start()
@@ -24,7 +24,7 @@ class BuilderClient(RunbotClient):
self.env['runbot.runbot']._source_cleanup()
self.env['runbot.build']._local_cleanup()
self.env['runbot.runbot']._docker_cleanup()
-self.host.set_psql_conn_count()
+self.host._set_psql_conn_count()
self.host._docker_build()
self.env['runbot.repo']._update_git_config()
self.git_gc()

View File

@@ -1,171 +0,0 @@
-#!/usr/bin/python3
-import argparse
-import contextlib
-import logging
-import psycopg2
-import os
-import re
-import shutil
-import sys
-from collections import defaultdict
-from logging.handlers import WatchedFileHandler
-LOG_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s'
-logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
-logging.getLogger('odoo.addons.runbot').setLevel(logging.DEBUG)
-logging.addLevelName(25, "!NFO")
-_logger = logging.getLogger(__name__)
-DBRE = r'^(?P<build_id>\d+)-.+-[0-9a-f]{6}-?(?P<db_suffix>.*)$'
-@contextlib.contextmanager
-def local_pgadmin_cursor():
-cnx = None
-try:
-cnx = psycopg2.connect("dbname=postgres")
-cnx.autocommit = True # required for admin commands
-yield cnx.cursor()
-finally:
-if cnx:
-cnx.close()
-def list_local_dbs():
-with local_pgadmin_cursor() as local_cr:
-local_cr.execute("""
-SELECT datname
-FROM pg_database
-WHERE pg_get_userbyid(datdba) = current_user
-""")
-return [d[0] for d in local_cr.fetchall()]
-def _local_pg_rename_db(dbname, new_db_name):
-with local_pgadmin_cursor() as local_cr:
-pid_col = 'pid' if local_cr.connection.server_version >= 90200 else 'procpid'
-query = 'SELECT pg_terminate_backend({}) FROM pg_stat_activity WHERE datname=%s'.format(pid_col)
-local_cr.execute(query, [dbname])
-local_cr.execute("ALTER DATABASE \"%s\" RENAME TO \"%s\";" % (dbname, new_db_name))
-class RunbotClient():
-def __init__(self, env):
-self.env = env
-def rename_build_dirs(self, args):
-builds_root = os.path.join(self.env['runbot.runbot']._root(), 'build')
-builds_backup_root = os.path.join(self.env['runbot.runbot']._root(), 'build-backup')
-if not args.dry_run:
-try:
-_logger.info('Backup build dir in "%s"', builds_backup_root)
-shutil.copytree(builds_root, builds_backup_root, copy_function=os.link)
-except FileExistsError:
-_logger.info('Backup path "%s" already exists, skipping', builds_backup_root)
-build_dirs = {}
-leftovers = []
-for dir_name in os.listdir(builds_root):
-match = re.match(DBRE, dir_name)
-if match and match['db_suffix'] == '':
-build_dirs[match['build_id']] = dir_name
-else:
-leftovers.append(dir_name)
-for build in self.env['runbot.build'].search([('id', 'in', list(build_dirs.keys()))]):
-origin_dir = build_dirs[str(build.id)]
-origin_path = os.path.join(builds_root, origin_dir)
-if origin_dir == build.dest:
-_logger.info('Skip moving %s, already moved', build.dest)
-continue
-_logger.info('Moving "%s" --> "%s"', origin_dir, build.dest)
-if args.dry_run:
-continue
-dest_path = os.path.join(builds_root, build.dest)
-os.rename(origin_path, dest_path)
-for leftover in leftovers:
-_logger.info("leftover: %s", leftover)
-def rename_databases(self, args):
-total_db = 0
-db_names = defaultdict(dict)
-leftovers = []
-for local_db_name in list_local_dbs():
-match = re.match(DBRE, local_db_name)
-if match and match['db_suffix'] != '':
-db_names[match['build_id']][match['db_suffix']] = local_db_name
-else:
-leftovers.append(local_db_name)
-total_db += 1
-nb_matching = 0
-ids = [int(i) for i in db_names.keys()]
-builds = self.env['runbot.build'].search([('id', 'in', ids)])
-for build in builds:
-for suffix in db_names[str(build.id)].keys():
-origin_name = db_names[str(build.id)][suffix]
-dest_name = "%s-%s" % (build.dest, suffix)
-nb_matching += 1
-_logger.info('Renaming database "%s" --> "%s"', origin_name, dest_name)
-if args.dry_run:
-continue
-_local_pg_rename_db(origin_name, dest_name)
-_logger.info("Found %s databases", total_db)
-_logger.info("Found %s matching databases", nb_matching)
-_logger.info("Leftovers: %s", len(leftovers))
-_logger.info("Builds not found : %s", len(set(ids) - set(builds.ids)))
-def run():
-# parse args
-parser = argparse.ArgumentParser()
-parser.add_argument('--odoo-path', help='Odoo sources path')
-parser.add_argument('--db_host', default='127.0.0.1')
-parser.add_argument('--db_port', default='5432')
-parser.add_argument('--db_user')
-parser.add_argument('--db_password')
-parser.add_argument('-d', '--database', default='runbot_upgrade', help='name of runbot db')
-parser.add_argument('--logfile', default=False)
-parser.add_argument('-n', '--dry-run', action='store_true')
-args = parser.parse_args()
-if args.logfile:
-dirname = os.path.dirname(args.logfile)
-if dirname and not os.path.isdir(dirname):
-os.makedirs(dirname)
-handler = WatchedFileHandler(args.logfile)
-formatter = logging.Formatter(LOG_FORMAT)
-handler.setFormatter(formatter)
-_logger.parent.handlers.clear()
-_logger.parent.addHandler(handler)
-# configure odoo
-sys.path.append(args.odoo_path)
-import odoo
-_logger.info("Starting upgrade move script using database %s", args.database)
-odoo.tools.config['db_host'] = args.db_host
-odoo.tools.config['db_port'] = args.db_port
-odoo.tools.config['db_user'] = args.db_user
-odoo.tools.config['db_password'] = args.db_password
-addon_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
-config_addons_path = odoo.tools.config['addons_path']
-odoo.tools.config['addons_path'] = ','.join([config_addons_path, addon_path])
-# create environment
-registry = odoo.registry(args.database)
-with odoo.api.Environment.manage():
-with registry.cursor() as cr:
-env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
-runbot_client = RunbotClient(env)
-runbot_client.rename_build_dirs(args)
-runbot_client.rename_databases(args)
-if __name__ == '__main__':
-run()
-_logger.info("All done")

View File

@@ -224,6 +224,7 @@ def prepare_stats_log(dest, previous_stats, current_stats):
return previous_stats, '\n'.join(log_lines)
def docker_monitoring_loop(builds_dir):
+builds_dir = Path(builds_dir)
docker_client = docker.from_env()
previous_stats_per_docker = {}
_logger.info('Starting docker monitoring loop thread')
@@ -242,7 +243,8 @@ def docker_monitoring_loop(builds_dir):
previous_stats, log_line = prepare_stats_log(dest, previous_stats, current_stats)
if log_line:
stat_log_file = container_log_dir / f'{suffix}-stats.txt'
-stat_log_file.open(mode='a').write(f'{log_line}\n')
+with open(stat_log_file, mode='a') as f:
+f.write(f'{log_line}\n')
stats_per_docker[container.name] = previous_stats
previous_stats_per_docker = stats_per_docker
time.sleep(1)
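
A note on the write change above: `stat_log_file.open(mode='a').write(...)` leaves the file handle to be closed whenever the object is garbage collected, while the `with` form flushes and closes it deterministically at block exit. A small contrast sketch (the path is illustrative):

from pathlib import Path

stat_log_file = Path('/tmp/example-stats.txt')  # illustrative path

stat_log_file.open(mode='a').write('line\n')  # close time left to the GC

with open(stat_log_file, mode='a') as f:  # closed and flushed at block exit
    f.write('line\n')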

View File

@@ -15,7 +15,7 @@ class Step(models.Model):
job_type = fields.Selection(selection_add=[('cla_check', 'Check cla')], ondelete={'cla_check': 'cascade'})
-def _run_cla_check(self, build, log_path):
+def _run_cla_check(self, build):
build._checkout()
cla_glob = glob.glob(build._get_server_commit()._source_path("doc/cla/*/*.md"))
error = False