.+\)" # used to verify value group pattern
@@ -53,7 +54,7 @@ class BuildStatRegex(models.Model):
if not os.path.exists(file_path):
return {}
stats_matches = {}
- with open(file_path, "r") as log_file:
+ with file_open(file_path, "r") as log_file:
data = log_file.read()
for build_stat_regex in self:
current_stat_matches = {}
diff --git a/runbot/models/bundle.py b/runbot/models/bundle.py
index 7710eef7..266d2eb2 100644
--- a/runbot/models/bundle.py
+++ b/runbot/models/bundle.py
@@ -225,7 +225,7 @@ class Bundle(models.Model):
self.last_batch = new
return new
- def consistency_warning(self):
+ def _consistency_warning(self):
if self.defined_base_id:
return [('info', 'This bundle has a forced base: %s' % self.defined_base_id.name)]
warnings = []
@@ -242,34 +242,13 @@ class Bundle(models.Model):
warnings.append(('warning', 'Branch %s not starting with version name (%s)' % (branch.dname, self.base_id.name)))
return warnings
- def branch_groups(self):
+ def _branch_groups(self):
self.branch_ids.sorted(key=lambda b: (b.remote_id.repo_id.sequence, b.remote_id.repo_id.id, b.is_pr))
branch_groups = {repo: [] for repo in self.branch_ids.mapped('remote_id.repo_id').sorted('sequence')}
for branch in self.branch_ids.sorted(key=lambda b: (b.is_pr)):
branch_groups[branch.remote_id.repo_id].append(branch)
return branch_groups
- def generate_custom_trigger_multi_action(self):
- context = {
- 'default_bundle_id': self.id,
- 'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id,
- 'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
- 'default_extra_params': False,
- 'default_child_extra_params': '--test-tags /module.test_method',
- 'default_number_build': 10,
- }
- return self._generate_custom_trigger_action(context)
-
- def generate_custom_trigger_restore_action(self):
- context = {
- 'default_bundle_id': self.id,
- 'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
- 'default_child_config_id': False,
- 'default_extra_params': '--test-tags /module.test_method',
- 'default_child_extra_params': False,
- 'default_number_build': 0,
- }
- return self._generate_custom_trigger_action(context)
def _generate_custom_trigger_action(self, context):
return {
@@ -280,3 +259,25 @@ class Bundle(models.Model):
'target': 'new',
'context': context,
}
+
+ def action_generate_custom_trigger_multi_action(self):
+ context = {
+ 'default_bundle_id': self.id,
+ 'default_config_id': self.env.ref('runbot.runbot_build_config_custom_multi').id,
+ 'default_child_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
+ 'default_extra_params': False,
+ 'default_child_extra_params': '--test-tags /module.test_method',
+ 'default_number_build': 10,
+ }
+ return self._generate_custom_trigger_action(context)
+
+ def action_generate_custom_trigger_restore_action(self):
+ context = {
+ 'default_bundle_id': self.id,
+ 'default_config_id': self.env.ref('runbot.runbot_build_config_restore_and_test').id,
+ 'default_child_config_id': False,
+ 'default_extra_params': '--test-tags /module.test_method',
+ 'default_child_extra_params': False,
+ 'default_number_build': 0,
+ }
+ return self._generate_custom_trigger_action(context)
diff --git a/runbot/models/commit.py b/runbot/models/commit.py
index c33ec964..f04228ed 100644
--- a/runbot/models/commit.py
+++ b/runbot/models/commit.py
@@ -1,11 +1,12 @@
import subprocess
-from ..common import os, RunbotException, _make_github_session
+from ..common import os, RunbotException, make_github_session
import glob
import shutil
from odoo import models, fields, api, registry
+from odoo.tools import file_open
import logging
_logger = logging.getLogger(__name__)
@@ -52,7 +53,7 @@ class Commit(models.Model):
module = os.path.basename(os.path.dirname(manifest_path))
yield (addons_path, module, manifest_file_name)
- def export(self, build):
+ def _export(self, build):
"""Export a git repo into a sources"""
# TODO add automated tests
self.ensure_one()
@@ -106,19 +107,19 @@ class Commit(models.Model):
return export_path
- def read_source(self, file, mode='r'):
+ def _read_source(self, file, mode='r'):
file_path = self._source_path(file)
try:
- with open(file_path, mode) as f:
+ with file_open(file_path, mode) as f:
return f.read()
except:
return False
- def _source_path(self, *path):
+ def _source_path(self, *paths):
export_name = self.name
if self.rebase_on_id:
export_name = '%s_%s' % (self.name, self.rebase_on_id.name)
- return os.path.join(self.env['runbot.runbot']._root(), 'sources', self.repo_id.name, export_name, *path)
+ return self.repo_id._source_path(export_name, *paths)
@api.depends('name', 'repo_id.name')
def _compute_dname(self):
@@ -201,7 +202,7 @@ class CommitStatus(models.Model):
_logger.warning('No token on remote %s, skipping status', remote.mapped("name"))
else:
if remote.token not in session_cache:
- session_cache[remote.token] = _make_github_session(remote.token)
+ session_cache[remote.token] = make_github_session(remote.token)
session = session_cache[remote.token]
_logger.info(
"github updating %s status %s to %s in repo %s",
diff --git a/runbot/models/custom_trigger.py b/runbot/models/custom_trigger.py
index c8603b71..15c4d9c9 100644
--- a/runbot/models/custom_trigger.py
+++ b/runbot/models/custom_trigger.py
@@ -56,17 +56,17 @@ class CustomTriggerWizard(models.TransientModel):
@api.depends('config_id')
def _compute_has_create_step(self):
for record in self:
- record.has_create_step = any(step.job_type == 'create_build' for step in self.config_id.step_ids())
+ record.has_create_step = any(step.job_type == 'create_build' for step in self.config_id.step_ids)
@api.depends('config_id')
def _compute_has_restore_step(self):
for record in self:
- record.has_restore_step = any(step.job_type == 'restore' for step in self.config_id.step_ids())
+ record.has_restore_step = any(step.job_type == 'restore' for step in self.config_id.step_ids)
@api.depends('child_config_id')
def _compute_has_child_with_restore_step(self):
for record in self:
- record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in self.child_config_id.step_ids())
+ record.has_child_with_restore_step = record.child_config_id and any(step.job_type == 'restore' for step in self.child_config_id.step_ids)
@api.onchange('extra_params', 'child_extra_params', 'restore_dump_url', 'config_id', 'child_config_id', 'number_build', 'config_id', 'restore_mode', 'restore_database_suffix', 'restore_trigger_id')
def _onchange_warnings(self):
@@ -164,7 +164,7 @@ class CustomTriggerWizard(models.TransientModel):
def _get_existing_trigger(self):
return self.env['runbot.bundle.trigger.custom'].search([('bundle_id', '=', self.bundle_id.id), ('trigger_id', '=', self.trigger_id.id)])
- def submit(self):
+ def action_submit(self):
self.ensure_one()
self._get_existing_trigger().unlink()
self.env['runbot.bundle.trigger.custom'].create({
diff --git a/runbot/models/host.py b/runbot/models/host.py
index e9939415..d4637058 100644
--- a/runbot/models/host.py
+++ b/runbot/models/host.py
@@ -5,7 +5,7 @@ import time
from collections import defaultdict
from odoo import models, fields, api
-from odoo.tools import config, ormcache
+from odoo.tools import config, ormcache, file_open
from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor
from ..container import docker_build
@@ -106,8 +106,8 @@ class Host(models.Model):
def _bootstrap(self):
""" Create needed directories in static """
dirs = ['build', 'nginx', 'repo', 'sources', 'src', 'docker']
- static_path = self._get_work_path()
- static_dirs = {d: os.path.join(static_path, d) for d in dirs}
+ static_path = self.env['runbot.runbot']._root()
+ static_dirs = {d: self.env['runbot.runbot']._path(d) for d in dirs}
for dir, path in static_dirs.items():
os.makedirs(path, exist_ok=True)
self._bootstrap_db_template()
@@ -117,16 +117,14 @@ class Host(models.Model):
""" build docker images needed by locally pending builds"""
_logger.info('Building docker images...')
self.ensure_one()
- static_path = self._get_work_path()
self.clear_caches() # needed to ensure that content is updated on all hosts
for dockerfile in self.env['runbot.dockerfile'].search([('to_build', '=', True)]):
- self._docker_build_dockerfile(dockerfile, static_path)
+ self._docker_build_dockerfile(dockerfile)
_logger.info('Done...')
- def _docker_build_dockerfile(self, dockerfile, workdir):
+ def _docker_build_dockerfile(self, dockerfile):
start = time.time()
- # _logger.info('Building %s, %s', dockerfile.name, hash(str(dockerfile.dockerfile)))
- docker_build_path = os.path.join(workdir, 'docker', dockerfile.image_tag)
+ docker_build_path = self.env['runbot.runbot']._path('docker', dockerfile.image_tag)
os.makedirs(docker_build_path, exist_ok=True)
user = getpass.getuser()
@@ -139,22 +137,18 @@ class Host(models.Model):
USER {user}
ENV COVERAGE_FILE /data/build/.coverage
"""
-
- with open(os.path.join(docker_build_path, 'Dockerfile'), 'w') as Dockerfile:
+ with open(self.env['runbot.runbot']._path('docker', dockerfile.image_tag, 'Dockerfile'), 'w') as Dockerfile:
Dockerfile.write(dockerfile.dockerfile + docker_append)
docker_build_success, msg = docker_build(docker_build_path, dockerfile.image_tag)
if not docker_build_success:
dockerfile.to_build = False
dockerfile.message_post(body=f'Build failure:\n{msg}')
- # self.env['runbot.runbot'].warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}')
+ # self.env['runbot.runbot']._warning(f'Dockerfile build "{dockerfile.image_tag}" failed on host {self.name}')
else:
duration = time.time() - start
if duration > 1:
_logger.info('Dockerfile %s finished build in %s', dockerfile.image_tag, duration)
-
- def _get_work_path(self):
- return os.path.abspath(os.path.join(os.path.dirname(__file__), '../static'))
@ormcache()
def _host_list(self):
@@ -172,11 +166,11 @@ class Host(models.Model):
def _get_current_name(self):
return config.get('forced_host_name') or fqdn()
- def get_running_max(self):
+ def _get_running_max(self):
icp = self.env['ir.config_parameter']
return int(icp.get_param('runbot.runbot_running_max', default=5))
- def set_psql_conn_count(self):
+ def _set_psql_conn_count(self):
_logger.info('Updating psql connection count...')
self.ensure_one()
with local_pgadmin_cursor() as local_cr:
@@ -190,7 +184,7 @@ class Host(models.Model):
def _total_workers(self):
return sum(host.nb_worker for host in self)
- def disable(self):
+ def _disable(self):
""" Reserve host if possible """
self.ensure_one()
nb_hosts = self.env['runbot.host'].search_count([])
@@ -271,12 +265,12 @@ class Host(models.Model):
with local_pg_cursor(logs_db_name) as local_cr:
local_cr.execute("DELETE FROM ir_logging WHERE id in %s", [tuple(local_log_ids)])
- def get_build_domain(self, domain=None):
+ def _get_build_domain(self, domain=None):
domain = domain or []
return [('host', '=', self.name)] + domain
- def get_builds(self, domain, order=None):
- return self.env['runbot.build'].search(self.get_build_domain(domain), order=order)
+ def _get_builds(self, domain, order=None):
+ return self.env['runbot.build'].search(self._get_build_domain(domain), order=order)
def _process_messages(self):
self.host_message_ids._process()
@@ -298,5 +292,5 @@ class MessageQueue(models.Model):
# todo consume messages here
if records:
for record in records:
- self.env['runbot.runbot'].warning(f'Host {record.host_id.name} got an unexpected message {record.message}')
+ self.env['runbot.runbot']._warning(f'Host {record.host_id.name} got an unexpected message {record.message}')
self.unlink()
diff --git a/runbot/models/event.py b/runbot/models/ir_logging.py
similarity index 98%
rename from runbot/models/event.py
rename to runbot/models/ir_logging.py
index 119fbc95..a4666796 100644
--- a/runbot/models/event.py
+++ b/runbot/models/ir_logging.py
@@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__)
TYPES = [(t, t.capitalize()) for t in 'client server runbot subbuild link markdown'.split()]
-class runbot_event(models.Model):
+class IrLogging(models.Model):
_inherit = "ir.logging"
_order = 'id'
@@ -55,7 +55,7 @@ class runbot_event(models.Model):
for ir_logging in self:
ir_logging.error_id = False
if ir_logging.level in ('ERROR', 'CRITICAL', 'WARNING') and ir_logging.type == 'server':
- fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes.r_sub('%', ir_logging.message))].append(ir_logging)
+ fingerprints[self.env['runbot.build.error']._digest(cleaning_regexes._r_sub('%', ir_logging.message))].append(ir_logging)
for build_error in self.env['runbot.build.error'].search([('fingerprint', 'in', list(fingerprints.keys()))]):
for ir_logging in fingerprints[build_error.fingerprint]:
ir_logging.error_id = build_error.id
@@ -106,14 +106,6 @@ class RunbotErrorLog(models.Model):
for l in self:
l.build_url = '/runbot/build/%s' % l.build_id.id
- def action_goto_build(self):
- self.ensure_one()
- return {
- "type": "ir.actions.act_url",
- "url": "runbot/build/%s" % self.build_id.id,
- "target": "new",
- }
-
def _compute_bundle_id(self):
slots = self.env['runbot.batch.slot'].search([('build_id', 'in', self.mapped('top_parent_id').ids)])
for l in self:
@@ -198,3 +190,11 @@ class RunbotErrorLog(models.Model):
WHERE
l.level = 'ERROR'
)""")
+
+ def action_goto_build(self):
+ self.ensure_one()
+ return {
+ "type": "ir.actions.act_url",
+ "url": "runbot/build/%s" % self.build_id.id,
+ "target": "new",
+ }
diff --git a/runbot/models/module.py b/runbot/models/module.py
new file mode 100644
index 00000000..e69de29b
diff --git a/runbot/models/repo.py b/runbot/models/repo.py
index 629c950e..a30b3486 100644
--- a/runbot/models/repo.py
+++ b/runbot/models/repo.py
@@ -11,18 +11,14 @@ import requests
from pathlib import Path
from odoo import models, fields, api
-from ..common import os, RunbotException, _make_github_session
+from odoo.tools import file_open
+from ..common import os, RunbotException, make_github_session, sanitize
from odoo.exceptions import UserError
from odoo.tools.safe_eval import safe_eval
_logger = logging.getLogger(__name__)
-def _sanitize(name):
- for i in '@:/':
- name = name.replace(i, '_')
- return name
-
class Trigger(models.Model):
"""
@@ -89,7 +85,7 @@ class Trigger(models.Model):
return [(4, b.id) for b in refs_builds]
return []
- def get_version_domain(self):
+ def _get_version_domain(self):
if self.version_domain:
return safe_eval(self.version_domain)
return []
@@ -147,7 +143,7 @@ class Remote(models.Model):
def _compute_remote_name(self):
for remote in self:
- remote.remote_name = _sanitize(remote.short_name)
+ remote.remote_name = sanitize(remote.short_name)
def create(self, values_list):
remote = super().create(values_list)
@@ -175,7 +171,7 @@ class Remote(models.Model):
url = url.replace(':owner', remote.owner)
url = url.replace(':repo', remote.repo_name)
url = 'https://api.%s%s' % (remote.repo_domain, url)
- session = session or _make_github_session(remote.token)
+ session = session or make_github_session(remote.token)
while url:
if recursive:
_logger.info('Getting page %s', url)
@@ -212,12 +208,12 @@ class Remote(models.Model):
else:
raise
- def check_token(self):
+ def action_check_token(self):
if not self.user_has_groups('runbot.group_runbot_admin'):
raise UserError('This action is restricted to admin users')
token_results = {}
for repo in self:
- session = _make_github_session(repo.token)
+ session = make_github_session(repo.token)
if repo.token not in token_results:
token_results[repo.token] = session.get("https://api.github.com/user")
response = token_results[repo.token]
@@ -287,7 +283,7 @@ class Repo(models.Model):
upgrade_paths = fields.Char('Upgrade paths', help='Comma separated list of possible upgrade path', default='', tracking=True)
sequence = fields.Integer('Sequence', tracking=True)
- path = fields.Char(compute='_get_path', string='Directory', readonly=True)
+ path = fields.Char(compute='_compute_path', string='Directory', readonly=True)
mode = fields.Selection([('disabled', 'Disabled'),
('poll', 'Poll'),
('hook', 'Hook')],
@@ -326,12 +322,12 @@ class Repo(models.Model):
for repo in self:
repo.hook_time = times.get(repo.id, 0)
- def set_hook_time(self, value):
+ def _set_hook_time(self, value):
for repo in self:
self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id})
self.invalidate_recordset(['hook_time'])
- def set_ref_time(self, value):
+ def _set_ref_time(self, value):
for repo in self:
self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id})
self.invalidate_recordset(['get_ref_time'])
@@ -349,11 +345,16 @@ class Repo(models.Model):
""")
@api.depends('name')
- def _get_path(self):
- """compute the server path of repo from the name"""
- root = self.env['runbot.runbot']._root()
+ def _compute_path(self):
+        """compute the server path of repo from the name"""
for repo in self:
- repo.path = os.path.join(root, 'repo', _sanitize(repo.name))
+ repo.path = repo._path()
+
+ def _path(self, *path_parts):
+ return self.env['runbot.runbot']._path('repo', sanitize(self.name), *path_parts)
+
+ def _source_path(self, *path_parts):
+ return self.env['runbot.runbot']._path('sources', sanitize(self.name), *path_parts)
def _git(self, cmd, errors='strict'):
"""Execute a git command 'cmd'"""
@@ -396,7 +397,7 @@ class Repo(models.Model):
def _get_fetch_head_time(self):
self.ensure_one()
- fname_fetch_head = os.path.join(self.path, 'FETCH_HEAD')
+ fname_fetch_head = self._path('FETCH_HEAD')
if os.path.exists(fname_fetch_head):
return os.path.getmtime(fname_fetch_head)
return 0
@@ -411,7 +412,7 @@ class Repo(models.Model):
commit_limit = time.time() - (60 * 60 * 24 * max_age)
if not self.get_ref_time or get_ref_time > self.get_ref_time:
try:
- self.set_ref_time(get_ref_time)
+ self._set_ref_time(get_ref_time)
fields = ['refname', 'objectname', 'committerdate:unix', 'authorname', 'authoremail', 'subject', 'committername', 'committeremail']
fmt = "%00".join(["%(" + field + ")" for field in fields])
cmd = ['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/*/heads/*']
@@ -423,7 +424,7 @@ class Repo(models.Model):
return []
refs = [tuple(field for field in line.split('\x00')) for line in git_refs.split('\n')]
refs = [r for r in refs if not re.match(r'^refs/[\w-]+/heads/\d+$', r[0])] # remove branches with interger names to avoid confusion with pr names
- refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch'].match_is_base(r[0].split('/')[-1])]
+ refs = [r for r in refs if int(r[2]) > commit_limit or self.env['runbot.branch']._match_is_base(r[0].split('/')[-1])]
if ignore:
refs = [r for r in refs if r[0].split('/')[-1] not in ignore]
return refs
@@ -443,7 +444,7 @@ class Repo(models.Model):
# FIXME WIP
names = [r[0].split('/')[-1] for r in refs]
branches = self.env['runbot.branch'].search([('name', 'in', names), ('remote_id', 'in', self.remote_ids.ids)])
- ref_branches = {branch.ref(): branch for branch in branches}
+ ref_branches = {branch._ref(): branch for branch in branches}
new_branch_values = []
for ref_name, sha, date, author, author_email, subject, committer, committer_email in refs:
if not ref_branches.get(ref_name):
@@ -462,7 +463,7 @@ class Repo(models.Model):
_logger.info('Creating new branches')
new_branches = self.env['runbot.branch'].create(new_branch_values)
for branch in new_branches:
- ref_branches[branch.ref()] = branch
+ ref_branches[branch._ref()] = branch
return ref_branches
def _find_new_commits(self, refs, ref_branches):
@@ -532,11 +533,11 @@ class Repo(models.Model):
if repo.mode == 'disabled':
_logger.info(f'skipping disabled repo {repo.name}')
continue
- if os.path.isdir(os.path.join(repo.path, 'refs')):
- git_config_path = os.path.join(repo.path, 'config')
+ if os.path.isdir(repo._path('refs')):
+ git_config_path = repo._path('config')
template_params = {'repo': repo}
git_config = self.env['ir.ui.view']._render_template("runbot.git_config", template_params)
- with open(git_config_path, 'w') as config_file:
+ with file_open(git_config_path, 'w') as config_file:
config_file.write(str(git_config))
_logger.info('Config updated for repo %s' % repo.name)
else:
@@ -546,7 +547,7 @@ class Repo(models.Model):
""" Clone the remote repo if needed """
self.ensure_one()
repo = self
- if not os.path.isdir(os.path.join(repo.path, 'refs')):
+ if not os.path.isdir(repo._path('refs')):
_logger.info("Initiating repository '%s' in '%s'" % (repo.name, repo.path))
git_init = subprocess.run(['git', 'init', '--bare', repo.path], stderr=subprocess.PIPE)
if git_init.returncode:
@@ -561,11 +562,11 @@ class Repo(models.Model):
repo = self
if not repo.remote_ids:
return False
- if not os.path.isdir(os.path.join(repo.path)):
+ if not os.path.isdir(repo.path):
os.makedirs(repo.path)
force = self._git_init() or force
- fname_fetch_head = os.path.join(repo.path, 'FETCH_HEAD')
+ fname_fetch_head = repo._path('FETCH_HEAD')
if not force and os.path.isfile(fname_fetch_head):
fetch_time = os.path.getmtime(fname_fetch_head)
if repo.mode == 'hook':
@@ -599,9 +600,9 @@ class Repo(models.Model):
host.message_post(body=message)
icp = self.env['ir.config_parameter'].sudo()
if icp.get_param('runbot.runbot_disable_host_on_fetch_failure'):
- self.env['runbot.runbot'].warning('Host %s got reserved because of fetch failure' % host.name)
+ self.env['runbot.runbot']._warning('Host %s got reserved because of fetch failure' % host.name)
_logger.exception(message)
- host.disable()
+ host._disable()
return success
def _update(self, force=False, poll_delay=5*60):
diff --git a/runbot/models/runbot.py b/runbot/models/runbot.py
index 696609a0..b4c345d5 100644
--- a/runbot/models/runbot.py
+++ b/runbot/models/runbot.py
@@ -11,13 +11,13 @@ from contextlib import contextmanager
from requests.exceptions import HTTPError
from subprocess import CalledProcessError
-from ..common import fqdn, dest_reg, os
+from ..common import dest_reg, os, sanitize
from ..container import docker_ps, docker_stop
from odoo import models, fields
+from odoo.exceptions import UserError
from odoo.osv import expression
-from odoo.tools import config
-from odoo.modules.module import get_module_resource
+from odoo.tools import config, file_open
_logger = logging.getLogger(__name__)
@@ -33,14 +33,21 @@ class Runbot(models.AbstractModel):
def _root(self):
"""Return root directory of repository"""
- default = os.path.join(os.path.dirname(__file__), '../static')
- return os.path.abspath(default)
+ return os.path.abspath(os.sep.join([os.path.dirname(__file__), '../static']))
+
+ def _path(self, *path_parts):
+        """Return a sanitized path under the runbot static root"""
+ root = self.env['runbot.runbot']._root()
+ file_path = os.path.normpath(os.sep.join([root] + [sanitize(path) for path_part in path_parts for path in path_part.split(os.sep) if path]))
+ if not file_path.startswith(root):
+ raise UserError('Invalid path')
+ return file_path
def _scheduler(self, host):
self._gc_testing(host)
self._commit()
processed = 0
- for build in host.get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
+ for build in host._get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
build = build.browse(build.id)
processed += 1
build._process_requested_actions()
@@ -49,7 +56,7 @@ class Runbot(models.AbstractModel):
self._commit()
host._process_messages()
self._commit()
- for build in host.get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host):
+ for build in host._get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host):
build = build.browse(build.id) # remove preftech ids, manage build one by one
result = build._schedule()
if result:
@@ -73,7 +80,7 @@ class Runbot(models.AbstractModel):
def _assign_pending_builds(self, host, nb_worker, domain=None):
if host.assigned_only or nb_worker <= 0:
return 0
- reserved_slots = len(host.get_builds([('local_state', 'in', ('testing', 'pending'))]))
+ reserved_slots = len(host._get_builds([('local_state', 'in', ('testing', 'pending'))]))
assignable_slots = (nb_worker - reserved_slots)
if assignable_slots > 0:
allocated = self._allocate_builds(host, assignable_slots, domain)
@@ -83,8 +90,8 @@ class Runbot(models.AbstractModel):
return 0
def _get_builds_to_init(self, host):
- domain_host = host.get_build_domain()
- used_slots = len(host.get_builds([('local_state', '=', 'testing')]))
+ domain_host = host._get_build_domain()
+ used_slots = len(host._get_builds([('local_state', '=', 'testing')]))
available_slots = host.nb_worker - used_slots
build_to_init = self.env['runbot.build']
if available_slots > 0:
@@ -94,16 +101,16 @@ class Runbot(models.AbstractModel):
return build_to_init
def _gc_running(self, host):
- running_max = host.get_running_max()
+ running_max = host._get_running_max()
Build = self.env['runbot.build']
- cannot_be_killed_ids = host.get_builds([('keep_running', '=', True)]).ids
+ cannot_be_killed_ids = host._get_builds([('keep_running', '=', True)]).ids
sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True), ('project_id.keep_sticky_running', '=', True)])
cannot_be_killed_ids += [
build.id
for build in sticky_bundles.mapped('last_batchs.slot_ids.build_id')
if build.host == host.name
][:running_max]
- build_ids = host.get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
+ build_ids = host._get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
for build in Build.browse(build_ids)[running_max:]:
build._kill()
@@ -112,7 +119,7 @@ class Runbot(models.AbstractModel):
"""garbage collect builds that could be killed"""
# decide if we need room
Build = self.env['runbot.build']
- domain_host = host.get_build_domain()
+ domain_host = host._get_build_domain()
testing_builds = Build.search(domain_host + [('local_state', 'in', ['testing', 'pending']), ('requested_action', '!=', 'deathrow')])
used_slots = len(testing_builds)
available_slots = host.nb_worker - used_slots
@@ -153,9 +160,9 @@ class Runbot(models.AbstractModel):
env = self.env
settings = {}
settings['port'] = config.get('http_port')
- settings['runbot_static'] = os.path.join(get_module_resource('runbot', 'static'), '')
+ settings['runbot_static'] = self.env['runbot.runbot']._root() + os.sep
settings['base_url'] = self.get_base_url()
- nginx_dir = os.path.join(self._root(), 'nginx')
+ nginx_dir = self.env['runbot.runbot']._path('nginx')
settings['nginx_dir'] = nginx_dir
settings['re_escape'] = re.escape
host_name = self.env['runbot.host']._get_current_name()
@@ -166,17 +173,17 @@ class Runbot(models.AbstractModel):
nginx_config = env['ir.ui.view']._render_template("runbot.nginx_config", settings)
os.makedirs(nginx_dir, exist_ok=True)
content = None
- nginx_conf_path = os.path.join(nginx_dir, 'nginx.conf')
+ nginx_conf_path = self.env['runbot.runbot']._path('nginx', 'nginx.conf')
content = ''
if os.path.isfile(nginx_conf_path):
- with open(nginx_conf_path, 'r') as f:
+ with file_open(nginx_conf_path, 'r') as f:
content = f.read()
if content != nginx_config:
_logger.info('reload nginx')
with open(nginx_conf_path, 'w') as f:
f.write(str(nginx_config))
try:
- pid = int(open(os.path.join(nginx_dir, 'nginx.pid')).read().strip(' \n'))
+ pid = int(file_open(self.env['runbot.runbot']._path('nginx', 'nginx.pid')).read().strip(' \n'))
os.kill(pid, signal.SIGHUP)
except Exception:
_logger.info('start nginx')
@@ -210,7 +217,7 @@ class Runbot(models.AbstractModel):
runbot_do_fetch = get_param('runbot.runbot_do_fetch')
runbot_do_schedule = get_param('runbot.runbot_do_schedule')
host = self.env['runbot.host']._get_current()
- host.set_psql_conn_count()
+ host._set_psql_conn_count()
host.last_start_loop = fields.Datetime.now()
self._commit()
# Bootstrap
@@ -227,18 +234,16 @@ class Runbot(models.AbstractModel):
self._fetch_loop_turn(host, pull_info_failures)
if runbot_do_schedule:
sleep_time = self._scheduler_loop_turn(host, update_frequency)
- self.sleep(sleep_time)
+ time.sleep(sleep_time)
else:
- self.sleep(update_frequency)
+ time.sleep(update_frequency)
self._commit()
host.last_end_loop = fields.Datetime.now()
- def sleep(self, t):
- time.sleep(t)
def _fetch_loop_turn(self, host, pull_info_failures, default_sleep=1):
- with self.manage_host_exception(host) as manager:
+ with self._manage_host_exception(host) as manager:
repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')])
processing_batch = self.env['runbot.batch'].search([('state', 'in', ('preparing', 'ready'))], order='id asc')
preparing_batch = processing_batch.filtered(lambda b: b.state == 'preparing')
@@ -261,7 +266,7 @@ class Runbot(models.AbstractModel):
self.env.clear()
pull_number = e.response.url.split('/')[-1]
pull_info_failures[pull_number] = time.time()
- self.warning('Pr pull info failed for %s', pull_number)
+ self._warning('Pr pull info failed for %s', pull_number)
self._commit()
if processing_batch:
@@ -283,13 +288,13 @@ class Runbot(models.AbstractModel):
return manager.get('sleep', default_sleep)
def _scheduler_loop_turn(self, host, sleep=5):
- with self.manage_host_exception(host) as manager:
+ with self._manage_host_exception(host) as manager:
if self._scheduler(host):
sleep = 0.1
return manager.get('sleep', sleep)
@contextmanager
- def manage_host_exception(self, host):
+ def _manage_host_exception(self, host):
res = {}
try:
yield res
@@ -335,7 +340,7 @@ class Runbot(models.AbstractModel):
to_keep = set()
repos = self.env['runbot.repo'].search([('mode', '!=', 'disabled')])
for repo in repos:
- repo_source = os.path.join(self._root(), 'sources', repo.name, '*')
+ repo_source = repo._source_path('*')
for source_dir in glob.glob(repo_source):
if source_dir not in cannot_be_deleted_path:
to_delete.add(source_dir)
@@ -387,9 +392,9 @@ class Runbot(models.AbstractModel):
repo._git(['gc', '--prune=all', '--quiet'])
except CalledProcessError as e:
message = f'git gc failed for {repo.name} on {host.name} with exit status {e.returncode} and message "{e.output[:60]} ..."'
- self.warning(message)
+ self._warning(message)
- def warning(self, message, *args):
+ def _warning(self, message, *args):
if args:
message = message % args
existing = self.env['runbot.warning'].search([('message', '=', message)], limit=1)
diff --git a/runbot/models/team.py b/runbot/models/team.py
index 63635402..1216b8ef 100644
--- a/runbot/models/team.py
+++ b/runbot/models/team.py
@@ -4,7 +4,7 @@ import hashlib
import logging
import re
-from ..common import _make_github_session
+from ..common import make_github_session
from collections import defaultdict
from dateutil.relativedelta import relativedelta
from fnmatch import fnmatch
@@ -98,7 +98,7 @@ class RunbotTeam(models.Model):
for team in self:
if team.github_team:
url = f"https://api.github.com/orgs/{team.organisation}/teams/{team.github_team}"
- session = _make_github_session(team.project_id.sudo().token)
+ session = make_github_session(team.project_id.sudo().token)
response = session.get(url)
if response.status_code != 200:
raise UserError(f'Cannot find team {team.github_team}')
diff --git a/runbot/templates/build.xml b/runbot/templates/build.xml
index e8780d6b..ebea394f 100644
--- a/runbot/templates/build.xml
+++ b/runbot/templates/build.xml
@@ -148,7 +148,7 @@
@@ -159,7 +159,7 @@
Total time:
-
+
Stats:
@@ -204,7 +204,7 @@
-
+
|
@@ -378,7 +378,7 @@
|
-
+
|
diff --git a/runbot/templates/bundle.xml b/runbot/templates/bundle.xml
index d9c07240..ec096a96 100644
--- a/runbot/templates/bundle.xml
+++ b/runbot/templates/bundle.xml
@@ -39,7 +39,7 @@
Branches |
-
+
@@ -75,7 +75,7 @@
|
-
+
diff --git a/runbot/templates/dashboard.xml b/runbot/templates/dashboard.xml
index 66a34307..ec6fe7a7 100644
--- a/runbot/templates/dashboard.xml
+++ b/runbot/templates/dashboard.xml
@@ -26,7 +26,7 @@
-
+
@@ -163,12 +163,12 @@
|
-
+
|
-
+
|
diff --git a/runbot/templates/frontend.xml b/runbot/templates/frontend.xml
index 08bff742..0210a657 100644
--- a/runbot/templates/frontend.xml
+++ b/runbot/templates/frontend.xml
@@ -87,7 +87,7 @@