[IMP] runbot: share sources between builds
Multibuild can generate a lot of checkouts, especially for small and fast jobs, which can overload the runbot discs since we try not to clean builds immediately (to ease bug fixing and allow wake up). This commit proposes to store sources in a single place, so that Docker can mount them as read-only volumes in the build directory. The checkout is also moved to the install jobs, so that builds containing only create-build steps won't check out the sources at all.

This change implies using --addons-path correctly, since odoo and enterprise addons won't be merged into the same repo anymore. This will allow testing addons the way a developer would, with a closer command line. It also implies changing the code structure a little: some not-so-useful fields were removed from build, and some hard-coded logic (manifest_names and server_names) is now stored on the repo instead.

A consequence is that a build CANNOT write in its sources. It shouldn't need to, but it means that runbot cannot be tested on runbot until data is written somewhere other than static.

Other approaches are possible, like bind-mounting the sources into the build directory instead of adding read-only volumes in Docker. Unfortunately, that would require giving the runbot user sudo access to mount, and changing the Docker config to allow mounts in volumes, which is not the case by default. A plus of that solution would be the ability to use an overlay mount.
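In a nutshell, the idea is that a commit is exported once under a shared sources directory and then mounted read-only into every build that needs it. A minimal sketch (paths and names are illustrative, not the actual runbot layout):

    # each entry maps an in-build folder name to a shared source export
    exports = {'bar': '/static/sources/bar/d0d0caca'}
    docker_command = ['docker', 'run']
    for dest, source in exports.items():
        docker_command.append('--volume=%s:/data/build/%s:ro' % (source, dest))
    # -> docker run --volume=/static/sources/bar/d0d0caca:/data/build/bar:ro ...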
parent 0830557cd6
commit f7a4fb7ac3
.gitignore (vendored)
@@ -7,4 +7,5 @@
# runbot work files
runbot/static/build
runbot/static/repo
+runbot/static/sources
runbot/static/nginx
@@ -6,7 +6,7 @@
    'author': "Odoo SA",
    'website': "http://runbot.odoo.com",
    'category': 'Website',
-   'version': '4.2',
+   'version': '4.3',
    'depends': ['website', 'base'],
    'data': [
        'security/runbot_security.xml',
@@ -17,6 +17,20 @@ from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT

_logger = logging.getLogger(__name__)


+class Commit():
+    def __init__(self, repo, sha):
+        self.repo = repo
+        self.sha = sha
+
+    def _source_path(self, *path):
+        return self.repo._source_path(self.sha, *path)
+
+    def export(self):
+        return self.repo._git_export(self.sha)
+
+    def __str__(self):
+        return '%s:%s' % (self.repo.short_name, self.sha)
+
+
def fqdn():
    return socket.getfqdn()
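A quick sketch of how the new Commit helper is used elsewhere in this commit (the repo record and sha are illustrative):

    commit = Commit(repo, 'd0d0caca0000ffffffffffffffffffffffffffff')
    commit._source_path('addons')   # path inside the shared sources tree
    commit.export()                 # exports the sha once, returns that path
    str(commit)                     # 'foo/bar:d0d0caca0000ffffffffffffffffffffffffffff'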
@@ -39,7 +39,9 @@ def build_odoo_cmd(odoo_cmd):
    # build cmd
    cmd_chain = []
    cmd_chain.append('cd /data/build')
-    cmd_chain.append('head -1 odoo-bin | grep -q python3 && sudo pip3 install -r requirements.txt || sudo pip install -r requirements.txt')
+    server_path = odoo_cmd[0]
+    requirement_path = os.path.join(os.path.dirname(server_path), 'requirements.txt')
+    cmd_chain.append('head -1 %s | grep -q python3 && sudo pip3 install -r %s || sudo pip install -r %s' % (server_path, requirement_path, requirement_path))
    cmd_chain.append(' '.join(odoo_cmd))
    return ' && '.join(cmd_chain)
@@ -60,7 +62,7 @@ def docker_build(log_path, build_dir):
    dbuild = subprocess.Popen(['docker', 'build', '--tag', 'odoo:runbot_tests', '.'], stdout=logs, stderr=logs, cwd=docker_dir)
    dbuild.wait()

-def docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None):
+def docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None):
    """Run tests in a docker container
    :param run_cmd: command string to run in container
    :param log_path: path to the logfile that will contain odoo stdout and stderr
@@ -68,6 +70,7 @@ def docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None,
        This directory is shared as a volume with the container
    :param container_name: used to give a name to the container for later reference
    :param exposed_ports: if not None, starting at 8069, ports will be exposed as exposed_ports numbers
+    :param ro_volumes: dict of dest:source volumes to mount readonly in builddir
    """
    _logger.debug('Docker run command: %s', run_cmd)
    logs = open(log_path, 'w')
@@ -81,13 +84,18 @@ def docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None,
        '--shm-size=128m',
        '--init',
    ]
+    if ro_volumes:
+        for dest, source in ro_volumes.items():
+            logs.write("Adding readonly volume '%s' pointing to %s \n" % (dest, source))
+            docker_command.append('--volume=%s:/data/build/%s:ro' % (source, dest))
    serverrc_path = os.path.expanduser('~/.openerp_serverrc')
    odoorc_path = os.path.expanduser('~/.odoorc')
    final_rc = odoorc_path if os.path.exists(odoorc_path) else serverrc_path if os.path.exists(serverrc_path) else None
    if final_rc:
        docker_command.extend(['--volume=%s:/home/odoo/.odoorc:ro' % final_rc])
    if exposed_ports:
-        for dp,hp in enumerate(exposed_ports, start=8069):
+        for dp, hp in enumerate(exposed_ports, start=8069):
            docker_command.extend(['-p', '127.0.0.1:%s:%s' % (hp, dp)])
    if cpu_limit:
        docker_command.extend(['--ulimit', 'cpu=%s' % int(cpu_limit)])
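For illustration, a minimal sketch of a build step handing its checkout exports to docker_run (folder names and paths are hypothetical):

    # exports maps an in-build folder name to a shared source path, as
    # returned by build._checkout() later in this commit
    exports = {
        'bar': '/runbot/static/sources/bar/<sha>',
        'bar-ent': '/runbot/static/sources/bar-ent/<sha>',
    }
    docker_run(build_odoo_cmd(cmd), log_path, build_dir, container_name,
               exposed_ports=[8069, 8070], ro_volumes=exports)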
@@ -8,10 +8,11 @@ import shutil
import subprocess
import time
import datetime
-from ..common import dt2time, fqdn, now, grep, uniq_list, local_pgadmin_cursor, s2human
+from ..common import dt2time, fqdn, now, grep, uniq_list, local_pgadmin_cursor, s2human, Commit
from ..container import docker_build, docker_stop, docker_is_running
+from odoo.addons.runbot.models.repo import HashMissingException
from odoo import models, fields, api
-from odoo.exceptions import UserError
+from odoo.exceptions import UserError, ValidationError
from odoo.http import request
from odoo.tools import appdirs
from collections import defaultdict
@@ -47,7 +48,6 @@ class runbot_build(models.Model):
    committer_email = fields.Char('Committer Email')
    subject = fields.Text('Subject')
    sequence = fields.Integer('Sequence')
-    modules = fields.Char("Modules to Install")

    # state machine
@@ -73,10 +73,6 @@ class runbot_build(models.Model):
    build_time = fields.Integer(compute='_compute_build_time', string='Job time')
    build_age = fields.Integer(compute='_compute_build_age', string='Build age')
    duplicate_id = fields.Many2one('runbot.build', 'Corresponding Build', index=True)
-    server_match = fields.Selection([('builtin', 'This branch includes Odoo server'),
-                                     ('match', 'This branch includes Odoo server'),
-                                     ('default', 'No match found - defaults to master')],
-                                    string='Server branch matching')
    revdep_build_ids = fields.Many2many('runbot.build', 'runbot_rev_dep_builds',
                                        column1='rev_dep_id', column2='dependent_id',
                                        string='Builds that depends on this build')
@@ -101,6 +97,11 @@ class runbot_build(models.Model):
    log_list = fields.Char('Comma separated list of step_ids names with logs', compute="_compute_log_list", store=True)
    orphan_result = fields.Boolean('No effect on the parent result', default=False)

+    commit_path_mode = fields.Selection([('rep_sha', 'repo name + sha'),
+                                         ('soft', 'repo name only'),
+                                         ],
+                                        default='soft',
+                                        string='Source export path mode')
    @api.depends('config_id')
    def _compute_log_list(self):  # storing this field because it will be accessed through the repo view and keeps track of the list at create
        for build in self:
@@ -281,6 +282,14 @@ class runbot_build(models.Model):
                extra_info.update({'local_state': 'duplicate', 'duplicate_id': duplicate_id})
                # maybe update duplicate priority if needed

+            docker_source_folders = set()
+            for commit in build_id.get_all_commit():
+                docker_source_folder = build_id._docker_source_folder(commit)
+                if docker_source_folder in docker_source_folders:
+                    extra_info['commit_path_mode'] = 'rep_sha'
+                    continue
+                docker_source_folders.add(docker_source_folder)
+
            build_id.write(extra_info)
            if build_id.local_state == 'duplicate' and build_id.duplicate_id.global_state in ('running', 'done'):  # and not build_id.parent_id:
                build_id._github_status()
@@ -396,7 +405,6 @@ class runbot_build(models.Model):
            'committer': build.committer,
            'committer_email': build.committer_email,
            'subject': build.subject,
-            'modules': build.modules,
            'build_type': 'rebuild',
        }
        if exact:
@@ -410,7 +418,6 @@ class runbot_build(models.Model):
            values.update({
                'config_id': build.config_id.id,
                'extra_params': build.extra_params,
-                'server_match': build.server_match,
                'orphan_result': build.orphan_result,
            })
            # if replace: ?
@@ -436,6 +443,8 @@ class runbot_build(models.Model):
        self.write({'local_state': 'done', 'local_result': 'skipped', 'duplicate_id': False})

    def _local_cleanup(self):
+        if self.pool._init:
+            return

        _logger.debug('Local cleaning')
@@ -457,8 +466,7 @@ class runbot_build(models.Model):
        existing = builds.exists()
        remaining = (builds - existing)
        if remaining:
-            dest_list = [dest for sublist in [dest_by_builds_ids[rem_id] for rem_id in remaining] for dest in sublist]
-            #dest_list = [dest for dest in dest_by_builds_ids[rem_id] for rem_id in remaining]
+            dest_list = [dest for sublist in [dest_by_builds_ids[rem_id] for rem_id in remaining.ids] for dest in sublist]
            _logger.debug('(%s) (%s) not deleted because no corresponding build found' % (label, " ".join(dest_list)))
        for build in existing:
            if fields.Datetime.from_string(build.create_date) + datetime.timedelta(days=max_days) < datetime.datetime.now():
@@ -546,7 +554,7 @@ class runbot_build(models.Model):
                build._log('_schedule', 'Init build environment with config %s ' % build.config_id.name)
                # notify pending build - avoid confusing users by saying nothing
                build._github_status()
-                build._checkout()
+                os.makedirs(build._path('logs'), exist_ok=True)
                build._log('_schedule', 'Building docker image')
                docker_build(build._path('logs', 'docker_build.txt'), build._path())
            except Exception:
@@ -632,128 +640,102 @@ class runbot_build(models.Model):
        root = self.env['runbot.repo']._root()
        return os.path.join(root, 'build', build.dest, *l)

-    def _server(self, *l, **kw):  # not really build related, specific to odoo version, could be a data
-        """Return the build server path"""
+    def _server(self, *path):
+        """Return the absolute path to the directory containing the server file, adding optional *path"""
        self.ensure_one()
-        build = self
-        if os.path.exists(build._path('odoo')):
-            return build._path('odoo', *l)
-        return build._path('openerp', *l)
+        commit = self.get_server_commit()
+        if os.path.exists(commit._source_path('odoo')):
+            return commit._source_path('odoo', *path)
+        return commit._source_path('openerp', *path)

    def _filter_modules(self, modules, available_modules, explicit_modules):
+        # TODO add blacklist_modules and blacklist prefixes as data on repo
        blacklist_modules = set(['auth_ldap', 'document_ftp', 'base_gengo',
                                 'website_gengo', 'website_instantclick',
                                 'pad', 'pad_project', 'note_pad',
                                 'pos_cache', 'pos_blackbox_be'])

-        mod_filter = lambda m: (
-            m in available_modules and
-            (m in explicit_modules or (not m.startswith(('hw_', 'theme_', 'l10n_')) and
-                                       m not in blacklist_modules))
-        )
-        return uniq_list(filter(mod_filter, modules))
+        def mod_filter(module):
+            if module not in available_modules:
+                return False
+            if module in explicit_modules:
+                return True
+            if module.startswith(('hw_', 'theme_', 'l10n_')):
+                return False
+            if module in blacklist_modules:
+                return False
+            return True
+
+        return uniq_list([module for module in modules if mod_filter(module)])
+
+    def _get_available_modules(self, commit):
+        for manifest_file_name in commit.repo.manifest_files.split(','):  # '__manifest__.py' '__openerp__.py'
+            for addons_path in commit.repo.addons_paths.split(','):  # '' 'addons' 'odoo/addons'
+                sep = os.path.join(addons_path, '*')
+                for manifest_path in glob.glob(commit._source_path(sep, manifest_file_name)):
+                    module = os.path.basename(os.path.dirname(manifest_path))
+                    yield (addons_path, module, manifest_file_name)
+
+    def _docker_source_folder(self, commit):
+        # in case some builds have commits with the same repo name (ex: foo/bar, foo-ent/bar)
+        # it can be useful to uniquify the commit export path using the hash
+        if self.commit_path_mode == 'rep_sha':
+            return '%s-%s' % (commit.repo._get_repo_name_part(), commit.sha[:8])
+        else:
+            return commit.repo._get_repo_name_part()

-    def _checkout(self):
+    def _checkout(self, commits=None):
        self.ensure_one()  # will raise exception if hash not found, we don't want to fail for all builds.
-        build = self
-        # starts from scratch
-        if os.path.isdir(build._path()):
-            shutil.rmtree(build._path())
-
-        # runbot log path
-        os.makedirs(build._path("logs"), exist_ok=True)
-        os.makedirs(build._server('addons'), exist_ok=True)
-
-        # update repo if needed
-        if not build.repo_id._hash_exists(build.name):
-            build.repo_id._update()
-
-        # checkout branch
-        build.branch_id.repo_id._git_export(build.name, build._path())
-
-        has_server = os.path.isfile(build._server('__init__.py'))
-        server_match = 'builtin'
-
-        # build complete set of modules to install
-        modules_to_move = []
-        modules_to_test = ((build.branch_id.modules or '') + ',' +
-                           (build.repo_id.modules or ''))
-        modules_to_test = list(filter(None, modules_to_test.split(',')))  # ???
-        explicit_modules = set(modules_to_test)
-        _logger.debug("manual modules_to_test for build %s: %s", build.dest, modules_to_test)
-
-        if not has_server:
-            if build.repo_id.modules_auto == 'repo':
-                modules_to_test += [
-                    os.path.basename(os.path.dirname(a))
-                    for a in (glob.glob(build._path('*/__openerp__.py')) +
-                              glob.glob(build._path('*/__manifest__.py')))
-                ]
-                _logger.debug("local modules_to_test for build %s: %s", build.dest, modules_to_test)
-
-            # todo make it backward compatible, or create migration script?
-            for build_dependency in build.dependency_ids:
-                closest_branch = build_dependency.closest_branch_id
-                latest_commit = build_dependency.dependency_hash
-                repo = closest_branch.repo_id or build_dependency.repo_id
-                closest_name = closest_branch.name or 'no_branch'
-                if build_dependency.match_type == 'default':
-                    server_match = 'default'
-                elif server_match != 'default':
-                    server_match = 'match'
-
-                build._log(
-                    '_checkout', 'Checking out %s from %s' % (closest_name, repo.name)
-                )
-
-                if not repo._hash_exists(latest_commit):
-                    repo._update(force=True)
-                if not repo._hash_exists(latest_commit):
-                    try:
-                        repo._git(['fetch', 'origin', latest_commit])
-                    except:
-                        pass
-                if not repo._hash_exists(latest_commit):
-                    build._log('_checkout', "Dependency commit %s in repo %s is unreachable. Did you force push the branch since build creation?" % (latest_commit, repo.name))
-                    raise Exception
-
-                repo._git_export(latest_commit, build._path())
-
-            # Finally mark all addons to move to openerp/addons
-            modules_to_move += [
-                os.path.dirname(module)
-                for module in (glob.glob(build._path('*/__openerp__.py')) +
-                               glob.glob(build._path('*/__manifest__.py')))
-            ]
-
-        # move all addons to server addons path
-        for module in uniq_list(glob.glob(build._path('addons/*')) + modules_to_move):
-            basename = os.path.basename(module)
-            addon_path = build._server('addons', basename)
-            if os.path.exists(addon_path):
-                build._log(
-                    'Building environment',
-                    'You have duplicate modules in your branches "%s"' % basename
-                )
-                if os.path.islink(addon_path) or os.path.isfile(addon_path):
-                    os.remove(addon_path)
-                else:
-                    shutil.rmtree(addon_path)
-            shutil.move(module, build._server('addons'))
-
-        available_modules = [
-            os.path.basename(os.path.dirname(a))
-            for a in (glob.glob(build._server('addons/*/__openerp__.py')) +
-                      glob.glob(build._server('addons/*/__manifest__.py')))
-        ]
-        if build.repo_id.modules_auto == 'all' or (build.repo_id.modules_auto != 'none' and has_server):
-            modules_to_test += available_modules
-
-        modules_to_test = self._filter_modules(modules_to_test,
-                                               set(available_modules), explicit_modules)
-        _logger.debug("modules_to_test for build %s: %s", build.dest, modules_to_test)
-        build.write({'server_match': server_match,
-                     'modules': ','.join(modules_to_test)})
+        exports = {}
+        for commit in commits or self.get_all_commit():
+            build_export_path = self._docker_source_folder(commit)
+            if build_export_path in exports:
+                self._log('_checkout', 'Multiple repos have the same export path in build, some sources may be missing for %s' % build_export_path, level='ERROR')
+                self._kill(result='ko')
+            try:
+                exports[build_export_path] = commit.export()
+            except HashMissingException:
+                self._log('_checkout', "Commit %s is unreachable. Did you force push the branch since build creation?" % commit, level='ERROR')
+                self._kill(result='ko')
+        return exports
+
+    def _get_modules_to_test(self, commits=None):
+        self.ensure_one()
+        repo_modules = []
+        available_modules = []
+        for commit in commits or self.get_all_commit():
+            for (addons_path, module, manifest_file_name) in self._get_available_modules(commit):
+                if commit.repo == self.repo_id:
+                    repo_modules.append(module)
+                if module in available_modules:
+                    self._log(
+                        'Building environment',
+                        '%s is a duplicated module (found in "%s")' % (module, commit._source_path(addons_path, module, manifest_file_name)),
+                        level='WARNING'
+                    )
+                else:
+                    available_modules.append(module)
+        explicit_modules = uniq_list([module for module in (self.branch_id.modules or '').split(',') + (self.repo_id.modules or '').split(',') if module])
+        if explicit_modules:
+            _logger.debug("explicit modules_to_test for build %s: %s", self.dest, explicit_modules)
+
+        if set(explicit_modules) - set(available_modules):
+            self._log('checkout', 'Some explicit modules (branch or repo defined) are not in the available module list.', level='WARNING')
+
+        if self.repo_id.modules_auto == 'all':
+            modules_to_test = available_modules
+        elif self.repo_id.modules_auto == 'repo':
+            modules_to_test = explicit_modules + repo_modules
+            _logger.debug("local modules_to_test for build %s: %s", self.dest, modules_to_test)
+        else:
+            modules_to_test = explicit_modules
+
+        modules_to_test = self._filter_modules(modules_to_test, available_modules, explicit_modules)
+        _logger.debug("modules_to_test for build %s: %s", self.dest, modules_to_test)
+        return modules_to_test

    def _local_pg_dropdb(self, dbname):
        with local_pgadmin_cursor() as local_cr:
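For illustration, the two source export path modes for a commit of a repo named foo/bar (sha hypothetical):

    # commit_path_mode == 'soft'     -> 'bar'            (repo name only, the default)
    # commit_path_mode == 'rep_sha'  -> 'bar-d0d0caca'   (repo name + sha[:8], used on collision)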
@@ -837,25 +819,48 @@ class runbot_build(models.Model):
            if not child.duplicate_id:
                child._ask_kill()

-    def _cmd(self):  # why not remove build.modules output ?
+    def get_all_commit(self):
+        return [Commit(self.repo_id, self.name)] + [Commit(dep.get_repo(), dep.dependency_hash) for dep in self.dependency_ids]
+
+    def get_server_commit(self, commits=None):
+        """
+        returns a Commit() of the first repo containing server files found in commits or in build commits
+        the commits param is not used in the code base but could be useful for jobs and crons
+        """
+        for commit in (commits or self.get_all_commit()):
+            if commit.repo.server_files:
+                return commit
+        raise ValidationError('No repo found with defined server_files')
+
+    def get_addons_path(self, commits=None):
+        for commit in (commits or self.get_all_commit()):
+            source_path = self._docker_source_folder(commit)
+            for addons_path in commit.repo.addons_paths.split(','):
+                if os.path.isdir(commit._source_path(addons_path)):
+                    yield os.path.join(source_path, addons_path).strip(os.sep)
+
+    def get_server_info(self, commit=None):
+        commit = commit or self.get_server_commit()
+        for server_file in commit.repo.server_files.split(','):
+            if os.path.isfile(commit._source_path(server_file)):
+                return (self._docker_source_folder(commit), server_file)
+        self._log('server_info', 'No server found in %s' % commit, level='ERROR')
+        raise ValidationError('No server found in %s' % commit)
+
+    def _cmd(self):
-        """Return a tuple describing the command to start the build
-        First part is list with the command and parameters
-        Second part is a list of Odoo modules
-        """
+        """Return the list with the command and parameters used to start the build"""
        self.ensure_one()
        build = self
-        bins = [
-            'odoo-bin',                # >= 10.0
-            'openerp-server',          # 9.0, 8.0
-            'openerp-server.py',       # 7.0
-            'bin/openerp-server.py',   # < 7.0
-        ]
-        for odoo_bin in bins:
-            if os.path.isfile(build._path(odoo_bin)):
-                break
-
+        (server_dir, server_file) = self.get_server_info()
+        addons_paths = self.get_addons_path()
        # commandline
-        cmd = [ os.path.join('/data/build', odoo_bin), ]
+        cmd = [os.path.join('/data/build', server_dir, server_file), '--addons-path', ",".join(addons_paths)]
        # options
        if grep(build._server("tools/config.py"), "no-xmlrpcs"):  # move that to configs ?
            cmd.append("--no-xmlrpcs")
@@ -879,7 +884,7 @@ class runbot_build(models.Model):
        # use the username of the runbot host to connect to the databases
        cmd += ['-r %s' % pwd.getpwuid(os.getuid()).pw_name]

-        return cmd, build.modules
+        return cmd

    def _github_status_notify_all(self, status):
        """Notify each repo with a status"""
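For illustration, with the hypothetical community + enterprise pair used in the tests below, the rewritten _cmd() now yields a list shaped like:

    ['/data/build/bar/server.py',
     '--addons-path', 'bar-ent,bar/addons,bar/core/addons']

with the usual options appended afterwards.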
@@ -5,7 +5,7 @@ import os
import re
import shlex
import time
-from ..common import now, grep, get_py_version, time2str, rfind
+from ..common import now, grep, get_py_version, time2str, rfind, Commit
from ..container import docker_run, docker_get_gateway_ip, build_odoo_cmd
from odoo import models, fields, api
from odoo.exceptions import UserError, ValidationError
@@ -18,6 +18,7 @@ _re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING '

PYTHON_DEFAULT = "# type python code here\n\n\n\n\n\n"


class Config(models.Model):
    _name = "runbot.build.config"
    _inherit = "mail.thread"
@@ -212,7 +213,6 @@ class ConfigStep(models.Model):
                'committer': build.committer,
                'committer_email': build.committer_email,
                'subject': build.subject,
-                'modules': build.modules,
                'hidden': self.hide_build,
                'orphan_result': self.make_orphan,
            })
@@ -239,11 +239,13 @@ class ConfigStep(models.Model):
        return safe_eval(self.sudo().python_code.strip(), eval_ctx, mode="exec", nocopy=True)

    def _run_odoo_run(self, build, log_path):
+        exports = build._checkout()
        # adjust job_end to record an accurate job_20 job_time
        build._log('run', 'Start running build %s' % build.dest)
        # run server
-        cmd, _ = build._cmd()
-        if os.path.exists(build._server('addons/im_livechat')):
+        cmd = build._cmd()
+        server = build._server()
+        if os.path.exists(os.path.join(server, 'addons/im_livechat')):
            cmd += ["--workers", "2"]
            cmd += ["--longpolling-port", "8070"]
            cmd += ["--max-cron-threads", "1"]
@@ -255,7 +257,7 @@ class ConfigStep(models.Model):
        # we need to have at least one job of type install_odoo to run odoo, take the last one for db_name.
        cmd += ['-d', '%s-%s' % (build.dest, db_name)]

-        if grep(build._server("tools/config.py"), "db-filter"):
+        if grep(os.path.join(server, "tools/config.py"), "db-filter"):
            if build.repo_id.nginx:
                cmd += ['--db-filter', '%d.*$']
            else:
@@ -269,10 +271,11 @@ class ConfigStep(models.Model):
        build_port = build.port
        self.env.cr.commit()  # commit before docker run to be 100% sure that db state is consistent with dockers
        self.invalidate_cache()
-        return docker_run(build_odoo_cmd(cmd), log_path, build_path, docker_name, exposed_ports=[build_port, build_port + 1])
+        return docker_run(build_odoo_cmd(cmd), log_path, build_path, docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports)

    def _run_odoo_install(self, build, log_path):
-        cmd, _ = build._cmd()
+        exports = build._checkout()
+        cmd = build._cmd()
        # create db if needed
        db_name = "%s-%s" % (build.dest, self.db_name)
        if self.create_db:
@@ -308,13 +311,13 @@ class ConfigStep(models.Model):

        max_timeout = int(self.env['ir.config_parameter'].get_param('runbot.runbot_timeout', default=10000))
        timeout = min(self.cpu_limit, max_timeout)
-        return docker_run(build_odoo_cmd(cmd), log_path, build._path(), build._get_docker_name(), cpu_limit=timeout)
+        return docker_run(build_odoo_cmd(cmd), log_path, build._path(), build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports)

    def _modules_to_install(self, build):
        modules_to_install = set([mod.strip() for mod in self.install_modules.split(',')])
        if '*' in modules_to_install:
            modules_to_install.remove('*')
-            default_mod = set([mod.strip() for mod in build.modules.split(',')])
+            default_mod = set(build._get_modules_to_test())
            modules_to_install = default_mod | modules_to_install
        # todo add without support
        return modules_to_install
@@ -329,13 +332,18 @@ class ConfigStep(models.Model):
            return []

    def _coverage_params(self, build, modules_to_install):
-        available_modules = [  # todo extract this to build method
-            os.path.basename(os.path.dirname(a))
-            for a in (glob.glob(build._server('addons/*/__openerp__.py')) +
-                      glob.glob(build._server('addons/*/__manifest__.py')))
-        ]
-        module_to_omit = set(available_modules) - modules_to_install
-        return ['--omit', ','.join('*addons/%s/*' % m for m in module_to_omit) + ',*__manifest__.py']
+        pattern_to_omit = set()
+        for commit in build.get_all_commit():
+            docker_source_folder = build._docker_source_folder(commit)
+            for manifest_file in commit.repo.manifest_files.split(','):
+                pattern_to_omit.add('*%s' % manifest_file)
+            for (addons_path, module, manifest_file_name) in build._get_available_modules(commit):
+                if module not in modules_to_install:
+                    # we want to omit docker_source_folder/[addons/path/]module/*
+                    module_path_in_docker = os.path.join(docker_source_folder, addons_path, module)
+                    pattern_to_omit.add('%s/*' % (module_path_in_docker))
+        return ['--omit', ','.join(pattern_to_omit)]

    def _make_results(self, build):
        build_values = {}
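Illustrative result (module and folder names hypothetical): with manifest files __manifest__.py and __openerp__.py and a module foo_module under bar/addons that is not installed, the step would return something like:

    ['--omit', '*__manifest__.py,*__openerp__.py,bar/addons/foo_module/*']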
@@ -9,3 +9,7 @@ class RunbotBuildDependency(models.Model):
    dependency_hash = fields.Char('Name of commit', index=True)
    closest_branch_id = fields.Many2one('runbot.branch', 'Branch', required=True, ondelete='cascade')
    match_type = fields.Char('Match Type')
+
+    def get_repo(self):
+        return self.closest_branch_id.repo_id or self.dependecy_repo_id
@@ -11,14 +11,17 @@ import signal
import subprocess
import time

from odoo.exceptions import UserError, ValidationError
from odoo.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
from odoo import models, fields, api
from odoo.modules.module import get_module_resource
from odoo.tools import config
-from ..common import fqdn, dt2time
+from ..common import fqdn, dt2time, Commit
from psycopg2.extensions import TransactionRollbackError
_logger = logging.getLogger(__name__)

+class HashMissingException(Exception):
+    pass
+
class runbot_repo(models.Model):
@@ -55,6 +58,10 @@ class runbot_repo(models.Model):
    repo_config_id = fields.Many2one('runbot.build.config', 'Run Config')
    config_id = fields.Many2one('runbot.build.config', 'Run Config', compute='_compute_config_id', inverse='_inverse_config_id')

+    server_files = fields.Char('Server files', help='Comma separated list of possible server files')  # odoo-bin,openerp-server,openerp-server.py
+    manifest_files = fields.Char('Manifest files', help='Comma separated list of possible manifest files', default='__manifest__.py,__openerp__.py')
+    addons_paths = fields.Char('Addons paths', help='Comma separated list of possible addons paths', default='')
+
    def _compute_config_id(self):
        for repo in self:
            if repo.repo_config_id:
@@ -71,15 +78,25 @@ class runbot_repo(models.Model):
        default = os.path.join(os.path.dirname(__file__), '../static')
        return os.path.abspath(default)

+    def _source_path(self, sha, *path):
+        """
+        returns the absolute path to the source folder of the repo (adding optional *path)
+        """
+        self.ensure_one()
+        return os.path.join(self._root(), 'sources', self._get_repo_name_part(), sha, *path)
+
    @api.depends('name')
    def _get_path(self):
        """compute the server path of repo from the name"""
        root = self._root()
        for repo in self:
-            name = repo.name
-            for i in '@:/':
-                name = name.replace(i, '_')
-            repo.path = os.path.join(root, 'repo', name)
+            repo.path = os.path.join(root, 'repo', repo._sanitized_name(repo.name))
+
+    @api.model
+    def _sanitized_name(self, name):
+        for i in '@:/':
+            name = name.replace(i, '_')
+        return name

    @api.depends('name')
    def _get_base_url(self):
@@ -95,24 +112,48 @@ class runbot_repo(models.Model):
        for repo in self:
            repo.short_name = '/'.join(repo.base.split('/')[-2:])

+    def _get_repo_name_part(self):
+        self.ensure_one()
+        return self._sanitized_name(self.name.split('/')[-1])
+
    def _git(self, cmd):
        """Execute a git command 'cmd'"""
-        for repo in self:
-            cmd = ['git', '--git-dir=%s' % repo.path] + cmd
-            _logger.debug("git command: %s", ' '.join(cmd))
-            return subprocess.check_output(cmd).decode('utf-8')
+        self.ensure_one()
+        cmd = ['git', '--git-dir=%s' % self.path] + cmd
+        _logger.debug("git command: %s", ' '.join(cmd))
+        return subprocess.check_output(cmd).decode('utf-8')
+
+    def _git_rev_parse(self, branch_name):
+        return self._git(['rev-parse', branch_name]).strip()

-    def _git_export(self, treeish, dest):
-        """Export a git repo to dest"""
-        self.ensure_one()
-        _logger.debug('checkout %s %s %s', self.name, treeish, dest)
-        p1 = subprocess.Popen(['git', '--git-dir=%s' % self.path, 'archive', treeish], stdout=subprocess.PIPE)
-        p2 = subprocess.Popen(['tar', '-xmC', dest], stdin=p1.stdout, stdout=subprocess.PIPE)
+    def _git_export(self, sha):
+        """Export a git repo into the shared sources folder"""
+        # TODO add automated tests
+        self.ensure_one()
+        export_path = self._source_path(sha)
+
+        if os.path.isdir(export_path):
+            _logger.info('git export: checkout to %s (already exists)' % export_path)
+            return export_path
+
+        if not self._hash_exists(sha):
+            self._update(force=True)
+        if not self._hash_exists(sha):
+            try:
+                self._git(['fetch', 'origin', sha])
+            except:
+                pass
+        if not self._hash_exists(sha):
+            raise HashMissingException()
+
+        _logger.info('git export: checkout to %s (new)' % export_path)
+        os.makedirs(export_path)
+        p1 = subprocess.Popen(['git', '--git-dir=%s' % self.path, 'archive', sha], stdout=subprocess.PIPE)
+        p2 = subprocess.Popen(['tar', '-xmC', export_path], stdin=p1.stdout, stdout=subprocess.PIPE)
        p1.stdout.close()  # Allow p1 to receive a SIGPIPE if p2 exits.
        p2.communicate()[0]
+        # TODO get result and fall back on cleaning in case of problem
+        return export_path

    def _hash_exists(self, commit_hash):
        """ Verify that a commit hash exists in the repo """
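An illustrative layout of the shared static tree after two exports (names are examples; the actual root comes from _root()):

    static/
        repo/bla_example.com_foo_bar/   # bare git repo
        sources/bar/<sha>/              # shared export, mounted :ro into builds
        sources/bar-ent/<sha>/
        build/<dest>/logs/              # per-build writable area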
@@ -21,10 +21,7 @@
            <t t-if="bu.global_result=='killed'"><i class="text-danger fa fa-times"/> killed</t>
            <t t-if="bu.global_result=='manually_killed'"><i class="text-danger fa fa-times"/> manually killed</t>
        </t>
-        <t t-if="bu.real_build.server_match == 'default'">
-            <i class="text-warning fa fa-question-circle fa-fw"
-               title="Server branch cannot be determined exactly. Please use naming convention '12.0-my-branch' to build with '12.0' server branch."/>
-        </t>
        <t t-if="bu.revdep_build_ids">
            <small class="pull-right">Dep builds:
                <t t-foreach="bu.sorted_revdep_build_ids()" t-as="rbu">
@@ -4,12 +4,21 @@ from odoo.tools.config import configmanager
from odoo.tests import common


+def rev_parse(repo, branch_name):
+    """
+    simulate a rev parse by returning a fake hash of form
+    'rp_odoo-dev/enterprise_saas-12.2__head'
+    should be overwritten if a pr head should match a branch head
+    """
+    head_hash = 'rp_%s_%s_head' % (repo.name.split(':')[1], branch_name.split('/')[-1])
+    return head_hash
+
+
class Test_Build(common.TransactionCase):

    def setUp(self):
        super(Test_Build, self).setUp()
        self.Repo = self.env['runbot.repo']
-        self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})
+        self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar', 'server_files': 'server.py', 'addons_paths': 'addons,core/addons'})
        self.Branch = self.env['runbot.branch']
        self.branch = self.Branch.create({
            'repo_id': self.repo.id,
@@ -62,10 +71,12 @@ class Test_Build(common.TransactionCase):
        with self.assertRaises(AssertionError):
            builds.write({'local_state': 'duplicate'})

+    @patch('odoo.addons.runbot.models.build.os.path.isfile')
    @patch('odoo.addons.runbot.models.build.os.mkdir')
    @patch('odoo.addons.runbot.models.build.grep')
-    def test_build_cmd_log_db(self, mock_grep, mock_mkdir):
+    def test_build_cmd_log_db(self, mock_grep, mock_mkdir, mock_is_file):
        """ test that the logdb connection URI is taken from the .odoorc file """
+        mock_is_file.return_value = True
        uri = 'postgres://someone:pass@somewhere.com/db'
        self.env['ir.config_parameter'].sudo().set_param("runbot.runbot_logdb_uri", uri)
        build = self.Build.create({
|
||||
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
|
||||
'port': '1234',
|
||||
})
|
||||
cmd = build._cmd()[0]
|
||||
cmd = build._cmd()
|
||||
self.assertIn('--log-db=%s' % uri, cmd)
|
||||
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isdir')
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isfile')
|
||||
@patch('odoo.addons.runbot.models.build.os.mkdir')
|
||||
@patch('odoo.addons.runbot.models.build.grep')
|
||||
def test_build_cmd_server_path_no_dep(self, mock_grep, mock_mkdir, mock_is_file, mock_is_dir):
|
||||
""" test that the server path and addons path """
|
||||
mock_is_file.return_value = True
|
||||
mock_is_dir.return_value = True
|
||||
build = self.Build.create({
|
||||
'branch_id': self.branch.id,
|
||||
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
|
||||
'port': '1234',
|
||||
})
|
||||
cmd = build._cmd()
|
||||
self.assertEqual('/data/build/bar/server.py', cmd[0])
|
||||
self.assertIn('--addons-path', cmd)
|
||||
addons_path_pos = cmd.index('--addons-path') + 1
|
||||
self.assertEqual(cmd[addons_path_pos], 'bar/addons,bar/core/addons')
|
||||
|
||||
@patch('odoo.addons.runbot.models.branch.runbot_branch._is_on_remote')
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isdir')
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isfile')
|
||||
@patch('odoo.addons.runbot.models.build.os.mkdir')
|
||||
@patch('odoo.addons.runbot.models.build.grep')
|
||||
def test_build_cmd_server_path_with_dep(self, mock_grep, mock_mkdir, mock_is_file, mock_is_dir, mock_is_on_remote):
|
||||
""" test that the server path and addons path """
|
||||
|
||||
def is_file(file):
|
||||
self.assertIn('sources/bar/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py', file)
|
||||
return True
|
||||
|
||||
def is_dir(file):
|
||||
paths = [
|
||||
'sources/bar/dfdfcfcf0000ffffffffffffffffffffffffffff/addons',
|
||||
'sources/bar/dfdfcfcf0000ffffffffffffffffffffffffffff/core/addons',
|
||||
'sources/bar-ent/d0d0caca0000ffffffffffffffffffffffffffff'
|
||||
]
|
||||
self.assertTrue(any([path in file for path in paths])) # checking that addons path existence check looks ok
|
||||
return True
|
||||
|
||||
mock_is_file.side_effect = is_file
|
||||
mock_is_dir.side_effect = is_dir
|
||||
mock_is_on_remote.return_value = True
|
||||
repo_ent = self.env['runbot.repo'].create({
|
||||
'name': 'bla@example.com:foo/bar-ent',
|
||||
'server_files': '',
|
||||
})
|
||||
repo_ent.dependency_ids = self.repo
|
||||
enterprise_branch = self.env['runbot.branch'].create({
|
||||
'repo_id': repo_ent.id,
|
||||
'name': 'refs/heads/master'
|
||||
})
|
||||
|
||||
def rev_parse(repo, branch_name):
|
||||
self.assertEqual(repo, self.repo)
|
||||
self.assertEqual(branch_name, 'refs/heads/master')
|
||||
return 'dfdfcfcf0000ffffffffffffffffffffffffffff'
|
||||
|
||||
with patch('odoo.addons.runbot.models.repo.runbot_repo._git_rev_parse', new=rev_parse):
|
||||
build = self.Build.create({
|
||||
'branch_id': enterprise_branch.id,
|
||||
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
|
||||
'port': '1234',
|
||||
})
|
||||
cmd = build._cmd()
|
||||
self.assertIn('--addons-path', cmd)
|
||||
addons_path_pos = cmd.index('--addons-path') + 1
|
||||
self.assertEqual(cmd[addons_path_pos], 'bar-ent,bar/addons,bar/core/addons')
|
||||
self.assertEqual('/data/build/bar/server.py', cmd[0])
|
||||
|
||||
@patch('odoo.addons.runbot.models.branch.runbot_branch._is_on_remote')
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isdir')
|
||||
@patch('odoo.addons.runbot.models.build.os.path.isfile')
|
||||
@patch('odoo.addons.runbot.models.build.os.mkdir')
|
||||
@patch('odoo.addons.runbot.models.build.grep')
|
||||
def test_build_cmd_server_path_with_dep_collision(self, mock_grep, mock_mkdir, mock_is_file, mock_is_dir, mock_is_on_remote):
|
||||
""" test that the server path and addons path """
|
||||
|
||||
def is_file(file):
|
||||
self.assertIn('sources/bar/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py', file)
|
||||
return True
|
||||
|
||||
mock_is_file.side_effect = is_file
|
||||
mock_is_dir.return_value = True
|
||||
mock_is_on_remote.return_value = True
|
||||
repo_ent = self.env['runbot.repo'].create({
|
||||
'name': 'bla@example.com:foo-ent/bar',
|
||||
'server_files': '',
|
||||
})
|
||||
repo_ent.dependency_ids = self.repo
|
||||
enterprise_branch = self.env['runbot.branch'].create({
|
||||
'repo_id': repo_ent.id,
|
||||
'name': 'refs/heads/master'
|
||||
})
|
||||
|
||||
def rev_parse(repo, branch_name):
|
||||
self.assertEqual(repo, self.repo)
|
||||
self.assertEqual(branch_name, 'refs/heads/master')
|
||||
return 'dfdfcfcf0000ffffffffffffffffffffffffffff'
|
||||
|
||||
with patch('odoo.addons.runbot.models.repo.runbot_repo._git_rev_parse', new=rev_parse):
|
||||
build = self.Build.create({
|
||||
'branch_id': enterprise_branch.id,
|
||||
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
|
||||
'port': '1234',
|
||||
})
|
||||
cmd = build._cmd()
|
||||
self.assertIn('--addons-path', cmd)
|
||||
addons_path_pos = cmd.index('--addons-path') + 1
|
||||
self.assertEqual(cmd[addons_path_pos], 'bar-d0d0caca,bar-dfdfcfcf/addons,bar-dfdfcfcf/core/addons')
|
||||
self.assertEqual('/data/build/bar-dfdfcfcf/server.py', cmd[0])
|
||||
|
||||
def test_build_config_from_branch_default(self):
|
||||
"""test build config_id is computed from branch default config_id"""
|
||||
build = self.Build.create({
|
||||
@@ -266,16 +389,6 @@ class Test_Build(common.TransactionCase):
        self.assertEqual(build_parent.nb_testing, 0)
        self.assertEqual(build_parent.global_state, 'done')

-    def rev_parse(repo, branch_name):
-        """
-        simulate a rev parse by returning a fake hash of form
-        'rp_odoo-dev/enterprise_saas-12.2__head'
-        should be overwitten if a pr head should match a branch head
-        """
-        head_hash = 'rp_%s_%s_head' % (repo.name.split(':')[1], branch_name.split('/')[-1])
-        return head_hash
-

class TestClosestBranch(common.TransactionCase):

    def branch_description(self, branch):
@@ -45,6 +45,9 @@ class TestSchedule(common.TransactionCase):
        build_ids = self.Build.search(domain_host + [('local_state', 'in', ['testing', 'running', 'deathrow'])])
        mock_running.return_value = False
        self.assertEqual(build.local_state, 'testing')
+        build_ids._schedule()  # too fast, docker not started
+        self.assertEqual(build.local_state, 'testing')
+        build_ids.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=20)})  # job is now a little older
        build_ids._schedule()
        self.assertEqual(build.local_state, 'done')
        self.assertEqual(build.local_result, 'ok')
@@ -32,7 +32,6 @@
                <field name="build_time"/>
                <field name="build_age"/>
                <field name="duplicate_id"/>
-                <field name="modules"/>
                <field name="build_type" groups="base.group_no_one"/>
-                <field name="config_id" readonly="1"/>
+                <field name="config_id" groups="base.group_no_one"/>
@@ -23,6 +23,9 @@
                        <field name="group_ids" widget="many2many_tags"/>
                        <field name="hook_time"/>
                        <field name="config_id"/>
+                        <field name="server_files"/>
+                        <field name="manifest_files"/>
+                        <field name="addons_paths"/>
                    </group>
                </sheet>
            </form>
@@ -21,7 +21,8 @@ class Step(models.Model):
        return super(Step, self)._run_step(build, log_path)

    def _runbot_cla_check(self, build, log_path):
-        cla_glob = glob.glob(build._path("doc/cla/*/*.md"))
+        build._checkout()
+        cla_glob = glob.glob(build.get_server_commit()._source_path("doc/cla/*/*.md"))
        if cla_glob:
            description = "%s Odoo CLA signature check" % build.author
            mo = re.search('[^ <@]+@[^ @>]+', build.author_email or '')