diff --git a/runbot/__init__.py b/runbot/__init__.py
index 635d05e9..fee5ed2d 100644
--- a/runbot/__init__.py
+++ b/runbot/__init__.py
@@ -4,3 +4,4 @@ from . import croninterval
 from . import controllers
 from . import models
 from . import common
+from . import container
diff --git a/runbot/__manifest__.py b/runbot/__manifest__.py
index 218e69be..8650b1d6 100644
--- a/runbot/__manifest__.py
+++ b/runbot/__manifest__.py
@@ -6,7 +6,7 @@
     'author': "Odoo SA",
     'website': "http://runbot.odoo.com",
     'category': 'Website',
-    'version': '2.4',
+    'version': '2.5',
     'depends': ['website', 'base'],
     'data': [
         'security/runbot_security.xml',
diff --git a/runbot/common.py b/runbot/common.py
index 01cf04ad..154ce814 100644
--- a/runbot/common.py
+++ b/runbot/common.py
@@ -34,28 +34,6 @@ def now():
     return time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
 
 
-def lock(filename):
-    fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0o600)
-    if hasattr(os, 'set_inheritable'):
-        os.set_inheritable(fd, True)  # needed since pep-446
-    fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-
-
-def locked(filename):
-    result = False
-    try:
-        fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0o600)
-    except OSError:
-        return False
-    try:
-        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-    except OSError:  # since pep-3151 fcntl raises OSError and IOError is now an alias of OSError
-        result = True
-    finally:
-        os.close(fd)
-    return result
-
-
 def grep(filename, string):
     if os.path.isfile(filename):
         return open(filename).read().find(string) != -1
diff --git a/runbot/container.py b/runbot/container.py
new file mode 100644
index 00000000..9e1a1797
--- /dev/null
+++ b/runbot/container.py
@@ -0,0 +1,175 @@
+# -*- coding: utf-8 -*-
+"""Containerize builds
+
+The docker image used for the build is always tagged like this:
+    odoo:runbot_tests
+This file contains helpers to containerize builds with Docker.
+When testing this file:
+    the first parameter should be a directory containing Odoo.
+    The second parameter is the exposed port.
+"""
+import argparse
+import datetime
+import logging
+import os
+import shutil
+import subprocess
+import time
+
+
+_logger = logging.getLogger(__name__)
+DOCKERUSER = """
+RUN groupadd -g %(group_id)s odoo \\
+&& useradd -u %(user_id)s -g odoo -G audio,video odoo \\
+&& mkdir /home/odoo \\
+&& chown -R odoo:odoo /home/odoo \\
+&& echo "odoo ALL= NOPASSWD: /usr/bin/pip" > /etc/sudoers.d/pip \\
+&& echo "odoo ALL= NOPASSWD: /usr/bin/pip3" >> /etc/sudoers.d/pip
+USER odoo
+ENV COVERAGE_FILE /data/build/.coverage
+""" % {'group_id': os.getgid(), 'user_id': os.getuid()}
+
+def docker_build(log_path, build_dir):
+    """Build the docker image
+    :param log_path: path to the logfile that will contain odoo stdout and stderr
+    :param build_dir: the build directory that contains the Odoo sources to build.
+    """
+    # Prepare docker image
+    docker_dir = os.path.join(build_dir, 'docker')
+    os.makedirs(docker_dir, exist_ok=True)
+    shutil.copy(os.path.join(os.path.dirname(__file__), 'data', 'Dockerfile'), docker_dir)
+    # synchronise the current user with the odoo user inside the Dockerfile
+    with open(os.path.join(docker_dir, 'Dockerfile'), 'a') as df:
+        df.write(DOCKERUSER)
+    logs = open(log_path, 'w')
+    dbuild = subprocess.Popen(['docker', 'build', '--tag', 'odoo:runbot_tests', '.'], stdout=logs, stderr=logs, cwd=docker_dir)
+    dbuild.wait()
+
+def docker_run(odoo_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None):
+    """Run tests in a docker container
+    :param odoo_cmd: command that starts odoo
+    :param log_path: path to the logfile that will contain odoo stdout and stderr
+    :param build_dir: the build directory that contains the Odoo sources to build.
+                      This directory is shared as a volume with the container
+    :param container_name: used to give a name to the container for later reference
+    :param exposed_ports: if not None, container ports starting at 8069 are published on the host at the given port numbers
+    """
+    # build cmd
+    cmd_chain = []
+    cmd_chain.append('cd /data/build')
+    cmd_chain.append('head -1 odoo-bin | grep -q python3 && sudo pip3 install -r requirements.txt || sudo pip install -r requirements.txt')
+    cmd_chain.append(' '.join(odoo_cmd))
+    run_cmd = ' && '.join(cmd_chain)
+    _logger.debug('Docker run command: %s', run_cmd)
+    logs = open(log_path, 'w')
+
+    # create start script
+    docker_command = [
+        'docker', 'run', '--rm',
+        '--name', container_name,
+        '--volume=/var/run/postgresql:/var/run/postgresql',
+        '--volume=%s:/data/build' % build_dir,
+        '--shm-size=128m',
+        '--init',
+    ]
+    if exposed_ports:
+        for dp, hp in enumerate(exposed_ports, start=8069):
+            docker_command.extend(['-p', '127.0.0.1:%s:%s' % (hp, dp)])
+    if cpu_limit:
+        docker_command.extend(['--ulimit', 'cpu=%s' % int(cpu_limit)])
+    docker_command.extend(['odoo:runbot_tests', '/bin/bash', '-c', "%s" % run_cmd])
+    docker_run = subprocess.Popen(docker_command, stdout=logs, stderr=logs, preexec_fn=preexec_fn, close_fds=False, cwd=build_dir)
+    _logger.info('Started Docker container %s', container_name)
+    return docker_run.pid
+
+def docker_stop(container_name):
+    """Stops the container named container_name"""
+    _logger.info('Stopping container %s', container_name)
+    dstop = subprocess.run(['docker', 'stop', container_name])
+
+def docker_is_running(container_name):
+    """Return True if container is still running"""
+    dinspect = subprocess.run(['docker', 'container', 'inspect', container_name], stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL)
+    return True if dinspect.returncode == 0 else False
+
+def build(args):
+    """Build container from CLI"""
+    _logger.info('Building the base image container')
+    logdir = os.path.join(args.build_dir, 'logs')
+    os.makedirs(logdir, exist_ok=True)
+    logfile = os.path.join(logdir, 'logs-build.txt')
+    docker_build(logfile, args.build_dir)
+    _logger.info('Finished building the base image container')
+
+def tests(args):
+    _logger.info('Start container tests')
+    os.makedirs(os.path.join(args.build_dir, 'logs'), exist_ok=True)
+    os.makedirs(os.path.join(args.build_dir, 'datadir'), exist_ok=True)
+
+    if args.kill:
+        # Test stopping a non-running container
+        _logger.info('Test killing a non-existing container')
+        docker_stop('xy' * 5)
+    # Test building
+    _logger.info('Test building the base image container')
+    logfile = os.path.join(args.build_dir, 'logs', 'logs-build.txt')
+    docker_build(logfile, args.build_dir)
+
+    # Test testing
+    odoo_cmd = ['/data/build/odoo-bin', '-d %s' % args.db_name, '--addons-path=/data/build/addons', '--data-dir', '/data/build/datadir', '-r %s' % os.getlogin(), '-i', args.odoo_modules, '--test-enable', '--stop-after-init', '--max-cron-threads=0']
+
+    if args.kill:
+        logfile = os.path.join(args.build_dir, 'logs', 'logs-partial.txt')
+        container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
+        docker_run(odoo_cmd, logfile, args.build_dir, container_name)
+        # Test stopping the container
+        _logger.info('Waiting 30 sec before killing the build')
+        time.sleep(30)
+        docker_stop(container_name)
+        time.sleep(3)
+
+    # Test full testing
+    logfile = os.path.join(args.build_dir, 'logs', 'logs-full-test.txt')
+    container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
+    if args.coverage:
+        omit = ['--omit', '*__manifest__.py']
+        with open(os.path.join(args.build_dir, 'odoo-bin'), 'r') as exfile:
+            pyversion = 'python3' if 'python3' in exfile.readline() else 'python'
+        odoo_cmd = [ pyversion, '-m', 'coverage', 'run', '--branch', '--source', '/data/build'] + omit + odoo_cmd
+    docker_run(odoo_cmd, logfile, args.build_dir, container_name)
+    time.sleep(1)  # give time for the container to start
+
+    while docker_is_running(container_name):
+        time.sleep(10)
+        _logger.info("Waiting for %s to stop", container_name)
+
+    if args.run:
+        # Test running
+        logfile = os.path.join(args.build_dir, 'logs', 'logs-running.txt')
+        odoo_cmd = [
+            '/data/build/odoo-bin', '-d %s' % args.db_name,
+            '--db-filter', '%s.*$' % args.db_name, '--addons-path=/data/build/addons',
+            '-r %s' % os.getlogin(), '-i', 'web', '--max-cron-threads=1',
+            '--data-dir', '/data/build/datadir', '--workers', '2',
+            '--longpolling-port', '8070']
+        container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
+        docker_run(odoo_cmd, logfile, args.build_dir, container_name, exposed_ports=[args.odoo_port, args.odoo_port + 1], cpu_limit=300)
+
+if __name__ == '__main__':
+    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(name)s: %(message)s')
+    parser = argparse.ArgumentParser()
+    subparser = parser.add_subparsers(help='commands')
+    p_build = subparser.add_parser('build', help='Build docker image')
+    p_build.add_argument('build_dir')
+    p_build.set_defaults(func=build)
+    p_test = subparser.add_parser('tests', help='Test docker functions')
+    p_test.set_defaults(func=tests)
+    p_test.add_argument('build_dir')
+    p_test.add_argument('odoo_port', type=int)
+    p_test.add_argument('db_name')
+    p_test.add_argument('--coverage', action='store_true', help='test a build with coverage')
+    p_test.add_argument('-i', dest='odoo_modules', default='web', help='Comma separated list of modules')
+    p_test.add_argument('--kill', action='store_true', default=False, help='Also test container kill')
+    p_test.add_argument('--run', action='store_true', default=False, help='Also test running (Warning: the container survives exit)')
+    args = parser.parse_args()
+    args.func(args)
diff --git a/runbot/data/Dockerfile b/runbot/data/Dockerfile
new file mode 100644
index 00000000..21d83cdb
--- /dev/null
+++ b/runbot/data/Dockerfile
@@ -0,0 +1,69 @@
+FROM ubuntu:bionic
+ENV LANG C.UTF-8
+USER root
+# Install base files
+RUN set -x ; \
+    apt-get update \
+    && apt-get install -y --no-install-recommends \
+        apt-transport-https \
+        build-essential \
+        ca-certificates \
+        curl \
+        gawk \
+        gnupg \
+        libldap2-dev \
+        libsasl2-dev \
+        libxslt1-dev \
+        node-less \
+        python \
+        python-dev \
+        python-pip \
+        python3 \
+        python3-dev \
+        python3-pip \
+        python3-setuptools \
+        python3-wheel \
+        sed \
+        sudo \
+        xfonts-75dpi \
+        zlib1g-dev
+
+# Install Google Chrome
+RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
+    && echo "deb https://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google-chrome.list \
+    && apt-get update \
+    && apt-get install -y -qq google-chrome-stable
+
+# Install phantomjs
+RUN curl -sSL https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 -o /tmp/phantomjs.tar.bz2 \
+    && tar xvfO /tmp/phantomjs.tar.bz2 phantomjs-2.1.1-linux-x86_64/bin/phantomjs > /usr/local/bin/phantomjs \
+    && chmod +x /usr/local/bin/phantomjs \
+    && rm -f /tmp/phantomjs.tar.bz2
+
+# Install wkhtml
+RUN curl -sSL https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb -o /tmp/wkhtml.deb \
+    && dpkg --force-depends -i /tmp/wkhtml.deb \
+    && apt-get install -y -f --no-install-recommends \
+    && rm /tmp/wkhtml.deb
+
+# Install rtlcss (on Debian stretch)
+RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
+    && echo "deb https://deb.nodesource.com/node_8.x stretch main" > /etc/apt/sources.list.d/nodesource.list \
+    && apt-get update \
+    && apt-get install -y nodejs
+
+RUN npm install -g rtlcss
+
+# Install Odoo Debian dependencies
+ADD https://raw.githubusercontent.com/odoo/odoo/10.0/debian/control /tmp/p2-control
+ADD https://raw.githubusercontent.com/odoo/odoo/master/debian/control /tmp/p3-control
+RUN pip install -U setuptools wheel \
+    && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/p2-control /tmp/p3-control | awk '/^ [a-z]/ { gsub(/,/,"") ; print }' | sort -u | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Odoo requirements for python2 and python3 not fulfilled by Debian dependencies
+ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /root/p3-requirements.txt
+ADD https://raw.githubusercontent.com/odoo/odoo/10.0/requirements.txt /root/p2-requirements.txt
+RUN pip install --no-cache-dir -r /root/p2-requirements.txt coverage flanker==0.4.38 pylint==1.7.2 phonenumbers redis \
+    && pip3 install --no-cache-dir -r /root/p3-requirements.txt coverage websocket-client astroid==2.0.4 pylint==1.7.2 phonenumbers
diff --git a/runbot/models/build.py b/runbot/models/build.py
index b274e7cf..69926f92 100644
--- a/runbot/models/build.py
+++ b/runbot/models/build.py
@@ -11,7 +11,8 @@ import signal
 import subprocess
 import time
 from subprocess import CalledProcessError
-from ..common import dt2time, fqdn, now, locked, grep, time2str, rfind, uniq_list, local_pgadmin_cursor, lock, get_py_version
+from ..common import dt2time, fqdn, now, grep, time2str, rfind, uniq_list, local_pgadmin_cursor, get_py_version
+from ..container import docker_build, docker_run, docker_stop, docker_is_running
 from odoo import models, fields, api
 from odoo.exceptions import UserError
 from odoo.http import request
@@ -386,6 +387,10 @@ class runbot_build(models.Model):
             l[0] = "%s %s" % (build.dest, l[0])
             _logger.debug(*l)
 
+    def _get_docker_name(self):
+        self.ensure_one()
+        return '%s_%s' % (self.dest, self.job)
+
     def _schedule(self):
         """schedule the build"""
         jobs = self._list_jobs()
@@ -412,8 +417,7 @@ class runbot_build(models.Model):
                 build.write(values)
             else:
                 # check if current job is finished
-                lock_path = build._path('logs', '%s.lock' % build.job)
-                if locked(lock_path):
+                if docker_is_running(build._get_docker_name()):
                     # kill if overpassed
                     timeout = (build.branch_id.job_timeout or default_timeout) * 60 * ( build.coverage and 1.5 or 1)
                     if build.job != jobs[-1] and build.job_time > timeout:
@@ -444,10 +448,10 @@ class runbot_build(models.Model):
                 build._logger('running %s', build.job)
                 job_method = getattr(self, '_' + build.job)  # compute the job method to run
                 os.makedirs(build._path('logs'), exist_ok=True)
-                lock_path = build._path('logs', '%s.lock' % build.job)
+                os.makedirs(build._path('datadir'), exist_ok=True)
                 log_path = build._path('logs', '%s.txt' % build.job)
                 try:
-                    pid = job_method(build, lock_path, log_path)
+                    pid = job_method(build, log_path)
                     build.write({'pid': pid})
                 except Exception:
                     _logger.exception('%s failed running method %s', build.dest, build.job)
@@ -630,12 +634,7 @@ class runbot_build(models.Model):
             if build.host != host:
                 continue
             build._log('kill', 'Kill build %s' % build.dest)
-            if build.pid:
-                build._logger('killing %s', build.pid)
-                try:
-                    os.killpg(build.pid, signal.SIGKILL)
-                except OSError:
-                    pass
+            docker_stop(build._get_docker_name())
             v = {'state': 'done', 'job': False}
             if result:
                 v['result'] = result
@@ -668,15 +667,12 @@ class runbot_build(models.Model):
             'openerp-server.py',        # 7.0
             'bin/openerp-server.py',    # < 7.0
         ]
-        for server_path in map(build._path, bins):
-            if os.path.isfile(server_path):
+        for odoo_bin in bins:
+            if os.path.isfile(build._path(odoo_bin)):
                 break
 
         # commandline
-        cmd = [
-            build._path(server_path),
-            "--xmlrpc-port=%d" % build.port,
-        ]
+        cmd = [ os.path.join('/data/build', odoo_bin), ]
         # options
         if grep(build._server("tools/config.py"), "no-xmlrpcs"):
             cmd.append("--no-xmlrpcs")
@@ -694,29 +690,13 @@ class runbot_build(models.Model):
         datadir = build._path('datadir')
         if not os.path.exists(datadir):
             os.mkdir(datadir)
-        cmd += ["--data-dir", datadir]
+        cmd += ["--data-dir", '/data/build/datadir']
 
         # if build.branch_id.test_tags:
         #     cmd.extend(['--test_tags', "'%s'" % build.branch_id.test_tags])  # keep for next version
 
         return cmd, build.modules
 
-    def _spawn(self, cmd, lock_path, log_path, cpu_limit=None, shell=False, env=None):
-        def preexec_fn():
-            os.setsid()
-            if cpu_limit:
-                # set soft cpulimit
-                soft, hard = resource.getrlimit(resource.RLIMIT_CPU)
-                r = resource.getrusage(resource.RUSAGE_SELF)
-                cpu_time = r.ru_utime + r.ru_stime
-                resource.setrlimit(resource.RLIMIT_CPU, (cpu_time + cpu_limit, hard))
-            # close parent files
-            os.closerange(3, os.sysconf("SC_OPEN_MAX"))
-            lock(lock_path)
-        out = open(log_path, "w")
-        _logger.debug("spawn: %s stdout: %s", ' '.join(cmd), log_path)
-        p = subprocess.Popen(cmd, stdout=out, stderr=out, preexec_fn=preexec_fn, shell=shell, env=env, close_fds=False)
-        return p.pid
 
     def _github_status_notify_all(self, status):
         """Notify each repo with a status"""
@@ -751,15 +731,21 @@ class runbot_build(models.Model):
             build._github_status_notify_all(status)
 
     # Jobs definitions
-    # They all need "build, lock_pathn log_path" parameters
-    def _job_00_init(self, build, lock_path, log_path):
+    # They all need "build, log_path" parameters
+    def _job_00_init(self, build, log_path):
         build._log('init', 'Init build environment')
         # notify pending build - avoid confusing users by saying nothing
         build._github_status()
         build._checkout()
         return -2
 
-    def _job_10_test_base(self, build, lock_path, log_path):
+    def _job_02_docker_build(self, build, log_path):
+        """Build the docker image"""
+        build._log('docker_build', 'Building docker image')
+        docker_build(log_path, build._path())
+        return -2
+
+    def _job_10_test_base(self, build, log_path):
         build._log('test_base', 'Start test base module')
         # run base test
         self._local_pg_createdb("%s-base" % build.dest)
@@ -769,9 +755,9 @@ class runbot_build(models.Model):
         cmd += ['-d', '%s-base' % build.dest, '-i', 'base', '--stop-after-init', '--log-level=test', '--max-cron-threads=0']
         if build.extra_params:
             cmd.extend(shlex.split(build.extra_params))
-        return self._spawn(cmd, lock_path, log_path, cpu_limit=600)
+        return docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=600)
 
-    def _job_20_test_all(self, build, lock_path, log_path):
+    def _job_20_test_all(self, build, log_path):
         build._log('test_all', 'Start test all modules')
         cpu_limit = 2400
         self._local_pg_createdb("%s-all" % build.dest)
@@ -781,11 +767,8 @@ class runbot_build(models.Model):
         cmd += ['-d', '%s-all' % build.dest, '-i', mods, '--stop-after-init', '--log-level=test', '--max-cron-threads=0']
         if build.extra_params:
             cmd.extend(build.extra_params.split(' '))
-        env = None
         if build.coverage:
             cpu_limit *= 1.5
-            pyversion = get_py_version(build)
-            env = self._coverage_env(build)
             available_modules = [
                 os.path.basename(os.path.dirname(a))
                 for a in (glob.glob(build._server('addons/*/__openerp__.py')) +
@@ -793,25 +776,21 @@ class runbot_build(models.Model):
             ]
             bad_modules = set(available_modules) - set((mods or '').split(','))
             omit = ['--omit', ','.join('*addons/%s/*' %m for m in bad_modules) + '*__manifest__.py']
-            cmd = [pyversion, '-m', 'coverage', 'run', '--branch', '--source', build._server()] + omit + cmd[:]
+            cmd = [ get_py_version(build), '-m', 'coverage', 'run', '--branch', '--source', '/data/build'] + omit + cmd
         # reset job_start to an accurate job_20 job_time
         build.write({'job_start': now()})
-        return self._spawn(cmd, lock_path, log_path, cpu_limit=cpu_limit, env=env)
+        return docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=cpu_limit)
 
-    def _coverage_env(self, build):
-        return dict(os.environ, COVERAGE_FILE=build._path('.coverage'))
-
-    def _job_21_coverage_html(self, build, lock_path, log_path):
+    def _job_21_coverage_html(self, build, log_path):
         if not build.coverage:
             return -2
         build._log('coverage_html', 'Start generating coverage html')
-        pyversion = get_py_version(build)
         cov_path = build._path('coverage')
         os.makedirs(cov_path, exist_ok=True)
-        cmd = [pyversion, "-m", "coverage", "html", "-d", cov_path, "--ignore-errors"]
-        return self._spawn(cmd, lock_path, log_path, env=self._coverage_env(build))
+        cmd = [ get_py_version(build), "-m", "coverage", "html", "-d", "/data/build/coverage", "--ignore-errors"]
+        return docker_run(cmd, log_path, build._path(), build._get_docker_name())
 
-    def _job_22_coverage_result(self, build, lock_path, log_path):
+    def _job_22_coverage_result(self, build, log_path):
         if not build.coverage:
             return -2
         build._log('coverage_result', 'Start getting coverage result')
@@ -825,7 +804,7 @@ class runbot_build(models.Model):
             build._log('coverage_result', 'Coverage file not found')
         return -2  # nothing to wait for
 
-    def _job_30_run(self, build, lock_path, log_path):
+    def _job_30_run(self, build, log_path):
         # adjust job_end to record an accurate job_20 job_time
         build._log('run', 'Start running build %s' % build.dest)
         log_all = build._path('logs', 'job_20_test_all.txt')
@@ -848,17 +827,17 @@ class runbot_build(models.Model):
         cmd, mods = build._cmd()
         if os.path.exists(build._server('addons/im_livechat')):
             cmd += ["--workers", "2"]
-            cmd += ["--longpolling-port", "%d" % (build.port + 1)]
+            cmd += ["--longpolling-port", "8070"]
             cmd += ["--max-cron-threads", "1"]
         else:
             # not sure, to avoid old server to check other dbs
            cmd += ["--max-cron-threads", "0"]
 
-        cmd += ['-d', "%s-all" % build.dest]
+        cmd += ['-d', '%s-all' % build.dest]
 
         if grep(build._server("tools/config.py"), "db-filter"):
            if build.repo_id.nginx:
                 cmd += ['--db-filter', '%d.*$']
             else:
                 cmd += ['--db-filter', '%s.*$' % build.dest]
-        return self._spawn(cmd, lock_path, log_path, cpu_limit=None)
+        return docker_run(cmd, log_path, build._path(), build._get_docker_name(), exposed_ports = [build.port, build.port + 1])
diff --git a/runbot_cla/runbot.py b/runbot_cla/runbot.py
index 09d74d14..aa937f0f 100644
--- a/runbot_cla/runbot.py
+++ b/runbot_cla/runbot.py
@@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__)
 class runbot_build(models.Model):
     _inherit = "runbot.build"
 
-    def _job_05_check_cla(self, build, lock_path, log_path):
+    def _job_05_check_cla(self, build, log_path):
         cla_glob = glob.glob(build._path("doc/cla/*/*.md"))
         if cla_glob:
             description = "%s Odoo CLA signature check" % build.author
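
Note on manual testing: runbot/container.py has no Odoo dependency, so the new job flow can be exercised outside the runbot models, either through the argparse entry point at the bottom of the file (python3 container.py build <build_dir>, then python3 container.py tests <build_dir> <odoo_port> <db_name>) or directly, as in the minimal sketch below. The sketch is illustrative only and not part of the patch: /tmp/runbot-build, sketch-base and sketch-odoo are placeholder names, and it assumes a local Docker daemon plus a PostgreSQL socket in /var/run/postgresql reachable by the container's odoo user, which is what docker_run's volume mounts rely on.

# Minimal standalone sketch of the new container helpers (assumptions above).
import os
import time

import container  # runbot/container.py, imported standalone from inside the runbot/ directory

build_dir = '/tmp/runbot-build'  # placeholder: a directory containing an Odoo checkout
logs = os.path.join(build_dir, 'logs')
os.makedirs(logs, exist_ok=True)
os.makedirs(os.path.join(build_dir, 'datadir'), exist_ok=True)

# 1. Build the odoo:runbot_tests image from data/Dockerfile plus DOCKERUSER,
#    the same call the new _job_02_docker_build job makes.
container.docker_build(os.path.join(logs, 'docker-build.txt'), build_dir)

# 2. Start a containerized base-module test run; build_dir is mounted as
#    /data/build, so every path handed to odoo-bin is a container path.
cmd = ['/data/build/odoo-bin', '-d', 'sketch-base', '-i', 'base',
       '--data-dir', '/data/build/datadir',
       '--stop-after-init', '--max-cron-threads=0']
container.docker_run(cmd, os.path.join(logs, 'test-base.txt'), build_dir,
                     'sketch-odoo', cpu_limit=600)

# 3. Poll until the container exits (tests() loops on the same check and
#    _schedule() consults it once per cron tick), then stop defensively;
#    docker_stop() is harmless if the container is already gone.
while container.docker_is_running('sketch-odoo'):
    time.sleep(10)
container.docker_stop('sketch-odoo')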