diff --git a/README.md b/README.md
index 9869c2cb..fd6899c4 100644
--- a/README.md
+++ b/README.md
@@ -188,3 +188,17 @@ It is also possible to add test-tags to config step to allow more module to be i
### db template
Db creation will use template0 by default. It is possible to specify a specific template to use in runbot config *Postgresql template*. It is mainly used to add extensions by default.
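+
+For example, a template database with an extension preinstalled could be prepared as sketched below (the `template_runbot` name and the `unaccent` extension are only illustrative):
+
+```python
+import psycopg2
+
+# Sketch: create a template database and preinstall an extension in it.
+con = psycopg2.connect(dbname='postgres')
+con.autocommit = True  # CREATE DATABASE cannot run inside a transaction
+with con.cursor() as cr:
+    cr.execute("CREATE DATABASE template_runbot TEMPLATE template0")
+con.close()
+
+con = psycopg2.connect(dbname='template_runbot')
+con.autocommit = True
+with con.cursor() as cr:
+    cr.execute("CREATE EXTENSION IF NOT EXISTS unaccent")
+con.close()
+```
+
+The resulting database name can then be set as the *Postgresql template* in the runbot settings.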
+
+
+## Dockerfiles
+
+Runbot uses a Dockerfile Odoo model (`runbot.dockerfile`) to define the Dockerfile used for builds, and ships with a default one. This default Dockerfile is based on Ubuntu Bionic and is intended to build the recent supported Odoo versions (currently 12.0 up to 14.0).
+
+The model uses Odoo QWeb views as templates.
+
+A new Dockerfile can be created as needed, either by duplicating the default one and adapting the parameters in the view, or by providing a plain Dockerfile in the template. For example, changing the key `'from': 'ubuntu:bionic'` to `'from': 'debian:buster'` creates a new Dockerfile based on Debian instead of Ubuntu.
+The duplication can also be done through the ORM, as in the sketch below.
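+
+A minimal sketch of that duplication from an Odoo shell (assuming the default record keeps its shipped XML id `runbot.docker_default`; the new name and target base image are illustrative):
+
+```python
+# Sketch only: `env` is the environment of an Odoo shell on the runbot database.
+default = env.ref('runbot.docker_default')
+
+# copy() is overridden on runbot.dockerfile: it also duplicates the QWeb
+# template and suffixes its name and key with "(copy)".
+buster = default.copy()
+buster.name = 'Docker Debian Buster'
+
+# The template arch is exposed through the related `arch_base` field; where
+# exactly the 'from' key lives depends on the template content.
+buster.arch_base = buster.arch_base.replace(
+    "'from': 'ubuntu:bionic'", "'from': 'debian:buster'")
+```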
+
+Once the Dockerfile is created and the `to_build` field is checked, the Dockerfile will be built (note that no other runbot operations will occur while the image is being built).
+
+A version or a bundle can be assigned a specific Dockerfile, as illustrated in the sketch below.
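+
+A rough sketch of that assignment and of how it is resolved when a batch is prepared (the bundle and Dockerfile names are hypothetical; the fallback order mirrors `runbot/models/batch.py`):
+
+```python
+# Sketch only: `env` is the environment of an Odoo shell; names are illustrative.
+dockerfile = env['runbot.dockerfile'].search([('name', '=', 'Docker Debian Buster')], limit=1)
+bundle = env['runbot.bundle'].search([('name', '=', 'master-my-feature')], limit=1)
+bundle.dockerfile_id = dockerfile
+
+# Batch preparation picks the Dockerfile from the bundle first, then from its
+# base bundle, then from the version, and stores it on the build parameters.
+picked = (bundle.dockerfile_id
+          or bundle.base_id.dockerfile_id
+          or bundle.version_id.dockerfile_id)
+
+# Builds run in the image tagged after the Dockerfile name,
+# e.g. 'Docker Default' gives the image tag 'odoo:DockerDefault'.
+print(picked.image_tag)
+```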
diff --git a/runbot/__manifest__.py b/runbot/__manifest__.py
index c6c0962d..27e1116c 100644
--- a/runbot/__manifest__.py
+++ b/runbot/__manifest__.py
@@ -9,6 +9,8 @@
'version': '5.0',
'depends': ['base', 'base_automation', 'website'],
'data': [
+ 'templates/dockerfile.xml',
+ 'data/dockerfile_data.xml',
'data/build_parse.xml',
'data/error_link.xml',
'data/runbot_build_config_data.xml',
@@ -40,6 +42,7 @@
'views/bundle_views.xml',
'views/commit_views.xml',
'views/config_views.xml',
+ 'views/dockerfile_views.xml',
'views/error_log_views.xml',
'views/host_views.xml',
'views/repo_views.xml',
diff --git a/runbot/container.py b/runbot/container.py
index 0e4b1e07..e8eff021 100644
--- a/runbot/container.py
+++ b/runbot/container.py
@@ -99,38 +99,36 @@ class Command():
return res.read()
-def docker_build(log_path, build_dir):
- return _docker_build(log_path, build_dir)
+def docker_build(build_dir, image_tag):
+ return _docker_build(build_dir, image_tag)
-def _docker_build(log_path, build_dir):
+def _docker_build(build_dir, image_tag):
"""Build the docker image
- :param log_path: path to the logfile that will contain odoo stdout and stderr
- :param build_dir: the build directory that contains the Odoo sources to build.
+ :param build_dir: the build directory that contains the Dockerfile.
+ :param image_tag: name used to tag the resulting docker image
"""
- # Prepare docker image
- docker_dir = os.path.join(build_dir, 'docker')
- os.makedirs(docker_dir, exist_ok=True)
- shutil.copy(os.path.join(os.path.dirname(__file__), 'data', 'Dockerfile'), docker_dir)
# synchronise the current user with the odoo user inside the Dockerfile
- with open(os.path.join(docker_dir, 'Dockerfile'), 'a') as df:
+ with open(os.path.join(build_dir, 'Dockerfile'), 'a') as df:
df.write(DOCKERUSER)
+ log_path = os.path.join(build_dir, 'docker_build.txt')
logs = open(log_path, 'w')
- dbuild = subprocess.Popen(['docker', 'build', '--tag', 'odoo:runbot_tests', '.'], stdout=logs, stderr=logs, cwd=docker_dir)
- dbuild.wait()
+ dbuild = subprocess.Popen(['docker', 'build', '--tag', image_tag, '.'], stdout=logs, stderr=logs, cwd=build_dir)
+ return dbuild.wait()
def docker_run(*args, **kwargs):
return _docker_run(*args, **kwargs)
-def _docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None, env_variables=None):
+def _docker_run(run_cmd, log_path, build_dir, container_name, image_tag='odoo:DockerDefault', exposed_ports=None, cpu_limit=None, preexec_fn=None, ro_volumes=None, env_variables=None):
"""Run tests in a docker container
:param run_cmd: command string to run in container
:param log_path: path to the logfile that will contain odoo stdout and stderr
:param build_dir: the build directory that contains the Odoo sources to build.
This directory is shared as a volume with the container
:param container_name: used to give a name to the container for later reference
+ :param image_tag: Docker image tag name to select which docker image to use
:param exposed_ports: if not None, starting at 8069, ports will be exposed as exposed_ports numbers
:params ro_volumes: dict of dest:source volumes to mount readonly in builddir
:params env_variables: list of environment variables
@@ -179,7 +177,7 @@ def _docker_run(run_cmd, log_path, build_dir, container_name, exposed_ports=None
docker_command.extend(['-p', '127.0.0.1:%s:%s' % (hp, dp)])
if cpu_limit:
docker_command.extend(['--ulimit', 'cpu=%s' % int(cpu_limit)])
- docker_command.extend(['odoo:runbot_tests', '/bin/bash', '-c', "%s" % run_cmd])
+ docker_command.extend([image_tag, '/bin/bash', '-c', "%s" % run_cmd])
subprocess.Popen(docker_command, stdout=logs, stderr=logs, preexec_fn=preexec_fn, close_fds=False, cwd=build_dir)
_logger.info('Started Docker container %s', container_name)
return
@@ -286,102 +284,6 @@ def sanitize_container_name(name):
name = re.sub('^[^a-zA-Z0-9]+', '', name)
return re.sub('[^a-zA-Z0-9_.-]', '', name)
-def tests(args):
- _logger.info('Start container tests')
- os.makedirs(os.path.join(args.build_dir, 'logs'), exist_ok=True)
- os.makedirs(os.path.join(args.build_dir, 'datadir'), exist_ok=True)
-
- if args.kill:
- # Test stopping a non running container
- _logger.info('Test killing an non existing container')
- docker_stop('xy' * 5)
- # Test building
- _logger.info('Test building the base image container')
- logfile = os.path.join(args.build_dir, 'logs', 'logs-build.txt')
- docker_build(logfile, args.build_dir)
-
- with open(os.path.join(args.build_dir, 'odoo-bin'), 'r') as exfile:
- py_version = '3' if 'python3' in exfile.readline() else ''
-
- # Test environment variables
- if args.env:
- cmd = Command(None, ['echo testa is $TESTA and testb is $TESTB '], None)
- env_variables = ['TESTA=test a', 'TESTB="test b"']
- env_log = os.path.join(args.build_dir, 'logs', 'logs-env.txt')
- container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
- docker_run(cmd.build(), env_log, args.build_dir, container_name, env_variables=env_variables)
- expected = 'testa is test a and testb is "test b"'
- time.sleep(3) # ugly sleep to wait for docker process to flush the log file
- assert expected in open(env_log,'r').read()
-
- # Test testing
- pres = [['sudo', 'pip%s' % py_version, 'install', '-r', '/data/build/requirements.txt']]
- posts = None
- python_params = []
- if args.coverage:
- omit = ['--omit', '*__manifest__.py']
- python_params = ['-m', 'coverage', 'run', '--branch', '--source', '/data/build'] + omit
- posts = [['python%s' % py_version, "-m", "coverage", "html", "-d", "/data/build/coverage", "--ignore-errors"], ['python%s' % py_version, "-m", "coverage", "xml", "--ignore-errors"]]
- os.makedirs(os.path.join(args.build_dir, 'coverage'), exist_ok=True)
- elif args.flamegraph:
- flame_log = '/data/build/logs/flame.log'
- python_params = ['-m', 'flamegraph', '-o', flame_log]
- odoo_cmd = ['python%s' % py_version] + python_params + ['/data/build/odoo-bin', '-d %s' % args.db_name, '--addons-path=/data/build/addons', '-i', args.odoo_modules, '--test-enable', '--stop-after-init', '--max-cron-threads=0']
- cmd = Command(pres, odoo_cmd, posts)
- cmd.add_config_tuple('data_dir', '/data/build/datadir')
- cmd.add_config_tuple('db_user', '%s' % os.getlogin())
-
- if args.dump:
- os.makedirs(os.path.join(args.build_dir, 'logs', args.db_name), exist_ok=True)
- dump_dir = '/data/build/logs/%s/' % args.db_name
- sql_dest = '%s/dump.sql' % dump_dir
- filestore_path = '/data/build/datadir/filestore/%s' % args.db_name
- filestore_dest = '%s/filestore/' % dump_dir
- zip_path = '/data/build/logs/%s.zip' % args.db_name
- cmd.finals.append(['pg_dump', args.db_name, '>', sql_dest])
- cmd.finals.append(['cp', '-r', filestore_path, filestore_dest])
- cmd.finals.append(['cd', dump_dir, '&&', 'zip', '-rm9', zip_path, '*'])
-
- if args.flamegraph:
- cmd.finals.append(['flamegraph.pl', '--title', 'Flamegraph', flame_log, '>', '/data/build/logs/flame.svg'])
- cmd.finals.append(['gzip', '-f', flame_log])
-
- if args.kill:
- logfile = os.path.join(args.build_dir, 'logs', 'logs-partial.txt')
- container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
- docker_run(cmd.build(), logfile, args.build_dir, container_name)
- # Test stopping the container
- _logger.info('Waiting 30 sec before killing the build')
- time.sleep(30)
- docker_stop(container_name)
- time.sleep(3)
-
- # Test full testing
- logfile = os.path.join(args.build_dir, 'logs', 'logs-full-test.txt')
- container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
- docker_run(cmd, logfile, args.build_dir, container_name)
- time.sleep(1) # give time for the container to start
-
- while docker_is_running(container_name):
- time.sleep(10)
- _logger.info("Waiting for %s to stop", container_name)
-
- if args.run:
- # Test running
- logfile = os.path.join(args.build_dir, 'logs', 'logs-running.txt')
- odoo_cmd = [
- 'python%s' % py_version,
- '/data/build/odoo-bin', '-d %s' % args.db_name,
- '--db-filter', '%s.*$' % args.db_name, '--addons-path=/data/build/addons',
- '-r %s' % os.getlogin(), '-i', 'web', '--max-cron-threads=1',
- '--data-dir', '/data/build/datadir', '--workers', '2',
- '--longpolling-port', '8070', '--unaccent']
- smtp_host = docker_get_gateway_ip()
- if smtp_host:
- odoo_cmd.extend(['--smtp', smtp_host])
- container_name = 'odoo-container-test-%s' % datetime.datetime.now().microsecond
- cmd = Command(pres, odoo_cmd, [])
- docker_run(cmd.build(), logfile, args.build_dir, container_name, exposed_ports=[args.odoo_port, args.odoo_port + 1], cpu_limit=300)
##############################################################################
@@ -406,27 +308,3 @@ if os.environ.get('RUNBOT_MODE') == 'test':
log_file.write('Initiating shutdown\n')
docker_run = fake_docker_run
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(name)s: %(message)s')
- parser = argparse.ArgumentParser()
- subparser = parser.add_subparsers(dest='command', required='True', help='commands')
- p_build = subparser.add_parser('build', help='Build docker image')
- p_build.add_argument('build_dir')
- p_build.set_defaults(func=build)
- p_test = subparser.add_parser('tests', help='Test docker functions')
- p_test.set_defaults(func=tests)
- p_test.add_argument('build_dir')
- p_test.add_argument('odoo_port', type=int)
- p_test.add_argument('db_name')
- group = p_test.add_mutually_exclusive_group()
- group.add_argument('--coverage', action='store_true', help='test a build with coverage')
- group.add_argument('--flamegraph', action='store_true', help='test a build and draw a flamegraph')
- p_test.add_argument('-i', dest='odoo_modules', default='web', help='Comma separated list of modules')
- p_test.add_argument('--kill', action='store_true', default=False, help='Also test container kill')
- p_test.add_argument('--dump', action='store_true', default=False, help='Test database export with pg_dump')
- p_test.add_argument('--run', action='store_true', default=False, help='Also test running (Warning: the container survives exit)')
- p_test.add_argument('--env', action='store_true', default=False, help='Test passing environment variables')
- args = parser.parse_args()
- args.func(args)
diff --git a/runbot/data/Dockerfile b/runbot/data/Dockerfile
deleted file mode 100644
index 52352de2..00000000
--- a/runbot/data/Dockerfile
+++ /dev/null
@@ -1,103 +0,0 @@
-FROM ubuntu:bionic
-ENV LANG C.UTF-8
-USER root
-# Install base files
-RUN set -x ; \
- apt-get update \
- && apt-get install -y --no-install-recommends \
- apt-transport-https \
- build-essential \
- ca-certificates \
- curl \
- ffmpeg \
- file \
- fonts-freefont-ttf \
- fonts-noto-cjk \
- gawk \
- gnupg \
- libldap2-dev \
- libsasl2-dev \
- libxslt1-dev \
- node-less \
- ocrmypdf \
- python \
- python-dev \
- python-pip \
- python3 \
- python3-dev \
- python3-pip \
- python3-setuptools \
- python3-wheel \
- sed \
- sudo \
- unzip \
- xfonts-75dpi \
- zip \
- zlib1g-dev
-
-# Install Google Chrome
-RUN curl -sSL http://nightly.odoo.com/odoo.key | apt-key add - \
- && echo "deb http://nightly.odoo.com/deb/bionic ./" > /etc/apt/sources.list.d/google-chrome.list \
- && apt-get update \
- && apt-get install -y -qq google-chrome-stable=80.0.3987.116-1
-
-# Install phantomjs
-RUN curl -sSL https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 -o /tmp/phantomjs.tar.bz2 \
- && tar xvfO /tmp/phantomjs.tar.bz2 phantomjs-2.1.1-linux-x86_64/bin/phantomjs > /usr/local/bin/phantomjs \
- && chmod +x /usr/local/bin/phantomjs \
- && rm -f /tmp/phantomjs.tar.bz2
-
-# Install wkhtml
-RUN curl -sSL https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb -o /tmp/wkhtml.deb \
- && apt-get update \
- && dpkg --force-depends -i /tmp/wkhtml.deb \
- && apt-get install -y -f --no-install-recommends \
- && rm /tmp/wkhtml.deb
-
-# Install rtlcss (on Debian stretch)
-RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
- && echo "deb https://deb.nodesource.com/node_8.x stretch main" > /etc/apt/sources.list.d/nodesource.list \
- && apt-get update \
- && apt-get install -y nodejs
-
-RUN npm install -g rtlcss
-
-# Install es-check tool
-RUN npm install -g es-check
-
-# Install for migration scripts
-RUN apt-get update \
- && apt-get install -y python3-markdown
-
-# Install flamegraph.pl
-ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl
-RUN chmod +rx /usr/local/bin/flamegraph.pl
-
-# Install postgresql-client-12
-RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
- && echo "deb http://apt.postgresql.org/pub/repos/apt/ bionic-pgdg main" > /etc/apt/sources.list.d/pgclient.list \
- && apt-get update \
- && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-12 \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/*
-
-# Install Odoo Debian dependencies
-ADD https://raw.githubusercontent.com/odoo/odoo/10.0/debian/control /tmp/p2-control
-ADD https://raw.githubusercontent.com/odoo/odoo/master/debian/control /tmp/p3-control
-RUN pip install -U setuptools wheel \
- && apt-get update \
- && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/p2-control /tmp/p3-control \
- | awk '/^ [a-z]/ { gsub(/,/,"") ; print }' | sort -u \
- | egrep -v 'postgresql-client' \
- | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \
- | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/*
-
-# Install Odoo requirements for python2 and python3 not fullfilled by Debian dependencies
-ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /root/p3-requirements.txt
-ADD https://raw.githubusercontent.com/odoo/odoo/10.0/requirements.txt /root/p2-requirements.txt
-RUN pip install --no-cache-dir -r /root/p2-requirements.txt coverage flanker==0.4.38 pylint==1.7.2 phonenumbers redis \
- && pip3 install --no-cache-dir -r /root/p3-requirements.txt coverage==4.5.4 websocket-client astroid==2.4.2 \
- pylint==2.6.0 phonenumbers pyCrypto dbfread==2.0.7 firebase-admin==2.17.0 flamegraph pdfminer.six==20200720 \
- pdf417gen==0.7.1
diff --git a/runbot/data/dockerfile_data.xml b/runbot/data/dockerfile_data.xml
new file mode 100644
index 00000000..a811e710
--- /dev/null
+++ b/runbot/data/dockerfile_data.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<odoo>
+    <data>
+        <record id="docker_default" model="runbot.dockerfile">
+            <field name="name">Docker Default</field>
+            <field name="to_build" eval="True"/>
+            <field name="description">Default Dockerfile for latest Odoo versions.</field>
+        </record>
+    </data>
+</odoo>
diff --git a/runbot/models/__init__.py b/runbot/models/__init__.py
index fce22625..c2b05ba8 100644
--- a/runbot/models/__init__.py
+++ b/runbot/models/__init__.py
@@ -8,6 +8,7 @@ from . import build_error
from . import bundle
from . import commit
from . import database
+from . import dockerfile
from . import event
from . import host
from . import ir_cron
diff --git a/runbot/models/batch.py b/runbot/models/batch.py
index aa1d0a49..29717cb5 100644
--- a/runbot/models/batch.py
+++ b/runbot/models/batch.py
@@ -134,6 +134,11 @@ class Batch(models.Model):
project = bundle.project_id
if not bundle.version_id:
_logger.error('No version found on bundle %s in project %s', bundle.name, project.name)
+
+ dockerfile_id = bundle.dockerfile_id or bundle.base_id.dockerfile_id or bundle.version_id.dockerfile_id
+ if not dockerfile_id:
+ _logger.error('No Dockerfile found!')
+
triggers = self.env['runbot.trigger'].search([ # could be optimised for multiple batches. Ormcached method?
('project_id', '=', project.id),
('category_id', '=', self.category_id.id)
@@ -279,7 +284,8 @@ class Batch(models.Model):
'trigger_id': trigger.id, # for future reference and access rights
'config_data': {},
'commit_link_ids': [(6, 0, [commit_link_by_repos[repo.id].id for repo in trigger_repos])],
- 'modules': bundle.modules
+ 'modules': bundle.modules,
+ 'dockerfile_id': dockerfile_id,
}
params_value['builds_reference_ids'] = trigger._reference_builds(bundle)
diff --git a/runbot/models/build.py b/runbot/models/build.py
index e53f7fe9..c759f256 100644
--- a/runbot/models/build.py
+++ b/runbot/models/build.py
@@ -9,7 +9,7 @@ import time
import datetime
import hashlib
from ..common import dt2time, fqdn, now, grep, local_pgadmin_cursor, s2human, dest_reg, os, list_local_dbs, pseudo_markdown, RunbotException
-from ..container import docker_stop, docker_state, Command
+from ..container import docker_stop, docker_state, Command, docker_run
from ..fields import JsonDictField
from odoo import models, fields, api
from odoo.exceptions import UserError, ValidationError
@@ -50,6 +50,8 @@ class BuildParameters(models.Model):
project_id = fields.Many2one('runbot.project', required=True, index=True) # for access rights
trigger_id = fields.Many2one('runbot.trigger', index=True) # for access rights
category = fields.Char('Category', index=True) # normal vs nightly vs weekly, ...
+ dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, default=lambda self: self.env.ref('runbot.docker_default', raise_if_not_found=False))
+ skip_requirements = fields.Boolean('Skip requirements.txt auto install')
# other informations
extra_params = fields.Char('Extra cmd args')
config_id = fields.Many2one('runbot.build.config', 'Run Config', required=True,
@@ -86,6 +88,8 @@ class BuildParameters(models.Model):
'upgrade_from_build_id': param.upgrade_from_build_id.id,
'upgrade_to_build_id': param.upgrade_to_build_id.id,
'dump_db': param.dump_db.id,
+ 'dockerfile_id': param.dockerfile_id.id,
+ 'skip_requirements': param.skip_requirements,
}
param.fingerprint = hashlib.sha256(str(cleaned_vals).encode('utf8')).hexdigest()
@@ -716,6 +720,14 @@ class BuildResult(models.Model):
build._log("run", message, level='ERROR')
build._kill(result='ko')
+ def _docker_run(self, *args, **kwargs):
+ self.ensure_one()
+ if 'image_tag' not in kwargs:
+ kwargs.update({'image_tag': self.params_id.dockerfile_id.image_tag})
+ if kwargs['image_tag'] != 'odoo:DockerDefault':
+ self._log('Preparing', 'Using Dockerfile Tag %s' % kwargs['image_tag'])
+ docker_run(*args, **kwargs)
+
def _path(self, *l, **kw):
"""Return the repo build path"""
self.ensure_one()
@@ -908,7 +920,7 @@ class BuildResult(models.Model):
py_version = py_version if py_version is not None else build._get_py_version()
pres = []
for commit_id in self.env.context.get('defined_commit_ids') or self.params_id.commit_ids:
- if os.path.isfile(commit_id._source_path('requirements.txt')):
+ if not self.params_id.skip_requirements and os.path.isfile(commit_id._source_path('requirements.txt')):
repo_dir = self._docker_source_folder(commit_id)
requirement_path = os.path.join(repo_dir, 'requirements.txt')
pres.append(['sudo', 'pip%s' % py_version, 'install', '-r', '%s' % requirement_path])
diff --git a/runbot/models/build_config.py b/runbot/models/build_config.py
index 49c7fd5c..cbfc9278 100644
--- a/runbot/models/build_config.py
+++ b/runbot/models/build_config.py
@@ -348,7 +348,7 @@ class ConfigStep(models.Model):
build_port = build.port
self.env.cr.commit() # commit before docker run to be 100% sure that db state is consistent with dockers
self.invalidate_cache()
- res = docker_run(cmd, log_path, build_path, docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables)
+ res = build._docker_run(cmd, log_path, build_path, docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables)
self.env['runbot.runbot']._reload_nginx()
return res
@@ -434,7 +434,7 @@ class ConfigStep(models.Model):
max_timeout = int(self.env['ir.config_parameter'].get_param('runbot.runbot_timeout', default=10000))
timeout = min(self.cpu_limit, max_timeout)
env_variables = self.additionnal_env.split(';') if self.additionnal_env else []
- return docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables)
+ return build._docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables)
def _upgrade_create_childs(self):
pass
@@ -667,7 +667,7 @@ class ConfigStep(models.Model):
exception_env = self.env['runbot.upgrade.exception']._generate()
if exception_env:
env_variables.append(exception_env)
- docker_run(migrate_cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables)
+ build._docker_run(migrate_cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=timeout, ro_volumes=exports, env_variables=env_variables, image_tag=target.params_id.dockerfile_id.image_tag)
def _run_restore(self, build, log_path):
# exports = build._checkout()
@@ -708,7 +708,7 @@ class ConfigStep(models.Model):
])
- docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=self.cpu_limit)
+ build._docker_run(cmd, log_path, build._path(), build._get_docker_name(), cpu_limit=self.cpu_limit)
def _reference_builds(self, bundle, trigger):
upgrade_dumps_trigger_id = trigger.upgrade_dumps_trigger_id
diff --git a/runbot/models/bundle.py b/runbot/models/bundle.py
index a8b160a1..66ecb506 100644
--- a/runbot/models/bundle.py
+++ b/runbot/models/bundle.py
@@ -47,6 +47,8 @@ class Bundle(models.Model):
host_id = fields.Many2one('runbot.host', compute="_compute_host_id", store=True)
+ dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, help="Use a custom Dockerfile")
+
@api.depends('name')
def _compute_host_id(self):
assigned_only = None
diff --git a/runbot/models/dockerfile.py b/runbot/models/dockerfile.py
new file mode 100644
index 00000000..31573610
--- /dev/null
+++ b/runbot/models/dockerfile.py
@@ -0,0 +1,53 @@
+import logging
+import re
+from odoo import models, fields, api
+from odoo.addons.base.models.qweb import QWebException
+
+_logger = logging.getLogger(__name__)
+
+
+class Dockerfile(models.Model):
+ _name = 'runbot.dockerfile'
+ _inherit = [ 'mail.thread' ]
+ _description = "Dockerfile"
+
+ name = fields.Char('Dockerfile name', required=True, help="Name of Dockerfile")
+ image_tag = fields.Char(compute='_compute_image_tag', store=True)
+ template_id = fields.Many2one('ir.ui.view', string='Docker Template', domain=[('type', '=', 'qweb')], context={'default_type': 'qweb', 'default_arch_base': ''})
+ arch_base = fields.Text(related='template_id.arch_base', readonly=False)
+ dockerfile = fields.Text(compute='_compute_dockerfile', tracking=True)
+ to_build = fields.Boolean('To Build', help='Build Dockerfile. Check this when the Dockerfile is ready.', default=False)
+ version_ids = fields.One2many('runbot.version', 'dockerfile_id', string='Versions')
+ description = fields.Text('Description')
+ view_ids = fields.Many2many('ir.ui.view', compute='_compute_view_ids')
+
+ _sql_constraints = [('runbot_dockerfile_name_unique', 'unique(name)', 'A Dockerfile with this name already exists')]
+
+ @api.returns('self', lambda value: value.id)
+ def copy(self, default=None):
+ copied_record = super().copy(default={'name': '%s (copy)' % self.name, 'to_build': False})
+ copied_record.template_id = self.template_id.copy()
+ copied_record.template_id.name = '%s (copy)' % copied_record.template_id.name
+ copied_record.template_id.key = '%s (copy)' % copied_record.template_id.key
+ return copied_record
+
+ @api.depends('template_id.arch_base')
+ def _compute_dockerfile(self):
+ for rec in self:
+ try:
+ res = rec.template_id.render().decode() if rec.template_id else ''
+ rec.dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip()
+ except QWebException:
+ rec.dockerfile = ''
+
+ @api.depends('name')
+ def _compute_image_tag(self):
+ for rec in self:
+ if rec.name:
+ rec.image_tag = 'odoo:%s' % re.sub(r'[ /:\(\)\[\]]', '', rec.name)
+
+ @api.depends('template_id')
+ def _compute_view_ids(self):
+ for rec in self:
+ keys = re.findall(r'<t.+t-call="(.+)".+', rec.arch_base or '')
+ rec.view_ids = self.env['ir.ui.view'].search([('type', '=', 'qweb'), ('key', 'in', keys)]).ids
diff --git a/runbot/templates/dockerfile.xml b/runbot/templates/dockerfile.xml
new file mode 100644
--- /dev/null
+++ b/runbot/templates/dockerfile.xml
+FROM
+ENV LANG C.UTF-8
+USER root
+
+
+
+# Install debian packages
+RUN set -x ; \
+ apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
+ && rm -rf /var/lib/apt/lists/*
+
+
+
+
+
+# Install Google Chrome
+RUN curl -sSL http://nightly.odoo.com/odoo.key | apt-key add - \
+ && echo "deb http://nightly.odoo.com/deb/ ./" > /etc/apt/sources.list.d/google-chrome.list \
+ && apt-get update \
+ && apt-get install -y -qq google-chrome-stable= \
+ && rm -rf /var/lib/apt/lists/*
+
+
+
+# Install phantomjs
+RUN curl -sSL https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 -o /tmp/phantomjs.tar.bz2 \
+ && tar xvfO /tmp/phantomjs.tar.bz2 phantomjs-2.1.1-linux-x86_64/bin/phantomjs > /usr/local/bin/phantomjs \
+ && chmod +x /usr/local/bin/phantomjs \
+ && rm -f /tmp/phantomjs.tar.bz2
+
+
+
+# Install wkhtml
+RUN curl -sSL -o /tmp/wkhtml.deb \
+ && apt-get update \
+ && dpkg --force-depends -i /tmp/wkhtml.deb \
+ && apt-get install -y -f --no-install-recommends \
+ && rm /tmp/wkhtml.deb
+
+
+
+
+# Install nodejs
+RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
+ && echo "deb https://deb.nodesource.com/node_.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \
+ && apt-get update \
+ && apt-get install -y nodejs
+
+
+
+RUN npm install -g
+
+
+
+ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl
+RUN chmod +rx /usr/local/bin/flamegraph.pl
+
+
+
+
+RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
+ && echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgclient.list \
+ && apt-get update \
+ && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client- \
+ && rm -rf /var/lib/apt/lists/*
+
+
+
+
+ADD https://raw.githubusercontent.com/odoo/odoo//debian/control /tmp/control.txt
+RUN apt-get update \
+ && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \
+ | awk '/^ [a-z]/ { gsub(/,/,"") ; print }' | sort -u \
+ | egrep -v 'postgresql-client' \
+ | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \
+ | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+
+
+ADD https://raw.githubusercontent.com/odoo/odoo//requirements.txt /root/requirements.txt
+RUN -m pip install setuptools wheel && \
+ -m pip install --no-cache-dir -r /root/requirements.txt && \
+ -m pip install
diff --git a/runbot/templates/utils.xml b/runbot/templates/utils.xml
index 93ad485c..9f089f63 100644
--- a/runbot/templates/utils.xml
+++ b/runbot/templates/utils.xml
@@ -296,7 +296,7 @@