From d2872f8e16785bdb59d6bba222ff796865264118 Mon Sep 17 00:00:00 2001
From: Xavier-Do
Date: Thu, 25 Jul 2024 14:30:46 +0200
Subject: [PATCH] [IMP] runbot: rework dockerfile generation

---
 runbot/__manifest__.py | 2 +-
 runbot/container.py | 5 +-
 runbot/data/dockerfile_data.xml | 196 ++++++++++++-
 runbot/migrations/17.0.5.7/post-migration.py | 22 ++
 runbot/migrations/17.0.5.7/pre-migration.py | 13 +
 runbot/models/__init__.py | 1 +
 runbot/models/build.py | 7 +-
 runbot/models/docker.py | 289 ++++++++++++++++++-
 runbot/models/host.py | 58 +---
 runbot/models/ir_model_fields_converter.py | 7 +
 runbot/models/version.py | 2 +-
 runbot/security/ir.model.access.csv | 3 +
 runbot/templates/dockerfile.xml | 147 ----------
 runbot/tests/test_dockerfile.py | 116 ++++----
 runbot/tests/test_version.py | 9 +
 runbot/views/dockerfile_views.xml | 88 +++++-
 runbot/views/menus.xml | 2 +
 17 files changed, 682 insertions(+), 285 deletions(-)
 create mode 100644 runbot/migrations/17.0.5.7/post-migration.py
 create mode 100644 runbot/migrations/17.0.5.7/pre-migration.py
 create mode 100644 runbot/models/ir_model_fields_converter.py

diff --git a/runbot/__manifest__.py b/runbot/__manifest__.py
index 1288b4b3..8cec3007 100644
--- a/runbot/__manifest__.py
+++ b/runbot/__manifest__.py
@@ -6,7 +6,7 @@
     'author': "Odoo SA",
     'website': "http://runbot.odoo.com",
     'category': 'Website',
-    'version': '5.6',
+    'version': '5.7',
     'application': True,
     'depends': ['base', 'base_automation', 'website'],
     'data': [
diff --git a/runbot/container.py b/runbot/container.py
index 6312230c..74c91c56 100644
--- a/runbot/container.py
+++ b/runbot/container.py
@@ -9,6 +9,7 @@ When testing this file:
 The second parameter is the exposed port
 """
 import configparser
+import getpass
 import io
 import logging
 import os
@@ -28,6 +29,7 @@ with warnings.catch_warnings():
     )
     import docker

+USERNAME = getpass.getuser()
 _logger = logging.getLogger(__name__)
 docker_stop_failures = {}

@@ -198,7 +200,8 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
         command=['/bin/bash', '-c', f'exec &>> /data/buildlogs.txt ;{run_cmd}'],
         auto_remove=True,
-        detach=True
+        detach=True,
+        user=USERNAME,
     )
     if container.status not in ('running', 'created') :
         _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)
diff --git a/runbot/data/dockerfile_data.xml b/runbot/data/dockerfile_data.xml
index d28cba53..74e64f43 100644
--- a/runbot/data/dockerfile_data.xml
+++ b/runbot/data/dockerfile_data.xml
@@ -2,8 +2,202 @@
         Docker Default
-        True
         Default Dockerfile for latest Odoo versions.
+ + + + template + Install debian packages + + RUN set -x ; \ + apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends {$packages} \ + && rm -rf /var/lib/apt/lists/* + + + + + template + Install pip packages + {} + RUN python3 -m pip install --no-cache-dir {$packages} + + + + + template + Create user template + {"USERUID":"/missing/", "USERNAME":"/missing/", "USERUID":"/missing/"} + RUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u {USERUID} -g {USERNAME} -G audio,video {USERNAME} + + + + + + raw + FROM ubuntu:noble + FROM ubuntu:noble + + + + + + raw + ENV LANG C.UTF-8 + ENV LANG C.UTF-8 + + + + + + raw + USER root + USER root + + + + + + reference_layer + Install base debian packages + apt-transport-https build-essential ca-certificates curl file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release npm ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev + + + + + + + reference_layer + Install python debian packages + publicsuffix python3 flake8 python3-dbfread python3-dev python3-gevent python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-websocket python3-google-auth libpq-dev pylint python3-jwt python3-asn1crypto python3-html2text python3-suds python3-xmlsec + + + + + + + template + Install wkhtmltopdf + + + RUN curl -sSL https://github.com/wkhtmltopdf/packaging/releases/download/{wkhtmltopdf_version}/wkhtmltox_{wkhtmltopdf_version}.{wkhtmltopdf_os_release}_amd64.deb -o /tmp/wkhtml.deb \ + && apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends --fix-missing -qq /tmp/wkhtml.deb \ + && rm -rf /var/lib/apt/lists/* \ + && rm /tmp/wkhtml.deb + + + + + + raw + ENV NODE_PATH=/usr/lib/node_modules/ + ENV NODE_PATH=/usr/lib/node_modules/ + + + + + + raw + ENV npm_config_prefix=/usr + ENV npm_config_prefix=/usr + + + + + + raw + RUN npm install + RUN npm install --force -g rtlcss@3.4.0 es-check@6.0.0 eslint@8.1.0 prettier@2.7.1 eslint-config-prettier@8.5.0 eslint-plugin-prettier@4.2.1 + + + + + + template + Install branch debian/control with latest postgresql-client + + # This layer updates the repository list to get the latest postgresql-client, mainly needed if the host postgresql version is higher than the default version of the docker os +ADD https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/debian/control /tmp/control.txt +RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc -o /etc/apt/trusted.gpg.d/psql_client.asc \ + && echo "deb http://apt.postgresql.org/pub/repos/apt/ {os_release_name}-pgdg main" > /etc/apt/sources.list.d/pgclient.list \ + && apt-get update \ + && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \ + | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \ + | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq --no-install-recommends \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + + + + + + template + Install chrome + + RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_{chrome_version}_amd64.deb -o /tmp/chrome.deb \ + && apt-get update \ + && apt-get -y install --no-install-recommends /tmp/chrome.deb \ + && rm /tmp/chrome.deb + + + + + + template + RUN deluser ubuntu + # Ubuntu Noble decided to add a default use ubuntu with id 1000 in the image, that may interact with the user creation, lets remove it +RUN deluser ubuntu + + + + + + reference_layer 
+ Create user for docker default + + + + + + + template + Switch user + USER {USERNAME} + + + + + + template + ENV PIP_BREAK_SYSTEM_PACKAGES=1 + # needed to install requirements outside a venv +ENV PIP_BREAK_SYSTEM_PACKAGES=1 + + + + + + reference_layer + Install external_dependencies deps + + ebaysdk==2.1.5 # no debian package but needed in odoo requirements + pdf417gen==0.7.1 # needed by l10n_cl_edi + ruff==0.4.7 # runbot check style + + + + + + + + template + Install branch requirements + + ADD --chown={USERNAME} https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/requirements.txt /tmp/requirements.txt +RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt + + diff --git a/runbot/migrations/17.0.5.7/post-migration.py b/runbot/migrations/17.0.5.7/post-migration.py new file mode 100644 index 00000000..b88603d3 --- /dev/null +++ b/runbot/migrations/17.0.5.7/post-migration.py @@ -0,0 +1,22 @@ +import logging + +from markupsafe import Markup + +from odoo import api, SUPERUSER_ID + +_logger = logging.getLogger(__name__) + + +def migrate(cr, version): + env = api.Environment(cr, SUPERUSER_ID, {}) + dockerfiles = env['runbot.dockerfile'].search([]) + for dockerfile in dockerfiles: + if dockerfile.template_id and not dockerfile.layer_ids: + dockerfile._template_to_layers() + + for dockerfile in dockerfiles: + if dockerfile.template_id and dockerfile.layer_ids: + dockerfile.message_post( + body=Markup('Was using template %s') % (dockerfile.template_id.id, dockerfile.template_id.name) + ) + dockerfile.template_id = False diff --git a/runbot/migrations/17.0.5.7/pre-migration.py b/runbot/migrations/17.0.5.7/pre-migration.py new file mode 100644 index 00000000..f2b245ae --- /dev/null +++ b/runbot/migrations/17.0.5.7/pre-migration.py @@ -0,0 +1,13 @@ +import logging + +from markupsafe import Markup + +from odoo import api, SUPERUSER_ID + +_logger = logging.getLogger(__name__) + + +def migrate(cr, version): + cr.execute("""DELETE FROM ir_model_data WHERE module='runbot' AND name = 'docker_base' RETURNING res_id""") + res_id = cr.fetchone()[0] + cr.execute("""UPDATE ir_ui_view SET key='runbot.docker_base' WHERE id = %s""", [res_id]) diff --git a/runbot/models/__init__.py b/runbot/models/__init__.py index 1da4e0e4..60601d60 100644 --- a/runbot/models/__init__.py +++ b/runbot/models/__init__.py @@ -15,6 +15,7 @@ from . import docker from . import host from . import ir_cron from . import ir_http +from . import ir_model_fields_converter from . import ir_qweb from . import ir_logging from . 
import project diff --git a/runbot/models/build.py b/runbot/models/build.py index 868ee1c2..04cc5c8d 100644 --- a/runbot/models/build.py +++ b/runbot/models/build.py @@ -39,6 +39,8 @@ COPY_WHITELIST = [ "orphan_result", ] +USERUID = os.getuid() +USERNAME = getpass.getuser() def make_selection(array): return [(elem, elem.replace('_', ' ').capitalize()) if isinstance(elem, str) else elem for elem in array] @@ -866,8 +868,7 @@ class BuildResult(models.Model): else: rc_content = starting_config self._write_file('.odoorc', rc_content) - user = getpass.getuser() - ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc') + ro_volumes[f'/home/{USERNAME}/.odoorc'] = self._path('.odoorc') kwargs.pop('build_dir', False) kwargs.pop('log_path', False) kwargs.pop('container_name', False) @@ -1110,7 +1111,7 @@ class BuildResult(models.Model): command = Command(pres, cmd, posts, finals=finals, config_tuples=config_tuples, cmd_checker=build) # use the username of the runbot host to connect to the databases - command.add_config_tuple('db_user', '%s' % pwd.getpwuid(os.getuid()).pw_name) + command.add_config_tuple('db_user', '%s' % pwd.getpwuid(USERUID).pw_name) if local_only: if grep(config_path, "--http-interface"): diff --git a/runbot/models/docker.py b/runbot/models/docker.py index aa4fd0d1..1995dc17 100644 --- a/runbot/models/docker.py +++ b/runbot/models/docker.py @@ -1,11 +1,123 @@ +import getpass import logging +import os import re -from odoo import models, fields, api +import time + +from odoo import api, fields, models from odoo.addons.base.models.ir_qweb import QWebException +from ..container import docker_build +from ..fields import JsonDictField + _logger = logging.getLogger(__name__) +USERUID = os.getuid() +USERGID = os.getgid() +USERNAME = getpass.getuser() + +class DockerLayer(models.Model): + _name = 'runbot.docker_layer' + _inherit = 'mail.thread' + _description = "Docker layer" + _order = 'sequence, id' + + name = fields.Char("Name", required=True) + sequence = fields.Integer("Sequence", default=100, tracking=True) + dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, tracking=True) + layer_type = fields.Selection([ + ('raw', "Raw"), + ('template', "Template"), + ('reference_layer', "Reference layer"), + ('reference_file', "Reference file"), + ], string="Layer type", default='raw', tracking=True) + content = fields.Text("Content", tracking=True) + packages = fields.Text("Packages", help="List of package, can be on multiple lines with comments", tracking=True) + rendered = fields.Text("Rendered", compute="_compute_rendered", recursive=True) + reference_docker_layer_id = fields.Many2one('runbot.docker_layer', index=True, tracking=True) + reference_dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, tracking=True) + values = JsonDictField() + referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', 'reference_docker_layer_id', string='Layers referencing this one direcly', readonly=True) + all_referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', compute="_compute_references", string='Layers referencing this one', readonly=True) + reference_count = fields.Integer('Number of references', compute='_compute_references') + has_xml_id = fields.Boolean(compute='_compute_has_xml_id', store=True) + + @api.depends('referencing_dockerlayer_ids', 'dockerfile_id.referencing_dockerlayer_ids') + def _compute_references(self): + for record in self: + record.all_referencing_dockerlayer_ids = record.referencing_dockerlayer_ids | 
record.dockerfile_id.referencing_dockerlayer_ids + record.reference_count = len(record.all_referencing_dockerlayer_ids) + + def _compute_has_xml_id(self): + existing_xml_id = set(self.env['ir.model.data'].search([('model', '=', self._name)]).mapped('res_id')) + for record in self: + record.has_xml_id = record.id and record.id in existing_xml_id + + @api.depends('layer_type', 'content', 'reference_docker_layer_id.rendered', 'reference_dockerfile_id.layer_ids.rendered', 'values', 'packages', 'name') + def _compute_rendered(self): + for layer in self: + rendered = layer._render_layer({}) + layer.rendered = rendered + + def _render_layer(self, custom_values): + base_values = { + 'USERUID': USERUID, + 'USERGID': USERGID, + 'USERNAME': USERNAME, + } + if packages := self._parse_packages(): + base_values['$packages'] = packages + + values = {**base_values, **self.values, **custom_values} + + if self.layer_type == 'raw': + rendered = self.content + elif self.layer_type == 'reference_layer': + if self.reference_docker_layer_id: + rendered = self.reference_docker_layer_id._render_layer(values) + else: + rendered = 'ERROR: no reference_docker_layer_id defined' + elif self.layer_type == 'reference_file': + if self.reference_dockerfile_id: + rendered = self.reference_dockerfile_id.layer_ids.render_layers(values) + else: + rendered = 'ERROR: no reference_docker_layer_id defined' + elif self.layer_type == 'template': + rendered = self._render_template(values) + if not rendered or rendered[0] != '#': + rendered = f'# {self.name}\n{rendered}' + return rendered + + def render_layers(self, values=None): + values = values or {} + return "\n\n".join(layer._render_layer(values) or "" for layer in self) + '\n' + + def _render_template(self, values): + values = {key: value for key, value in values.items() if f'{key}' in (self.content or '')} # filter on keys mainly to have a nicer comment. All default must be defined in self.values + rendered = self.content + if self.values.keys() - ['$packages']: + values_repr = str(values).replace("'", '"') + rendered = f"# {self.name or 'Rendering'} with values {values_repr}\n{rendered}" + + for key, value in values.items(): + rendered = rendered.replace('{%s}' % key, str(value)) + return rendered + + def _parse_packages(self): + packages = [packages.split('#')[0].strip() for packages in (self.packages or '').split('\n')] + packages = [package for package in packages if package] + return ' '.join(packages) + + def unlink(self): + to_unlink = self + for record in self: + if record.reference_count and record.dockerfile_id and not record.has_xml_id: + record.dockerfile_id = False + to_unlink = to_unlink - record + return super(DockerLayer, to_unlink).unlink() + + class Dockerfile(models.Model): _name = 'runbot.dockerfile' _inherit = [ 'mail.thread' ] @@ -24,13 +136,18 @@ class Dockerfile(models.Model): bundle_ids = fields.One2many('runbot.bundle', 'dockerfile_id', string='Used in Bundles') build_results = fields.One2many('runbot.docker_build_result', 'dockerfile_id', string='Build results') last_successful_result = fields.Many2one('runbot.docker_build_result', compute='_compute_last_successful_result') + layer_ids = fields.One2many('runbot.docker_layer', 'dockerfile_id', string='Layers', copy=True) + referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', 'reference_dockerfile_id', string='Layers referencing this one') + use_count = fields.Integer('Used count', compute="_compute_use_count", store=True) + # maybe we should have global values here? 
branch version, chrome version, ... then use a os layer when possible (jammy, ...) + # we could also have a variant param, to use the version image in a specific trigger? Add a layer or change a param? _sql_constraints = [('runbot_dockerfile_name_unique', 'unique(name)', 'A Dockerfile with this name already exists')] @api.returns('self', lambda value: value.id) def copy(self, default=None): copied_record = super().copy(default={'name': '%s (copy)' % self.name, 'to_build': False}) - copied_record.template_id = self.template_id.copy() + #copied_record.template_id = self.template_id.copy() copied_record.template_id.name = '%s (copy)' % copied_record.template_id.name copied_record.template_id.key = '%s (copy)' % copied_record.template_id.key return copied_record @@ -39,14 +156,31 @@ class Dockerfile(models.Model): for record in self: record.last_successful_result = next((result for result in record.build_results if result.result == 'success'), record.build_results.browse()) - @api.depends('template_id.arch_base') + @api.depends('bundle_ids', 'referencing_dockerlayer_ids', 'project_ids', 'version_ids') + def _compute_use_count(self): + for record in self: + record.use_count = len(record.bundle_ids) + len(record.referencing_dockerlayer_ids) + len(record.project_ids) + len(record.version_ids) + + @api.depends('template_id.arch_base', 'layer_ids.rendered', 'layer_ids.sequence') def _compute_dockerfile(self): for rec in self: - try: - res = rec.template_id._render_template(rec.template_id.id) if rec.template_id else '' - rec.dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip() - except QWebException: - rec.dockerfile = '' + content = '' + if rec.template_id: + try: + res = rec.template_id._render_template(rec.template_id.id) if rec.template_id else '' + dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip() + create_user = f"""\nRUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u {USERUID} -g {USERNAME} -G audio,video {USERNAME}\n""" + content = dockerfile + create_user + except QWebException: + content = '' + else: + content = rec.layer_ids.render_layers() + + switch_user = f"\nUSER {USERNAME}\n" + if not content.endswith(switch_user): + content = content + switch_user + + rec.dockerfile = content @api.depends('name') def _compute_image_tag(self): @@ -60,6 +194,142 @@ class Dockerfile(models.Model): keys = re.findall(r'.*)', 1) + regex = regex.replace(r'\{%s\}' % key, fr'.*') + if match := re.match(regex, clean_comments(content)): + new_values = {} + _logger.info('Matched existing template') + for key in base_layer.values: + new_values[key] = match.group(key) + values['reference_docker_layer_id'] = base_layer.id + values['values'] = new_values + values['layer_type'] = 'reference_layer' + break + else: + values['content'] = content + values['layer_type'] = 'raw' + self.env['runbot.docker_layer'].create(values) + + # add finals user managementlayers + self.env['runbot.docker_layer'].create({ + 'dockerfile_id': rec.id, + 'name': f'Create user for [{rec.name}]', + 'layer_type': 'reference_layer', + 'reference_docker_layer_id': create_user_layer_id, + }) + self.env['runbot.docker_layer'].create({ + 'dockerfile_id': rec.id, + 'name': f'Switch user for [{rec.name}]', + 'layer_type': 'template', + 'content': 'USER {USERNAME}', + }) + + def _build(self): + start = time.time() + docker_build_path = self.env['runbot.runbot']._path('docker', self.image_tag) + os.makedirs(docker_build_path, exist_ok=True) + + content = self.dockerfile + + with 
open(self.env['runbot.runbot']._path('docker', self.image_tag, 'Dockerfile'), 'w') as Dockerfile: + Dockerfile.write(content) + + docker_build_identifier, msg = docker_build(docker_build_path, self.image_tag) + duration = time.time() - start + docker_build_result_values = {'dockerfile_id': self.id, 'output': msg, 'duration': duration, 'content': content, 'host_id': self.id} + duration = time.time() - start + if docker_build_identifier: + docker_build_result_values['result'] = 'success' + docker_build_result_values['identifier'] = docker_build_identifier.id + if duration > 1: + _logger.info('Dockerfile %s finished build in %s', self.image_tag, duration) + else: + docker_build_result_values['result'] = 'error' + self.to_build = False + + should_save_result = not docker_build_identifier # always save in case of failure + if not should_save_result: + # check previous result anyway + previous_result = self.env['runbot.docker_build_result'].search([ + ('dockerfile_id', '=', self.id), + ('host_id', '=', self.id), + ], order='id desc', limit=1) + # identifier changed + if docker_build_identifier.id != previous_result.identifier: + should_save_result = True + if previous_result.output != docker_build_result_values['output']: # to discuss + should_save_result = True + if previous_result.content != docker_build_result_values['content']: # docker image changed + should_save_result = True + + if should_save_result: + result = self.env['runbot.docker_build_result'].create(docker_build_result_values) + if not docker_build_identifier: + message = f'Build failure, check results for more info ({result.summary})' + self.message_post(body=message) + _logger.error(message) + class DockerBuildOutput(models.Model): _name = 'runbot.docker_build_result' @@ -74,12 +344,13 @@ class DockerBuildOutput(models.Model): content = fields.Text('Content') identifier = fields.Char('Identifier') summary = fields.Char("Summary", compute='_compute_summary', store=True) + metadata = JsonDictField("Metadata", help="Additionnal data about this image generated by nightly builds") @api.depends('output') def _compute_summary(self): for record in self: summary = '' - for line in reversed(self.output.split('\n')): + for line in reversed(record.output.split('\n')): if len(line) > 5: summary = line break diff --git a/runbot/models/host.py b/runbot/models/host.py index df80f898..ca872073 100644 --- a/runbot/models/host.py +++ b/runbot/models/host.py @@ -1,13 +1,10 @@ import logging -import getpass -import time from collections import defaultdict from odoo import models, fields, api -from odoo.tools import config, ormcache, file_open +from odoo.tools import config, ormcache from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor -from ..container import docker_build _logger = logging.getLogger(__name__) @@ -129,58 +126,7 @@ class Host(models.Model): _logger.info('Done...') def _docker_build_dockerfile(self, dockerfile): - start = time.time() - docker_build_path = self.env['runbot.runbot']._path('docker', dockerfile.image_tag) - os.makedirs(docker_build_path, exist_ok=True) - - user = getpass.getuser() - - docker_append = f""" - RUN groupadd -g {os.getgid()} {user} \\ - && useradd -u {os.getuid()} -g {user} -G audio,video {user} \\ - && mkdir /home/{user} \\ - && chown -R {user}:{user} /home/{user} - USER {user} - ENV COVERAGE_FILE /data/build/.coverage - """ - content = dockerfile.dockerfile + docker_append - with open(self.env['runbot.runbot']._path('docker', dockerfile.image_tag, 'Dockerfile'), 'w') as 
Dockerfile: - Dockerfile.write(content) - - docker_build_identifier, msg = docker_build(docker_build_path, dockerfile.image_tag) - duration = time.time() - start - docker_build_result_values = {'dockerfile_id': dockerfile.id, 'output': msg, 'duration': duration, 'content': content, 'host_id': self.id} - duration = time.time() - start - if docker_build_identifier: - docker_build_result_values['result'] = 'success' - docker_build_result_values['identifier'] = docker_build_identifier.id - if duration > 1: - _logger.info('Dockerfile %s finished build in %s', dockerfile.image_tag, duration) - else: - docker_build_result_values['result'] = 'error' - dockerfile.to_build = False - - should_save_result = not docker_build_identifier # always save in case of failure - if not should_save_result: - # check previous result anyway - previous_result = self.env['runbot.docker_build_result'].search([ - ('dockerfile_id', '=', dockerfile.id), - ('host_id', '=', self.id), - ], order='id desc', limit=1) - # identifier changed - if docker_build_identifier.id != previous_result.identifier: - should_save_result = True - if previous_result.output != docker_build_result_values['output']: # to discuss - should_save_result = True - if previous_result.content != docker_build_result_values['content']: # docker image changed - should_save_result = True - - if should_save_result: - result = self.env['runbot.docker_build_result'].create(docker_build_result_values) - if not docker_build_identifier: - message = f'Build failure, check results for more info ({result.summary})' - dockerfile.message_post(body=message) - _logger.error(message) + dockerfile._build() @ormcache() def _host_list(self): diff --git a/runbot/models/ir_model_fields_converter.py b/runbot/models/ir_model_fields_converter.py new file mode 100644 index 00000000..1ee2e56a --- /dev/null +++ b/runbot/models/ir_model_fields_converter.py @@ -0,0 +1,7 @@ +from odoo import models + +class IrFieldsConverter(models.AbstractModel): + _inherit = 'ir.fields.converter' + + def _str_to_jsonb(self, model, field, value): + return self._str_to_json(model, field, value) diff --git a/runbot/models/version.py b/runbot/models/version.py index cd5cde55..51be9d58 100644 --- a/runbot/models/version.py +++ b/runbot/models/version.py @@ -23,7 +23,7 @@ class Version(models.Model): next_major_version_id = fields.Many2one('runbot.version', compute='_compute_version_relations') next_intermediate_version_ids = fields.Many2many('runbot.version', compute='_compute_version_relations') - dockerfile_id = fields.Many2one('runbot.dockerfile', default=lambda self: self.env.ref('runbot.docker_default', raise_if_not_found=False)) + dockerfile_id = fields.Many2one('runbot.dockerfile', default=lambda self: self.env['runbot.version'].search([('name', '=', 'master')], limit=1).dockerfile_id or self.env.ref('runbot.docker_default', raise_if_not_found=False)) @api.depends('name') def _compute_version_number(self): diff --git a/runbot/security/ir.model.access.csv b/runbot/security/ir.model.access.csv index b18bd0c4..d9df2e7b 100644 --- a/runbot/security/ir.model.access.csv +++ b/runbot/security/ir.model.access.csv @@ -128,6 +128,9 @@ access_runbot_upgrade_exception_admin,access_runbot_upgrade_exception_admin,runb access_runbot_dockerfile_user,access_runbot_dockerfile_user,runbot.model_runbot_dockerfile,runbot.group_user,1,0,0,0 access_runbot_dockerfile_admin,access_runbot_dockerfile_admin,runbot.model_runbot_dockerfile,runbot.group_runbot_admin,1,1,1,1 
+access_runbot_docker_layer_user,access_runbot_docker_layer_user,runbot.model_runbot_docker_layer,runbot.group_user,1,0,0,0 +access_runbot_docker_layer_admin,access_runbot_docker_layer_admin,runbot.model_runbot_docker_layer,runbot.group_runbot_admin,1,1,1,1 + access_runbot_docker_build_result_user,access_runbot_docker_build_result_user,runbot.model_runbot_docker_build_result,runbot.group_user,1,0,0,0 access_runbot_docker_build_result_admin,access_runbot_docker_build_result_admin,runbot.model_runbot_docker_build_result,runbot.group_runbot_admin,1,1,1,1 diff --git a/runbot/templates/dockerfile.xml b/runbot/templates/dockerfile.xml index 7311f746..20fbf1b3 100644 --- a/runbot/templates/dockerfile.xml +++ b/runbot/templates/dockerfile.xml @@ -2,152 +2,5 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runbot/tests/test_dockerfile.py b/runbot/tests/test_dockerfile.py index b1089b86..a902415b 100644 --- a/runbot/tests/test_dockerfile.py +++ b/runbot/tests/test_dockerfile.py @@ -1,6 +1,9 @@ # -*- coding: utf-8 -*- +import getpass import logging +import os +from odoo import Command from unittest.mock import patch, mock_open from odoo.tests.common import Form, tagged, HttpCase @@ -8,6 +11,9 @@ from .common import RunbotCase _logger = logging.getLogger(__name__) +USERUID = os.getuid() +USERGID = os.getgid() +USERNAME = getpass.getuser() @tagged('-at_install', 'post_install') class TestDockerfile(RunbotCase, HttpCase): @@ -19,44 +25,45 @@ class TestDockerfile(RunbotCase, HttpCase): r"""FROM ubuntu:jammy ENV LANG C.UTF-8 USER root +# Install debian packages RUN set -x ; \ apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends apt-transport-https build-essential ca-certificates curl ffmpeg file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release node-less ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends apt-transport-https build-essential ca-certificates curl ffmpeg file flake8 fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev \ && rm -rf /var/lib/apt/lists/* +# Install debian packages RUN set -x ; \ apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3 python3-dbfread python3-dev python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers plibpq-dev python3-gevent python3-websocket \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3 python3-dbfread python3-dev python3-gevent python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-websocket python3-google-auth libpq-dev python3-asn1crypto python3-jwt publicsuffix python3-xmlsec python3-aiosmtpd pylint \ && rm -rf /var/lib/apt/lists/* -# Install wkhtml -RUN curl -sSL https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb -o /tmp/wkhtml.deb \ +# Install wkhtmltopdf +RUN curl -sSL https://nightly.odoo.com/deb/jammy/wkhtmltox_0.12.5-2.jammy_amd64.deb -o /tmp/wkhtml.deb \ && apt-get update \ - && dpkg --force-depends -i /tmp/wkhtml.deb \ - && apt-get install -y -f --no-install-recommends \ + && DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends --fix-missing -qq /tmp/wkhtml.deb \ + && rm -rf 
/var/lib/apt/lists/* \ && rm /tmp/wkhtml.deb # Install nodejs with values {"node_version": "20"} -RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \ - && echo "deb https://deb.nodesource.com/node_20.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \ +RUN curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null \ + && echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \ && apt-get update \ && apt-get install -y nodejs -RUN npm install -g rtlcss es-check eslint -ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl -RUN chmod +rx /usr/local/bin/flamegraph.pl +RUN npm install -g rtlcss@3.4.0 es-check@6.0.0 eslint@8.1.0 prettier@2.7.1 eslint-config-prettier@8.5.0 eslint-plugin-prettier@4.2.1 # Install branch debian/control with values {"odoo_branch": "master"} ADD https://raw.githubusercontent.com/odoo/odoo/master/debian/control /tmp/control.txt RUN apt-get update \ && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \ - | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \ - | egrep -v 'postgresql-client' \ - | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \ - | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \ + | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \ + | egrep -v 'postgresql-client' \ + | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq --no-install-recommends \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN python3 -m pip install --no-cache-dir coverage==4.5.4 astroid==2.4.2 pylint==2.5.0 flamegraph +# Install pip packages with values {"$packages": "astroid==2.4.2 pylint==2.5.0"} +RUN python3 -m pip install --no-cache-dir astroid==2.4.2 pylint==2.5.0 +# Install pip packages with values {"$packages": "ebaysdk==2.1.5 pdf417gen==0.7.1"} RUN python3 -m pip install --no-cache-dir ebaysdk==2.1.5 pdf417gen==0.7.1 RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \ && echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgclient.list \ && apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-12 \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-14 \ && rm -rf /var/lib/apt/lists/* # Install chrome with values {"chrome_version": "123.0.6312.58-1"} RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_123.0.6312.58-1_amd64.deb -o /tmp/chrome.deb \ @@ -64,59 +71,44 @@ RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-s && apt-get -y install --no-install-recommends /tmp/chrome.deb \ && rm /tmp/chrome.deb # Install branch requirements with values {"odoo_branch": "master"} -ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /root/requirements.txt -RUN python3 -m pip install --no-cache-dir -r /root/requirements.txt""") +ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /tmp/requirements.txt +RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt""" f""" +# Create user template with values {{"USERUID": {USERUID}, "USERGID": {USERGID}, "USERNAME": "{USERNAME}"}} +RUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u 
{USERUID} -g {USERNAME} -G audio,video {USERNAME} +# Switch user with values {{"USERNAME": "{USERNAME}"}} +USER {USERNAME} +""") def test_dockerfile_base_fields(self): - xml_content = """ - - -""" - - focal_template = self.env['ir.ui.view'].create({ - 'name': 'docker_focal_test', - 'type': 'qweb', - 'key': 'docker.docker_focal_test', - 'arch_db': xml_content - }) - dockerfile = self.env['runbot.dockerfile'].create({ 'name': 'Tests Ubuntu Focal (20.0)[Chrome 86]', - 'template_id': focal_template.id, - 'to_build': True + 'to_build': True, + 'layer_ids': [ + Command.create({ + 'name': 'Customized base', + 'reference_dockerfile_id': self.env.ref('runbot.docker_default').id, + 'values': { + 'from': 'ubuntu:jammy', + 'phantom': True, + 'chrome_version': '86.0.4240.183-1', + }, + 'layer_type': 'reference_file', + }), + Command.create({ + 'name': 'Customized base', + 'packages': 'babel==2.8.0', + 'layer_type': 'reference_layer', + 'reference_docker_layer_id': self.env.ref('runbot.docker_layer_pip_packages_template').id, + }), + ], }) self.assertEqual(dockerfile.image_tag, 'odoo:TestsUbuntuFocal20.0Chrome86') self.assertTrue(dockerfile.dockerfile.startswith('FROM ubuntu:jammy')) - self.assertIn(' apt-get install -y -qq google-chrome-stable=86.0.4240.183-1', dockerfile.dockerfile) - self.assertIn('# Install phantomjs', dockerfile.dockerfile) + self.assertIn('86.0.4240.183-1', dockerfile.dockerfile) self.assertIn('pip install --no-cache-dir babel==2.8.0', dockerfile.dockerfile) - # test view update - xml_content = xml_content.replace('86.0.4240.183-1', '87.0-1') - dockerfile_form = Form(dockerfile) - dockerfile_form.arch_base = xml_content - dockerfile_form.save() + # test layer update + dockerfile.layer_ids[0].values = {**dockerfile.layer_ids[0].values, 'chrome_version': '87.0.4240.183-1'} - self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', dockerfile.dockerfile) - - # Ensure that only the test dockerfile will be found by docker_run - self.env['runbot.dockerfile'].search([('id', '!=', dockerfile.id)]).update({'to_build': False}) - - def write_side_effect(content): - self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', content) - - docker_build_mock = self.patchers['docker_build'] - docker_build_mock.return_value = (True, None) - mopen = mock_open() - rb_host = self.env['runbot.host'].create({'name': 'runbotxxx.odoo.com'}) - with patch('builtins.open', mopen) as file_mock: - file_handle_mock = file_mock.return_value.__enter__.return_value - file_handle_mock.write.side_effect = write_side_effect - rb_host._docker_build() + self.assertIn('Install chrome with values {"chrome_version": "87.0.4240.183-1"}', dockerfile.dockerfile) diff --git a/runbot/tests/test_version.py b/runbot/tests/test_version.py index f95b8d8c..166c8ac5 100644 --- a/runbot/tests/test_version.py +++ b/runbot/tests/test_version.py @@ -59,3 +59,12 @@ class TestVersion(RunbotCase): self.assertEqual(master.previous_major_version_id, v13) self.assertEqual(master.intermediate_version_ids, v133 | v132 | v131) + + def test_version_docker_file(self): + version18 = self.env['runbot.version'].create({'name': '18.0'}) + versionmaster = self.env['runbot.version'].search([('name', '=', 'master')]) + self.assertEqual(version18.dockerfile_id, versionmaster.dockerfile_id) + versionmaster.dockerfile_id = self.env['runbot.dockerfile'].create({'name': 'New dockefile for master'}) + version181 = self.env['runbot.version'].create({'name': '18.1'}) + self.assertEqual(version181.dockerfile_id, versionmaster.dockerfile_id) + 
self.assertEqual(version181.dockerfile_id.name, 'New dockefile for master') diff --git a/runbot/views/dockerfile_views.xml b/runbot/views/dockerfile_views.xml index a7e663e1..fb86e9b3 100644 --- a/runbot/views/dockerfile_views.xml +++ b/runbot/views/dockerfile_views.xml @@ -19,12 +19,23 @@ - - + + + + + + + + + + + + + @@ -33,13 +44,28 @@ - + + + + + + + + + + + + + + + + @@ -75,11 +101,59 @@ - + + + runbot.docker_layer.form + runbot.docker_layer + +
+ + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
+
+ + + runbot.docker_layer.tree + runbot.docker_layer + + + + + + + + + + + + runbot.docker_build_result.form @@ -135,5 +209,11 @@ tree,form + + Docker Layers + runbot.docker_layer + tree,form + + diff --git a/runbot/views/menus.xml b/runbot/views/menus.xml index a4be343e..c1e0a8df 100644 --- a/runbot/views/menus.xml +++ b/runbot/views/menus.xml @@ -30,8 +30,10 @@ + +
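
Note on the new layer rendering (illustrative, not part of the patch): the runbot.docker_layer
model added above builds a Dockerfile by rendering each layer; 'template' layers substitute
{KEY} placeholders from the layer's values, and the multi-line packages field is collapsed
into a single {$packages} string. The self-contained Python sketch below approximates that
substitution outside the ORM; the helper names (parse_packages, render_template) and the
demo values in __main__ are illustrative only, not the actual model API.

# Standalone sketch of the new layer rendering; this is NOT the ORM model from
# runbot/models/docker.py, only an approximation of its template substitution.

def parse_packages(packages_text):
    """One package per line, '#' starts a comment; returns a space-joined string."""
    lines = [line.split('#')[0].strip() for line in (packages_text or '').split('\n')]
    return ' '.join(line for line in lines if line)


def render_template(name, content, values=None, packages_text=''):
    """Substitute {KEY} placeholders, roughly like DockerLayer._render_template."""
    values = dict(values or {})
    packages = parse_packages(packages_text)
    if packages:
        values['$packages'] = packages
    # Keep only keys that appear in the template, mainly to get a readable comment.
    used = {key: value for key, value in values.items() if key in content}
    rendered = content
    if set(used) - {'$packages'}:
        values_repr = str(used).replace("'", '"')
        rendered = '# %s with values %s\n%s' % (name, values_repr, rendered)
    else:
        rendered = '# %s\n%s' % (name, rendered)
    for key, value in used.items():
        rendered = rendered.replace('{%s}' % key, str(value))
    return rendered


if __name__ == '__main__':
    pip_layer = 'RUN python3 -m pip install --no-cache-dir {$packages}'
    print(render_template('Install pip packages', pip_layer,
                          packages_text='babel==2.8.0  # hypothetical pin\nruff==0.4.7'))
    chrome_layer = ('RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/'
                    'google-chrome-stable/google-chrome-stable_{chrome_version}_amd64.deb'
                    ' -o /tmp/chrome.deb')
    print(render_template('Install chrome', chrome_layer,
                          values={'chrome_version': '123.0.6312.58-1'}))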