[IMP] runbot: rework dockerfile generation
This commit is contained in:
parent 8cb9599e90
commit a1c79c6f3c
@@ -6,7 +6,7 @@
     'author': "Odoo SA",
     'website': "http://runbot.odoo.com",
     'category': 'Website',
-    'version': '5.6',
+    'version': '5.7',
     'application': True,
     'depends': ['base', 'base_automation', 'website'],
     'data': [
@@ -9,6 +9,7 @@ When testing this file:
 The second parameter is the exposed port
 """
 import configparser
+import getpass
 import io
 import logging
 import os
@@ -28,6 +29,7 @@ with warnings.catch_warnings():
     )
     import docker

+USERNAME = getpass.getuser()

_logger = logging.getLogger(__name__)
docker_stop_failures = {}
@@ -198,7 +200,8 @@ def _docker_run(cmd=False, log_path=False, build_dir=False, container_name=False
         command=['/bin/bash', '-c',
                  f'exec &>> /data/buildlogs.txt ;{run_cmd}'],
         auto_remove=True,
-        detach=True
+        detach=True,
+        user=USERNAME,
     )
     if container.status not in ('running', 'created') :
         _logger.error('Container %s started but status is not running or created: %s', container_name, container.status)
@@ -2,8 +2,202 @@
 <odoo>
     <record model="runbot.dockerfile" id="runbot.docker_default">
         <field name="name">Docker Default</field>
-        <field name="template_id" ref="runbot.docker_base"/>
         <field name="to_build">True</field>
         <field name="description">Default Dockerfile for latest Odoo versions.</field>
     </record>
+
+    <record id="runbot.docker_layer_debian_packages_template" model="runbot.docker_layer">
+        <field name="sequence" eval="-1"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install debian packages</field>
+        <field name="values" eval="{}"></field>
+        <field name="content">RUN set -x ; \
+            apt-get update \
+            && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends {$packages} \
+            && rm -rf /var/lib/apt/lists/*</field>
+    </record>
+
+    <record id="runbot.docker_layer_pip_packages_template" model="runbot.docker_layer">
+        <field name="sequence" eval="-1"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install pip packages</field>
+        <field name="values">{}</field>
+        <field name="content">RUN python3 -m pip install --no-cache-dir {$packages}</field>
+    </record>
+
+    <record id="runbot.docker_layer_create_user_template" model="runbot.docker_layer">
+        <field name="sequence" eval="-1"/>
+        <field name="layer_type">template</field>
+        <field name="name">Create user template</field>
+        <field name="values">{"USERUID":"/missing/", "USERNAME":"/missing/", "USERUID":"/missing/"}</field>
+        <field name="content">RUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u {USERUID} -g {USERNAME} -G audio,video {USERNAME}</field>
+    </record>
+
+    <record id="runbot.docker_layer_from" model="runbot.docker_layer">
+        <field name="sequence" eval="0"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">FROM ubuntu:noble</field>
+        <field name="content">FROM ubuntu:noble</field>
+    </record>
+
+    <record id="runbot.docker_layer_lang" model="runbot.docker_layer">
+        <field name="sequence" eval="10"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">ENV LANG C.UTF-8</field>
+        <field name="content">ENV LANG C.UTF-8</field>
+    </record>
+
+    <record id="runbot.docker_layer_root_user" model="runbot.docker_layer">
+        <field name="sequence" eval="20"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">USER root</field>
+        <field name="content">USER root</field>
+    </record>
+
+    <record id="runbot.docker_layer_deb" model="runbot.docker_layer">
+        <field name="sequence" eval="100"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">reference_layer</field>
+        <field name="name">Install base debian packages</field>
+        <field name="packages">apt-transport-https build-essential ca-certificates curl file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release npm ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev</field>
+        <field name="reference_docker_layer_id" ref="runbot.docker_layer_debian_packages_template"/>
+    </record>
+
+    <record id="runbot.docker_layer_pydebs" model="runbot.docker_layer">
+        <field name="sequence" eval="110"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">reference_layer</field>
+        <field name="name">Install python debian packages</field>
+        <field name="packages">publicsuffix python3 flake8 python3-dbfread python3-dev python3-gevent python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-websocket python3-google-auth libpq-dev pylint python3-jwt python3-asn1crypto python3-html2text python3-suds python3-xmlsec</field>
+        <field name="reference_docker_layer_id" ref="runbot.docker_layer_debian_packages_template"/>
+    </record>
+
+    <record id="runbot.docker_layer_wkhtml" model="runbot.docker_layer">
+        <field name="sequence" eval="200"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install wkhtmltopdf</field>
+        <field name="values" eval="{'wkhtmltopdf_version': '0.12.6.1-2', 'wkhtmltopdf_os_release': 'jammy'}"/>
+        <field name="content">RUN curl -sSL https://github.com/wkhtmltopdf/packaging/releases/download/{wkhtmltopdf_version}/wkhtmltox_{wkhtmltopdf_version}.{wkhtmltopdf_os_release}_amd64.deb -o /tmp/wkhtml.deb \
+            && apt-get update \
+            && DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends --fix-missing -qq /tmp/wkhtml.deb \
+            && rm -rf /var/lib/apt/lists/* \
+            && rm /tmp/wkhtml.deb</field>
+    </record>
+
+    <record id="runbot.env_node_path" model="runbot.docker_layer">
+        <field name="sequence" eval="300"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">ENV NODE_PATH=/usr/lib/node_modules/</field>
+        <field name="content">ENV NODE_PATH=/usr/lib/node_modules/</field>
+    </record>
+
+    <record id="runbot.env_npm_config_prefix" model="runbot.docker_layer">
+        <field name="sequence" eval="310"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">ENV npm_config_prefix=/usr</field>
+        <field name="content">ENV npm_config_prefix=/usr</field>
+    </record>
+
+    <record id="runbot.docker_layer_npminstall" model="runbot.docker_layer">
+        <field name="sequence" eval="320"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">raw</field>
+        <field name="name">RUN npm install</field>
+        <field name="content">RUN npm install --force -g rtlcss@3.4.0 es-check@6.0.0 eslint@8.1.0 prettier@2.7.1 eslint-config-prettier@8.5.0 eslint-plugin-prettier@4.2.1</field>
+    </record>
+
+    <record id="runbot.docker_layer_masterdebiancontroll" model="runbot.docker_layer">
+        <field name="sequence" eval="500"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install branch debian/control with latest postgresql-client</field>
+        <field name="values" eval="{'odoo_branch': 'master', 'os_release_name': '`lsb_release -s -c`'}"/>
+        <field name="content"># This layer updates the repository list to get the latest postgresql-client, mainly needed if the host postgresql version is higher than the default version of the docker os
+ADD https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/debian/control /tmp/control.txt
+RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc -o /etc/apt/trusted.gpg.d/psql_client.asc \
+    && echo "deb http://apt.postgresql.org/pub/repos/apt/ {os_release_name}-pgdg main" > /etc/apt/sources.list.d/pgclient.list \
+    && apt-get update \
+    && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \
+    | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \
+    | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq --no-install-recommends \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*</field>
+    </record>
+
+    <record id="runbot.docker_layer_chrome" model="runbot.docker_layer">
+        <field name="sequence" eval="600"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install chrome</field>
+        <field name="values" eval="{'chrome_version': '123.0.6312.58-1'}"/>
+        <field name="content">RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_{chrome_version}_amd64.deb -o /tmp/chrome.deb \
+            && apt-get update \
+            && apt-get -y install --no-install-recommends /tmp/chrome.deb \
+            && rm /tmp/chrome.deb</field>
+    </record>
+
+    <record id="runbot.docker_layer_delete_user_ubuntu" model="runbot.docker_layer">
+        <field name="sequence" eval="1000"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">RUN deluser ubuntu</field>
+        <field name="content"># Ubuntu Noble decided to add a default use ubuntu with id 1000 in the image, that may interact with the user creation, lets remove it
+RUN deluser ubuntu</field>
+    </record>
+
+    <record id="runbot.docker_layer_create_user" model="runbot.docker_layer">
+        <field name="sequence" eval="1010"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">reference_layer</field>
+        <field name="name">Create user for docker default</field>
+        <field name="reference_docker_layer_id" ref="runbot.docker_layer_create_user_template"/>
+    </record>
+
+    <record id="runbot.docker_layer_switch_user" model="runbot.docker_layer">
+        <field name="sequence" eval="1020"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">Switch user</field>
+        <field name="content">USER {USERNAME}</field>
+    </record>
+
+    <record id="runbot.docker_pip_break_system_packages" model="runbot.docker_layer">
+        <field name="sequence" eval="1100"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">ENV PIP_BREAK_SYSTEM_PACKAGES=1</field>
+        <field name="content"># needed to install requirements outside a venv
+ENV PIP_BREAK_SYSTEM_PACKAGES=1</field>
+    </record>
+
+    <record id="runbot.docker_layer_external_dependencies_deps" model="runbot.docker_layer">
+        <field name="sequence" eval="1110"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">reference_layer</field>
+        <field name="name">Install external_dependencies deps</field>
+        <field name="packages">
+            ebaysdk==2.1.5 # no debian package but needed in odoo requirements
+            pdf417gen==0.7.1 # needed by l10n_cl_edi
+            ruff==0.4.7 # runbot check style
+        </field>
+        <field name="reference_docker_layer_id" ref="runbot.docker_layer_pip_packages_template"/>
+    </record>
+
+    <record id="runbot.docker_layer_branch_req" model="runbot.docker_layer">
+        <field name="sequence" eval="1200"/>
+        <field name="dockerfile_id" ref="runbot.docker_default"/>
+        <field name="layer_type">template</field>
+        <field name="name">Install branch requirements</field>
+        <field name="values" eval="{'odoo_branch': 'master'}"/>
+        <field name="content">ADD --chown={USERNAME} https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/requirements.txt /tmp/requirements.txt
+RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt</field>
+    </record>
+
 </odoo>
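The template layers above are not QWeb views: their content holds plain placeholders such as {$packages}, {USERNAME}, {USERUID} and {USERGID}, which are filled in from the layer's values, its parsed packages field and the host user (see _render_template further below). A minimal standalone sketch of that substitution, with an illustrative package list that is not taken from the data above:

# Sketch of the placeholder substitution used by template layers; the package
# list below is illustrative only, the template text mirrors the record above.
import getpass
import os

template = (
    "RUN set -x ; \\\n"
    "    apt-get update \\\n"
    "    && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends {$packages} \\\n"
    "    && rm -rf /var/lib/apt/lists/*"
)

values = {
    '$packages': 'curl gnupg zip',   # normally parsed from the layer's "packages" field
    'USERNAME': getpass.getuser(),
    'USERUID': os.getuid(),
    'USERGID': os.getgid(),
}

rendered = template
for key, value in values.items():
    rendered = rendered.replace('{%s}' % key, str(value))

print(rendered)  # a ready-to-use RUN instruction for the generated Dockerfile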

runbot/migrations/17.0.5.7/post-migration.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+import logging
+
+from markupsafe import Markup
+
+from odoo import api, SUPERUSER_ID
+
+_logger = logging.getLogger(__name__)
+
+
+def migrate(cr, version):
+    env = api.Environment(cr, SUPERUSER_ID, {})
+    dockerfiles = env['runbot.dockerfile'].search([])
+    for dockerfile in dockerfiles:
+        if dockerfile.template_id and not dockerfile.layer_ids:
+            dockerfile._template_to_layers()
+
+    for dockerfile in dockerfiles:
+        if dockerfile.template_id and dockerfile.layer_ids:
+            dockerfile.message_post(
+                body=Markup('Was using template <a href="/web#id=%s&model=ir.ui.view&view_type=form">%s</a>') % (dockerfile.template_id.id, dockerfile.template_id.name)
+            )
+            dockerfile.template_id = False

runbot/migrations/17.0.5.7/pre-migration.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+import logging
+
+from markupsafe import Markup
+
+from odoo import api, SUPERUSER_ID
+
+_logger = logging.getLogger(__name__)
+
+
+def migrate(cr, version):
+    cr.execute("""DELETE FROM ir_model_data WHERE module='runbot' AND name = 'docker_base' RETURNING res_id""")
+    res_id = cr.fetchone()[0]
+    cr.execute("""UPDATE ir_ui_view SET key='runbot.docker_base' WHERE id = %s""", [res_id])
@@ -15,6 +15,7 @@ from . import docker
 from . import host
 from . import ir_cron
 from . import ir_http
+from . import ir_model_fields_converter
 from . import ir_qweb
 from . import ir_logging
 from . import project
@@ -39,6 +39,8 @@ COPY_WHITELIST = [
     "orphan_result",
 ]

+USERUID = os.getuid()
+USERNAME = getpass.getuser()

 def make_selection(array):
     return [(elem, elem.replace('_', ' ').capitalize()) if isinstance(elem, str) else elem for elem in array]
@@ -866,8 +868,7 @@ class BuildResult(models.Model):
         else:
             rc_content = starting_config
         self._write_file('.odoorc', rc_content)
-        user = getpass.getuser()
-        ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc')
+        ro_volumes[f'/home/{USERNAME}/.odoorc'] = self._path('.odoorc')
         kwargs.pop('build_dir', False)
         kwargs.pop('log_path', False)
         kwargs.pop('container_name', False)
@@ -1110,7 +1111,7 @@ class BuildResult(models.Model):
         command = Command(pres, cmd, posts, finals=finals, config_tuples=config_tuples, cmd_checker=build)

         # use the username of the runbot host to connect to the databases
-        command.add_config_tuple('db_user', '%s' % pwd.getpwuid(os.getuid()).pw_name)
+        command.add_config_tuple('db_user', '%s' % pwd.getpwuid(USERUID).pw_name)

         if local_only:
             if grep(config_path, "--http-interface"):
@@ -1,11 +1,123 @@
+import getpass
 import logging
+import os
 import re
-from odoo import models, fields, api
+import time
+
+from odoo import api, fields, models
 from odoo.addons.base.models.ir_qweb import QWebException

+from ..container import docker_build
+from ..fields import JsonDictField
+
 _logger = logging.getLogger(__name__)


+USERUID = os.getuid()
+USERGID = os.getgid()
+USERNAME = getpass.getuser()
+
+
+class DockerLayer(models.Model):
+    _name = 'runbot.docker_layer'
+    _inherit = 'mail.thread'
+    _description = "Docker layer"
+    _order = 'sequence, id'
+
+    name = fields.Char("Name", required=True)
+    sequence = fields.Integer("Sequence", default=100, tracking=True)
+    dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, tracking=True)
+    layer_type = fields.Selection([
+        ('raw', "Raw"),
+        ('template', "Template"),
+        ('reference_layer', "Reference layer"),
+        ('reference_file', "Reference file"),
+    ], string="Layer type", default='raw', tracking=True)
+    content = fields.Text("Content", tracking=True)
+    packages = fields.Text("Packages", help="List of package, can be on multiple lines with comments", tracking=True)
+    rendered = fields.Text("Rendered", compute="_compute_rendered", recursive=True)
+    reference_docker_layer_id = fields.Many2one('runbot.docker_layer', index=True, tracking=True)
+    reference_dockerfile_id = fields.Many2one('runbot.dockerfile', index=True, tracking=True)
+    values = JsonDictField()
+    referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', 'reference_docker_layer_id', string='Layers referencing this one direcly', readonly=True)
+    all_referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', compute="_compute_references", string='Layers referencing this one', readonly=True)
+    reference_count = fields.Integer('Number of references', compute='_compute_references')
+    has_xml_id = fields.Boolean(compute='_compute_has_xml_id', store=True)
+
+    @api.depends('referencing_dockerlayer_ids', 'dockerfile_id.referencing_dockerlayer_ids')
+    def _compute_references(self):
+        for record in self:
+            record.all_referencing_dockerlayer_ids = record.referencing_dockerlayer_ids | record.dockerfile_id.referencing_dockerlayer_ids
+            record.reference_count = len(record.all_referencing_dockerlayer_ids)
+
+    def _compute_has_xml_id(self):
+        existing_xml_id = set(self.env['ir.model.data'].search([('model', '=', self._name)]).mapped('res_id'))
+        for record in self:
+            record.has_xml_id = record.id and record.id in existing_xml_id
+
+    @api.depends('layer_type', 'content', 'reference_docker_layer_id.rendered', 'reference_dockerfile_id.layer_ids.rendered', 'values', 'packages', 'name')
+    def _compute_rendered(self):
+        for layer in self:
+            rendered = layer._render_layer({})
+            layer.rendered = rendered
+
+    def _render_layer(self, custom_values):
+        base_values = {
+            'USERUID': USERUID,
+            'USERGID': USERGID,
+            'USERNAME': USERNAME,
+        }
+        if packages := self._parse_packages():
+            base_values['$packages'] = packages
+
+        values = {**base_values, **self.values, **custom_values}
+
+        if self.layer_type == 'raw':
+            rendered = self.content
+        elif self.layer_type == 'reference_layer':
+            if self.reference_docker_layer_id:
+                rendered = self.reference_docker_layer_id._render_layer(values)
+            else:
+                rendered = 'ERROR: no reference_docker_layer_id defined'
+        elif self.layer_type == 'reference_file':
+            if self.reference_dockerfile_id:
+                rendered = self.reference_dockerfile_id.layer_ids.render_layers(values)
+            else:
+                rendered = 'ERROR: no reference_docker_layer_id defined'
+        elif self.layer_type == 'template':
+            rendered = self._render_template(values)
+        if not rendered or rendered[0] != '#':
+            rendered = f'# {self.name}\n{rendered}'
+        return rendered
+
+    def render_layers(self, values=None):
+        values = values or {}
+        return "\n\n".join(layer._render_layer(values) or "" for layer in self) + '\n'
+
+    def _render_template(self, values):
+        values = {key: value for key, value in values.items() if f'{key}' in (self.content or '')}  # filter on keys mainly to have a nicer comment. All default must be defined in self.values
+        rendered = self.content
+        if self.values.keys() - ['$packages']:
+            values_repr = str(values).replace("'", '"')
+            rendered = f"# {self.name or 'Rendering'} with values {values_repr}\n{rendered}"
+
+        for key, value in values.items():
+            rendered = rendered.replace('{%s}' % key, str(value))
+        return rendered
+
+    def _parse_packages(self):
+        packages = [packages.split('#')[0].strip() for packages in (self.packages or '').split('\n')]
+        packages = [package for package in packages if package]
+        return ' '.join(packages)
+
+    def unlink(self):
+        to_unlink = self
+        for record in self:
+            if record.reference_count and record.dockerfile_id and not record.has_xml_id:
+                record.dockerfile_id = False
+                to_unlink = to_unlink - record
+        return super(DockerLayer, to_unlink).unlink()
+
+
 class Dockerfile(models.Model):
     _name = 'runbot.dockerfile'
     _inherit = [ 'mail.thread' ]
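In _render_layer above, three dictionaries are merged and later ones win: host defaults (USERUID, USERGID, USERNAME, plus $packages parsed from the packages field), then the layer's stored values, then the custom_values passed by a referencing layer or dockerfile. A small sketch of that precedence with made-up values:

# Value precedence in _render_layer: base host values < layer values < custom values.
# The concrete names and numbers here are placeholders for illustration.
base_values = {'USERUID': 1000, 'USERGID': 1000, 'USERNAME': 'runbot', '$packages': 'curl zip'}
layer_values = {'odoo_branch': 'master'}     # the layer's stored "values" field
custom_values = {'odoo_branch': '17.0'}      # passed in by a referencing layer

values = {**base_values, **layer_values, **custom_values}
assert values['odoo_branch'] == '17.0'       # custom values override the layer defaults
assert values['USERNAME'] == 'runbot'        # host defaults stay available to templates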
@@ -24,13 +136,18 @@ class Dockerfile(models.Model):
     bundle_ids = fields.One2many('runbot.bundle', 'dockerfile_id', string='Used in Bundles')
     build_results = fields.One2many('runbot.docker_build_result', 'dockerfile_id', string='Build results')
     last_successful_result = fields.Many2one('runbot.docker_build_result', compute='_compute_last_successful_result')
+    layer_ids = fields.One2many('runbot.docker_layer', 'dockerfile_id', string='Layers', copy=True)
+    referencing_dockerlayer_ids = fields.One2many('runbot.docker_layer', 'reference_dockerfile_id', string='Layers referencing this one')
+    use_count = fields.Integer('Used count', compute="_compute_use_count", store=True)
+    # maybe we should have global values here? branch version, chrome version, ... then use a os layer when possible (jammy, ...)
+    # we could also have a variant param, to use the version image in a specific trigger? Add a layer or change a param?

     _sql_constraints = [('runbot_dockerfile_name_unique', 'unique(name)', 'A Dockerfile with this name already exists')]

     @api.returns('self', lambda value: value.id)
     def copy(self, default=None):
         copied_record = super().copy(default={'name': '%s (copy)' % self.name, 'to_build': False})
-        copied_record.template_id = self.template_id.copy()
+        #copied_record.template_id = self.template_id.copy()
         copied_record.template_id.name = '%s (copy)' % copied_record.template_id.name
         copied_record.template_id.key = '%s (copy)' % copied_record.template_id.key
         return copied_record
@@ -39,14 +156,31 @@
         for record in self:
             record.last_successful_result = next((result for result in record.build_results if result.result == 'success'), record.build_results.browse())

-    @api.depends('template_id.arch_base')
+    @api.depends('bundle_ids', 'referencing_dockerlayer_ids', 'project_ids', 'version_ids')
+    def _compute_use_count(self):
+        for record in self:
+            record.use_count = len(record.bundle_ids) + len(record.referencing_dockerlayer_ids) + len(record.project_ids) + len(record.version_ids)
+
+    @api.depends('template_id.arch_base', 'layer_ids.rendered', 'layer_ids.sequence')
     def _compute_dockerfile(self):
         for rec in self:
-            try:
-                res = rec.template_id._render_template(rec.template_id.id) if rec.template_id else ''
-                rec.dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip()
-            except QWebException:
-                rec.dockerfile = ''
+            content = ''
+            if rec.template_id:
+                try:
+                    res = rec.template_id._render_template(rec.template_id.id) if rec.template_id else ''
+                    dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip()
+                    create_user = f"""\nRUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u {USERUID} -g {USERNAME} -G audio,video {USERNAME}\n"""
+                    content = dockerfile + create_user
+                except QWebException:
+                    content = ''
+            else:
+                content = rec.layer_ids.render_layers()
+
+            switch_user = f"\nUSER {USERNAME}\n"
+            if not content.endswith(switch_user):
+                content = content + switch_user
+
+            rec.dockerfile = content

     @api.depends('name')
     def _compute_image_tag(self):
|
|||||||
keys = re.findall(r'<t.+t-call="(.+)".+', rec.arch_base or '')
|
keys = re.findall(r'<t.+t-call="(.+)".+', rec.arch_base or '')
|
||||||
rec.view_ids = self.env['ir.ui.view'].search([('type', '=', 'qweb'), ('key', 'in', keys)]).ids
|
rec.view_ids = self.env['ir.ui.view'].search([('type', '=', 'qweb'), ('key', 'in', keys)]).ids
|
||||||
|
|
||||||
|
def _template_to_layers(self):
|
||||||
|
|
||||||
|
##
|
||||||
|
# Notes: This is working fine, but missing
|
||||||
|
# - debian packages layer (multiline),
|
||||||
|
# - setup tools and wheel pip (not usefull anymore? )
|
||||||
|
# - args goole chrome (maybe we should introduce that in the layers management instead of values?)
|
||||||
|
# - doc requirements
|
||||||
|
# - geo
|
||||||
|
##
|
||||||
|
def clean_comments(text):
|
||||||
|
result = '\n'.join([line.strip() for line in text.split('\n') if not line.startswith('#')])
|
||||||
|
result = result.replace('\\\n', '')
|
||||||
|
return result
|
||||||
|
|
||||||
|
env = self.env
|
||||||
|
base_layers = env['runbot.docker_layer'].browse(env['ir.model.data'].search([('model', '=', 'runbot.docker_layer')]).mapped('res_id'))
|
||||||
|
create_user_layer_id = env.ref('runbot.docker_layer_create_user_template').id
|
||||||
|
for rec in self:
|
||||||
|
if rec.template_id and not rec.layer_ids:
|
||||||
|
_logger.info('Converting %s in layers', rec.name)
|
||||||
|
layers = []
|
||||||
|
comments = []
|
||||||
|
previous_directive_add = False
|
||||||
|
content = rec.template_id._render_template(rec.template_id.id)
|
||||||
|
for line in content.split('\n'):
|
||||||
|
# should we consider all layers instead of base_layersbase_layers ?
|
||||||
|
if not line.strip():
|
||||||
|
continue
|
||||||
|
|
||||||
|
if line.startswith('#'):
|
||||||
|
comments.append(line)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if any(line.startswith(directive) for directive in ['FROM', 'ENV', 'USER', 'SET', 'ADD', 'RUN', 'COPY', 'ARG']):
|
||||||
|
if (previous_directive_add and line.startswith('RUN')):
|
||||||
|
_logger.info('Keeping ADD in same layer than RUN')
|
||||||
|
else:
|
||||||
|
layers.append([])
|
||||||
|
previous_directive_add = line.startswith('ADD')
|
||||||
|
|
||||||
|
layers[-1] += comments
|
||||||
|
comments = []
|
||||||
|
layers[-1].append(line)
|
||||||
|
|
||||||
|
for layer in layers:
|
||||||
|
content = '\n'.join(layer)
|
||||||
|
values = {
|
||||||
|
'dockerfile_id': rec.id,
|
||||||
|
'name': f'{rec.name}: Migrated layer',
|
||||||
|
}
|
||||||
|
|
||||||
|
for base_layer in base_layers:
|
||||||
|
if clean_comments(base_layer.rendered) == clean_comments(content):
|
||||||
|
values['reference_docker_layer_id'] = base_layer.id
|
||||||
|
values['layer_type'] = 'reference_layer'
|
||||||
|
_logger.info('Matched existing layer')
|
||||||
|
break
|
||||||
|
if base_layer.layer_type == 'template':
|
||||||
|
regex = re.escape(clean_comments(base_layer.content)).replace('"', r'\"') # for astrange reason, re.escape does not escape "
|
||||||
|
for key in base_layer.values:
|
||||||
|
regex = regex.replace(r'\{%s\}' % key, fr'(?P<{key}>.*)', 1)
|
||||||
|
regex = regex.replace(r'\{%s\}' % key, fr'.*')
|
||||||
|
if match := re.match(regex, clean_comments(content)):
|
||||||
|
new_values = {}
|
||||||
|
_logger.info('Matched existing template')
|
||||||
|
for key in base_layer.values:
|
||||||
|
new_values[key] = match.group(key)
|
||||||
|
values['reference_docker_layer_id'] = base_layer.id
|
||||||
|
values['values'] = new_values
|
||||||
|
values['layer_type'] = 'reference_layer'
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
values['content'] = content
|
||||||
|
values['layer_type'] = 'raw'
|
||||||
|
self.env['runbot.docker_layer'].create(values)
|
||||||
|
|
||||||
|
# add finals user managementlayers
|
||||||
|
self.env['runbot.docker_layer'].create({
|
||||||
|
'dockerfile_id': rec.id,
|
||||||
|
'name': f'Create user for [{rec.name}]',
|
||||||
|
'layer_type': 'reference_layer',
|
||||||
|
'reference_docker_layer_id': create_user_layer_id,
|
||||||
|
})
|
||||||
|
self.env['runbot.docker_layer'].create({
|
||||||
|
'dockerfile_id': rec.id,
|
||||||
|
'name': f'Switch user for [{rec.name}]',
|
||||||
|
'layer_type': 'template',
|
||||||
|
'content': 'USER {USERNAME}',
|
||||||
|
})
|
||||||
|
|
||||||
|
def _build(self):
|
||||||
|
start = time.time()
|
||||||
|
docker_build_path = self.env['runbot.runbot']._path('docker', self.image_tag)
|
||||||
|
os.makedirs(docker_build_path, exist_ok=True)
|
||||||
|
|
||||||
|
content = self.dockerfile
|
||||||
|
|
||||||
|
with open(self.env['runbot.runbot']._path('docker', self.image_tag, 'Dockerfile'), 'w') as Dockerfile:
|
||||||
|
Dockerfile.write(content)
|
||||||
|
|
||||||
|
docker_build_identifier, msg = docker_build(docker_build_path, self.image_tag)
|
||||||
|
duration = time.time() - start
|
||||||
|
docker_build_result_values = {'dockerfile_id': self.id, 'output': msg, 'duration': duration, 'content': content, 'host_id': self.id}
|
||||||
|
duration = time.time() - start
|
||||||
|
if docker_build_identifier:
|
||||||
|
docker_build_result_values['result'] = 'success'
|
||||||
|
docker_build_result_values['identifier'] = docker_build_identifier.id
|
||||||
|
if duration > 1:
|
||||||
|
_logger.info('Dockerfile %s finished build in %s', self.image_tag, duration)
|
||||||
|
else:
|
||||||
|
docker_build_result_values['result'] = 'error'
|
||||||
|
self.to_build = False
|
||||||
|
|
||||||
|
should_save_result = not docker_build_identifier # always save in case of failure
|
||||||
|
if not should_save_result:
|
||||||
|
# check previous result anyway
|
||||||
|
previous_result = self.env['runbot.docker_build_result'].search([
|
||||||
|
('dockerfile_id', '=', self.id),
|
||||||
|
('host_id', '=', self.id),
|
||||||
|
], order='id desc', limit=1)
|
||||||
|
# identifier changed
|
||||||
|
if docker_build_identifier.id != previous_result.identifier:
|
||||||
|
should_save_result = True
|
||||||
|
if previous_result.output != docker_build_result_values['output']: # to discuss
|
||||||
|
should_save_result = True
|
||||||
|
if previous_result.content != docker_build_result_values['content']: # docker image changed
|
||||||
|
should_save_result = True
|
||||||
|
|
||||||
|
if should_save_result:
|
||||||
|
result = self.env['runbot.docker_build_result'].create(docker_build_result_values)
|
||||||
|
if not docker_build_identifier:
|
||||||
|
message = f'Build failure, check results for more info ({result.summary})'
|
||||||
|
self.message_post(body=message)
|
||||||
|
_logger.error(message)
|
||||||
|
|
||||||
|
|
||||||
class DockerBuildOutput(models.Model):
|
class DockerBuildOutput(models.Model):
|
||||||
_name = 'runbot.docker_build_result'
|
_name = 'runbot.docker_build_result'
|
||||||
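_template_to_layers above splits the old QWeb-rendered Dockerfile into layers by directive, attaching leading comments to the next instruction and keeping an ADD in the same layer as the RUN that follows it. A standalone sketch of that grouping heuristic on a made-up four-line Dockerfile:

# Sketch of the directive-grouping heuristic used by _template_to_layers.
# The sample Dockerfile text is illustrative only.
sample = """# Install wkhtmltopdf
ADD https://example.com/wkhtml.deb /tmp/wkhtml.deb
RUN apt-get install -y /tmp/wkhtml.deb
ENV LANG C.UTF-8"""

DIRECTIVES = ('FROM', 'ENV', 'USER', 'SET', 'ADD', 'RUN', 'COPY', 'ARG')

layers, comments, previous_was_add = [], [], False
for line in sample.split('\n'):
    if not line.strip():
        continue
    if line.startswith('#'):
        comments.append(line)      # comments stick to the next instruction
        continue
    if line.startswith(DIRECTIVES):
        if not (previous_was_add and line.startswith('RUN')):
            layers.append([])      # start a new layer, unless this RUN follows an ADD
        previous_was_add = line.startswith('ADD')
    layers[-1] += comments
    comments = []
    layers[-1].append(line)

# Result: two layers; the ADD and its RUN stay together in the first one.
assert len(layers) == 2
assert layers[0][0].startswith('# Install wkhtmltopdf')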
@@ -74,12 +344,13 @@
     content = fields.Text('Content')
     identifier = fields.Char('Identifier')
     summary = fields.Char("Summary", compute='_compute_summary', store=True)
+    metadata = JsonDictField("Metadata", help="Additionnal data about this image generated by nightly builds")

     @api.depends('output')
     def _compute_summary(self):
         for record in self:
             summary = ''
-            for line in reversed(self.output.split('\n')):
+            for line in reversed(record.output.split('\n')):
                 if len(line) > 5:
                     summary = line
                     break
@@ -1,13 +1,10 @@
 import logging
-import getpass
-import time

 from collections import defaultdict

 from odoo import models, fields, api
-from odoo.tools import config, ormcache, file_open
+from odoo.tools import config, ormcache
 from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor
-from ..container import docker_build

 _logger = logging.getLogger(__name__)

@@ -129,58 +126,7 @@
         _logger.info('Done...')

     def _docker_build_dockerfile(self, dockerfile):
-        start = time.time()
-        docker_build_path = self.env['runbot.runbot']._path('docker', dockerfile.image_tag)
-        os.makedirs(docker_build_path, exist_ok=True)
-
-        user = getpass.getuser()
-
-        docker_append = f"""
-RUN groupadd -g {os.getgid()} {user} \\
-    && useradd -u {os.getuid()} -g {user} -G audio,video {user} \\
-    && mkdir /home/{user} \\
-    && chown -R {user}:{user} /home/{user}
-USER {user}
-ENV COVERAGE_FILE /data/build/.coverage
-"""
-        content = dockerfile.dockerfile + docker_append
-        with open(self.env['runbot.runbot']._path('docker', dockerfile.image_tag, 'Dockerfile'), 'w') as Dockerfile:
-            Dockerfile.write(content)
-
-        docker_build_identifier, msg = docker_build(docker_build_path, dockerfile.image_tag)
-        duration = time.time() - start
-        docker_build_result_values = {'dockerfile_id': dockerfile.id, 'output': msg, 'duration': duration, 'content': content, 'host_id': self.id}
-        duration = time.time() - start
-        if docker_build_identifier:
-            docker_build_result_values['result'] = 'success'
-            docker_build_result_values['identifier'] = docker_build_identifier.id
-            if duration > 1:
-                _logger.info('Dockerfile %s finished build in %s', dockerfile.image_tag, duration)
-        else:
-            docker_build_result_values['result'] = 'error'
-            dockerfile.to_build = False
-
-        should_save_result = not docker_build_identifier  # always save in case of failure
-        if not should_save_result:
-            # check previous result anyway
-            previous_result = self.env['runbot.docker_build_result'].search([
-                ('dockerfile_id', '=', dockerfile.id),
-                ('host_id', '=', self.id),
-            ], order='id desc', limit=1)
-            # identifier changed
-            if docker_build_identifier.id != previous_result.identifier:
-                should_save_result = True
-            if previous_result.output != docker_build_result_values['output']:  # to discuss
-                should_save_result = True
-            if previous_result.content != docker_build_result_values['content']:  # docker image changed
-                should_save_result = True
-
-        if should_save_result:
-            result = self.env['runbot.docker_build_result'].create(docker_build_result_values)
-            if not docker_build_identifier:
-                message = f'Build failure, check results for more info ({result.summary})'
-                dockerfile.message_post(body=message)
-                _logger.error(message)
+        dockerfile._build()

     @ormcache()
     def _host_list(self):

runbot/models/ir_model_fields_converter.py (new file, 7 lines)
@@ -0,0 +1,7 @@
+from odoo import models
+
+class IrFieldsConverter(models.AbstractModel):
+    _inherit = 'ir.fields.converter'
+
+    def _str_to_jsonb(self, model, field, value):
+        return self._str_to_json(model, field, value)
@@ -23,7 +23,7 @@ class Version(models.Model):
     next_major_version_id = fields.Many2one('runbot.version', compute='_compute_version_relations')
     next_intermediate_version_ids = fields.Many2many('runbot.version', compute='_compute_version_relations')

-    dockerfile_id = fields.Many2one('runbot.dockerfile', default=lambda self: self.env.ref('runbot.docker_default', raise_if_not_found=False))
+    dockerfile_id = fields.Many2one('runbot.dockerfile', default=lambda self: self.env['runbot.version'].search([('name', '=', 'master')], limit=1).dockerfile_id or self.env.ref('runbot.docker_default', raise_if_not_found=False))

     @api.depends('name')
     def _compute_version_number(self):
|
|||||||
access_runbot_dockerfile_user,access_runbot_dockerfile_user,runbot.model_runbot_dockerfile,runbot.group_user,1,0,0,0
|
access_runbot_dockerfile_user,access_runbot_dockerfile_user,runbot.model_runbot_dockerfile,runbot.group_user,1,0,0,0
|
||||||
access_runbot_dockerfile_admin,access_runbot_dockerfile_admin,runbot.model_runbot_dockerfile,runbot.group_runbot_admin,1,1,1,1
|
access_runbot_dockerfile_admin,access_runbot_dockerfile_admin,runbot.model_runbot_dockerfile,runbot.group_runbot_admin,1,1,1,1
|
||||||
|
|
||||||
|
access_runbot_docker_layer_user,access_runbot_docker_layer_user,runbot.model_runbot_docker_layer,runbot.group_user,1,0,0,0
|
||||||
|
access_runbot_docker_layer_admin,access_runbot_docker_layer_admin,runbot.model_runbot_docker_layer,runbot.group_runbot_admin,1,1,1,1
|
||||||
|
|
||||||
access_runbot_docker_build_result_user,access_runbot_docker_build_result_user,runbot.model_runbot_docker_build_result,runbot.group_user,1,0,0,0
|
access_runbot_docker_build_result_user,access_runbot_docker_build_result_user,runbot.model_runbot_docker_build_result,runbot.group_user,1,0,0,0
|
||||||
access_runbot_docker_build_result_admin,access_runbot_docker_build_result_admin,runbot.model_runbot_docker_build_result,runbot.group_runbot_admin,1,1,1,1
|
access_runbot_docker_build_result_admin,access_runbot_docker_build_result_admin,runbot.model_runbot_docker_build_result,runbot.group_runbot_admin,1,1,1,1
|
||||||
|
|
||||||
|
|
@@ -2,152 +2,5 @@
 <odoo>
     <data>
-
-        <template id="runbot.docker_from">
-FROM <t t-esc="values['from']"/>
-ENV LANG C.UTF-8
-USER root
-        </template>
-
-        <template id="runbot.docker_install_debs">
-# Install debian packages
-RUN set -x ; \
-    apt-get update \
-    && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends <t t-esc="deb_packages or values['deb_packages']"/> \
-    && rm -rf /var/lib/apt/lists/*
-        </template>
-
-        <template id="runbot.docker_install_chrome">
-            <t t-set="chrome_distrib" t-value="values.get('chrome_distrib')"/>
-            <t t-set="chrome_version" t-value="values['chrome_version']"/>
-            <t t-set="chrome_source" t-value="values.get('chrome_source')"/>
-# Install Google Chrome
-            <t t-if="chrome_source == 'google'">
-RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_<t t-esc="chrome_version"/>_amd64.deb -o /tmp/chrome.deb \
-    && apt-get update \
-    && apt-get -y install --no-install-recommends /tmp/chrome.deb \
-    && rm /tmp/chrome.deb
-            </t>
-            <t t-else="">
-RUN curl -sSL http://nightly.odoo.com/odoo.key | apt-key add - \
-    && echo "deb http://nightly.odoo.com/deb/<t t-esc="chrome_distrib"/> ./" > /etc/apt/sources.list.d/google-chrome.list \
-    && apt-get update \
-    && apt-get install -y -qq google-chrome-stable=<t t-esc="chrome_version"/> \
-    && rm -rf /var/lib/apt/lists/*
-            </t>
-        </template>
-
-        <template id="runbot.docker_install_phantomjs">
-# Install phantomjs
-RUN curl -sSL https://nightly.odoo.com/resources/phantomjs.tar.bz2 -o /tmp/phantomjs.tar.bz2 \
-    && tar xvfO /tmp/phantomjs.tar.bz2 phantomjs-2.1.1-linux-x86_64/bin/phantomjs > /usr/local/bin/phantomjs \
-    && chmod +x /usr/local/bin/phantomjs \
-    && rm -f /tmp/phantomjs.tar.bz2
-        </template>
-
-        <template id="runbot.docker_install_wkhtml">
-            <t t-if="values['wkhtml_url']">
-# Install wkhtml
-RUN curl -sSL <t t-esc="values['wkhtml_url']"/> -o /tmp/wkhtml.deb \
-    && apt-get update \
-    && dpkg --force-depends -i /tmp/wkhtml.deb \
-    && apt-get install -y -f --no-install-recommends \
-    && rm /tmp/wkhtml.deb
-            </t>
-        </template>
-
-        <template id="runbot.docker_install_nodejs">
-            <t t-set="node_version" t-value="node_version or '20'"/>
-# Install nodejs
-RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
-    && echo "deb https://deb.nodesource.com/node_<t t-esc="values['node_version']"/>.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \
-    && apt-get update \
-    && apt-get install -y nodejs
-        </template>
-
-        <template id="runbot.docker_install_node_packages">
-RUN npm install -g <t t-esc="values['node_packages']"/>
-        </template>
-
-        <template id="runbot.docker_install_flamegraph">
-ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl
-RUN chmod +rx /usr/local/bin/flamegraph.pl
-        </template>
-
-        <template id="runbot.docker_install_psql">
-            <t t-set="psql_version" t-value="psql_version or False"/>
-RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
-    && echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgclient.list \
-    && apt-get update \
-    && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client<t t-if="values['psql_version']">-</t><t t-esc="values['psql_version']"/> \
-    && rm -rf /var/lib/apt/lists/*
-        </template>
-
-        <template id="runbot.docker_install_odoo_debs">
-            <t t-set="odoo_branch" t-value="odoo_branch or 'master'"/>
-ADD https://raw.githubusercontent.com/odoo/odoo/<t t-esc="values['odoo_branch']"/>/debian/control /tmp/control.txt
-RUN apt-get update \
-    && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \
-    | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \
-    | egrep -v 'postgresql-client' \
-    | sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \
-    | DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-        </template>
-
-        <template id="runbot.docker_install_odoo_python_requirements">
-ADD https://raw.githubusercontent.com/odoo/odoo/<t t-esc="values['odoo_branch']"/>/requirements.txt /root/requirements.txt
-RUN <t t-esc="values['python_version']"/> -m pip install --no-cache-dir setuptools wheel && \
-    <t t-esc="values['python_version']"/> -m pip install --no-cache-dir -r /root/requirements.txt && \
-    <t t-esc="values['python_version']"/> -m pip install --no-cache-dir <t t-esc="values['additional_pip']"/>
-        </template>
-
-        <template id="runbot.docker_install_runbot_python_requirements">
-RUN <t t-esc="values['python_version']"/> -m pip install --no-cache-dir setuptools wheel && \
-    <t t-esc="values['python_version']"/> -m pip install <t t-esc="values['runbot_pip']"/>
-        </template>
-
-        <template id="runbot.docker_base">
-            <t t-set="default" t-value="{
-                'from': 'ubuntu:jammy',
-                'odoo_branch': 'master',
-                'chrome_source': 'google',
-                'chrome_version': '123.0.6312.58-1',
-                'node_packages': 'rtlcss es-check eslint',
-                'node_version': '20',
-                'psql_version': '12',
-                'wkhtml_url': 'https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb',
-                'chrome': True,
-                'phantom': False,
-                'do_requirements': True,
-                'python_version': 'python3',
-                'deb_packages_python': 'python3 python3-dbfread python3-dev python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-websocket libpq-dev python3-gevent',
-                'deb_package_default': 'apt-transport-https build-essential ca-certificates curl ffmpeg file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release node-less ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev',
-                'additional_pip': 'ebaysdk==2.1.5 pdf417gen==0.7.1',
-                'runbot_pip': 'coverage==4.5.4 astroid==2.4.2 pylint==2.5.0 flamegraph'
-            }"/>
-            <t t-set="values" t-value="default"/>
-            <t t-set="dummy" t-value="values.update(custom_values)" t-if="custom_values" />
-
-            <t t-call="runbot.docker_from"/>
-            <t t-call="runbot.docker_install_debs">
-                <t t-set="deb_packages" t-value="values['deb_package_default']"/>
-            </t>
-            <t t-call="runbot.docker_install_debs">
-                <t t-set="deb_packages" t-value="values['deb_packages_python']"/>
-            </t>
-            <t t-out="0"/><!-- custom content from caller t-call-->
-            <t t-call="runbot.docker_install_wkhtml"/>
-            <t t-call="runbot.docker_install_nodejs"/>
-            <t t-call="runbot.docker_install_node_packages"/>
-            <t t-call="runbot.docker_install_flamegraph"/>
-            <t t-call="runbot.docker_install_odoo_debs"/>
-            <t t-call="runbot.docker_install_runbot_python_requirements"/>
-            <t t-call="runbot.docker_install_psql"/>
-            <t t-if="values['chrome']" t-call="runbot.docker_install_chrome"/>
-            <t t-if="values['phantom']" t-call="runbot.docker_install_phantomjs"/>
-            <t t-if="values['do_requirements']" t-call="runbot.docker_install_odoo_python_requirements"/>
-        </template>
     </data>
 </odoo>
@@ -1,6 +1,9 @@
 # -*- coding: utf-8 -*-
+import getpass
 import logging
+import os
+
+from odoo import Command
 from unittest.mock import patch, mock_open

 from odoo.tests.common import Form, tagged, HttpCase
@@ -8,6 +11,9 @@ from .common import RunbotCase

 _logger = logging.getLogger(__name__)

+USERUID = os.getuid()
+USERGID = os.getgid()
+USERNAME = getpass.getuser()

 @tagged('-at_install', 'post_install')
 class TestDockerfile(RunbotCase, HttpCase):
@@ -19,44 +25,45 @@ class TestDockerfile(RunbotCase, HttpCase):
 r"""FROM ubuntu:jammy
 ENV LANG C.UTF-8
 USER root
+# Install debian packages
 RUN set -x ; \
 apt-get update \
-&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends apt-transport-https build-essential ca-certificates curl ffmpeg file fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release node-less ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev \
+&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends apt-transport-https build-essential ca-certificates curl ffmpeg file flake8 fonts-freefont-ttf fonts-noto-cjk gawk gnupg gsfonts libldap2-dev libjpeg9-dev libsasl2-dev libxslt1-dev lsb-release ocrmypdf sed sudo unzip xfonts-75dpi zip zlib1g-dev \
 && rm -rf /var/lib/apt/lists/*
+# Install debian packages
 RUN set -x ; \
 apt-get update \
-&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3 python3-dbfread python3-dev python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers plibpq-dev python3-gevent python3-websocket \
+&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3 python3-dbfread python3-dev python3-gevent python3-pip python3-setuptools python3-wheel python3-markdown python3-mock python3-phonenumbers python3-websocket python3-google-auth libpq-dev python3-asn1crypto python3-jwt publicsuffix python3-xmlsec python3-aiosmtpd pylint \
 && rm -rf /var/lib/apt/lists/*
-# Install wkhtml
-RUN curl -sSL https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.bionic_amd64.deb -o /tmp/wkhtml.deb \
+# Install wkhtmltopdf
+RUN curl -sSL https://nightly.odoo.com/deb/jammy/wkhtmltox_0.12.5-2.jammy_amd64.deb -o /tmp/wkhtml.deb \
 && apt-get update \
-&& dpkg --force-depends -i /tmp/wkhtml.deb \
-&& apt-get install -y -f --no-install-recommends \
+&& DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends --fix-missing -qq /tmp/wkhtml.deb \
+&& rm -rf /var/lib/apt/lists/* \
 && rm /tmp/wkhtml.deb
 # Install nodejs with values {"node_version": "20"}
-RUN curl -sSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
-&& echo "deb https://deb.nodesource.com/node_20.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \
+RUN curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null \
+&& echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x `lsb_release -c -s` main" > /etc/apt/sources.list.d/nodesource.list \
 && apt-get update \
 && apt-get install -y nodejs
-RUN npm install -g rtlcss es-check eslint
-ADD https://raw.githubusercontent.com/brendangregg/FlameGraph/master/flamegraph.pl /usr/local/bin/flamegraph.pl
-RUN chmod +rx /usr/local/bin/flamegraph.pl
+RUN npm install -g rtlcss@3.4.0 es-check@6.0.0 eslint@8.1.0 prettier@2.7.1 eslint-config-prettier@8.5.0 eslint-plugin-prettier@4.2.1
 # Install branch debian/control with values {"odoo_branch": "master"}
 ADD https://raw.githubusercontent.com/odoo/odoo/master/debian/control /tmp/control.txt
 RUN apt-get update \
 && sed -n '/^Depends:/,/^[A-Z]/p' /tmp/control.txt \
 | awk '/^ [a-z]/ { gsub(/,/,"") ; gsub(" ", "") ; print $NF }' | sort -u \
 | egrep -v 'postgresql-client' \
-| sed 's/python-imaging/python-pil/'| sed 's/python-pypdf/python-pypdf2/' \
-| DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq \
+| DEBIAN_FRONTEND=noninteractive xargs apt-get install -y -qq --no-install-recommends \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*
-RUN python3 -m pip install --no-cache-dir coverage==4.5.4 astroid==2.4.2 pylint==2.5.0 flamegraph
+# Install pip packages with values {"$packages": "astroid==2.4.2 pylint==2.5.0"}
+RUN python3 -m pip install --no-cache-dir astroid==2.4.2 pylint==2.5.0
+# Install pip packages with values {"$packages": "ebaysdk==2.1.5 pdf417gen==0.7.1"}
 RUN python3 -m pip install --no-cache-dir ebaysdk==2.1.5 pdf417gen==0.7.1
 RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
 && echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgclient.list \
 && apt-get update \
-&& DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-12 \
+&& DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-14 \
 && rm -rf /var/lib/apt/lists/*
 # Install chrome with values {"chrome_version": "123.0.6312.58-1"}
 RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_123.0.6312.58-1_amd64.deb -o /tmp/chrome.deb \
@@ -64,59 +71,44 @@ RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-s
 && apt-get -y install --no-install-recommends /tmp/chrome.deb \
 && rm /tmp/chrome.deb
 # Install branch requirements with values {"odoo_branch": "master"}
-ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /root/requirements.txt
-RUN python3 -m pip install --no-cache-dir -r /root/requirements.txt""")
+ADD https://raw.githubusercontent.com/odoo/odoo/master/requirements.txt /tmp/requirements.txt
+RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt""" f"""
+# Create user template with values {{"USERUID": {USERUID}, "USERGID": {USERGID}, "USERNAME": "{USERNAME}"}}
+RUN groupadd -g {USERGID} {USERNAME} && useradd --create-home -u {USERUID} -g {USERNAME} -G audio,video {USERNAME}
+# Switch user with values {{"USERNAME": "{USERNAME}"}}
+USER {USERNAME}
+""")

 def test_dockerfile_base_fields(self):
-xml_content = """<t t-call="runbot.docker_base">
-<t t-set="custom_values" t-value="{
-'from': 'ubuntu:jammy',
-'phantom': True,
-'additional_pip': 'babel==2.8.0',
-'chrome_source': 'odoo',
-'chrome_version': '86.0.4240.183-1',
-}"/>
-</t>
-"""
-
-focal_template = self.env['ir.ui.view'].create({
-'name': 'docker_focal_test',
-'type': 'qweb',
-'key': 'docker.docker_focal_test',
-'arch_db': xml_content
-})
-
 dockerfile = self.env['runbot.dockerfile'].create({
 'name': 'Tests Ubuntu Focal (20.0)[Chrome 86]',
-'template_id': focal_template.id,
-'to_build': True
+'to_build': True,
+'layer_ids': [
+Command.create({
+'name': 'Customized base',
+'reference_dockerfile_id': self.env.ref('runbot.docker_default').id,
+'values': {
+'from': 'ubuntu:jammy',
+'phantom': True,
+'chrome_version': '86.0.4240.183-1',
+},
+'layer_type': 'reference_file',
+}),
+Command.create({
+'name': 'Customized base',
+'packages': 'babel==2.8.0',
+'layer_type': 'reference_layer',
+'reference_docker_layer_id': self.env.ref('runbot.docker_layer_pip_packages_template').id,
+}),
+],
 })

 self.assertEqual(dockerfile.image_tag, 'odoo:TestsUbuntuFocal20.0Chrome86')
 self.assertTrue(dockerfile.dockerfile.startswith('FROM ubuntu:jammy'))
-self.assertIn(' apt-get install -y -qq google-chrome-stable=86.0.4240.183-1', dockerfile.dockerfile)
-self.assertIn('# Install phantomjs', dockerfile.dockerfile)
+self.assertIn('86.0.4240.183-1', dockerfile.dockerfile)
 self.assertIn('pip install --no-cache-dir babel==2.8.0', dockerfile.dockerfile)

-# test view update
-xml_content = xml_content.replace('86.0.4240.183-1', '87.0-1')
-dockerfile_form = Form(dockerfile)
-dockerfile_form.arch_base = xml_content
-dockerfile_form.save()
+# test layer update
+dockerfile.layer_ids[0].values = {**dockerfile.layer_ids[0].values, 'chrome_version': '87.0.4240.183-1'}

-self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', dockerfile.dockerfile)
+self.assertIn('Install chrome with values {"chrome_version": "87.0.4240.183-1"}', dockerfile.dockerfile)
-
-# Ensure that only the test dockerfile will be found by docker_run
-self.env['runbot.dockerfile'].search([('id', '!=', dockerfile.id)]).update({'to_build': False})
-
-def write_side_effect(content):
-self.assertIn('apt-get install -y -qq google-chrome-stable=87.0-1', content)
-
-docker_build_mock = self.patchers['docker_build']
-docker_build_mock.return_value = (True, None)
-mopen = mock_open()
-rb_host = self.env['runbot.host'].create({'name': 'runbotxxx.odoo.com'})
-with patch('builtins.open', mopen) as file_mock:
-file_handle_mock = file_mock.return_value.__enter__.return_value
-file_handle_mock.write.side_effect = write_side_effect
-rb_host._docker_build()
@@ -59,3 +59,12 @@ class TestVersion(RunbotCase):

 self.assertEqual(master.previous_major_version_id, v13)
 self.assertEqual(master.intermediate_version_ids, v133 | v132 | v131)
+
+def test_version_docker_file(self):
+version18 = self.env['runbot.version'].create({'name': '18.0'})
+versionmaster = self.env['runbot.version'].search([('name', '=', 'master')])
+self.assertEqual(version18.dockerfile_id, versionmaster.dockerfile_id)
+versionmaster.dockerfile_id = self.env['runbot.dockerfile'].create({'name': 'New dockefile for master'})
+version181 = self.env['runbot.version'].create({'name': '18.1'})
+self.assertEqual(version181.dockerfile_id, versionmaster.dockerfile_id)
+self.assertEqual(version181.dockerfile_id.name, 'New dockefile for master')
@@ -19,12 +19,23 @@
 <field name="description"/>
 </group>
 <notebook>
-<page string="Template">
-<field name="arch_base" widget="ace" options="{'mode': 'xml'}" readonly="0"/>
+<page string="Layers">
+<field name="layer_ids">
+<tree>
+<field name="sequence" widget="handle"/>
+<field name="rendered" decoration-it="layer_type in ('reference_layer', 'reference_file')" decoration-bg-info="layer_type == 'template'" decoration-bg-success="layer_type == 'raw'"/>
+<field name="reference_count" string="#" decoration="True" decoration-bg-danger="referencing_dockerlayer_ids" decoration-bg-warning="reference_count != 0"/>
+<field name="referencing_dockerlayer_ids" column_invisible="True"/>
+<field name="layer_type" column_invisible="True"/>
+</tree>
+</field>
 </page>
 <page string="Dockerfile">
 <field name="dockerfile"/>
 </page>
+<page string="Template">
+<field name="arch_base" widget="ace" options="{'mode': 'xml'}" readonly="0"/>
+</page>
 <page string="Views" groups="runbot.group_runbot_admin">
 <field name="view_ids" widget="one2many">
 <tree>
@@ -33,13 +44,28 @@
 </tree>
 </field>
 </page>
-<page string="Bundles">
+<page string="Used in ">
 <field name="bundle_ids" widget="one2many">
 <tree>
 <field name="project_id"/>
 <field name="name"/>
 </tree>
 </field>
+<field name="project_ids" widget="one2many">
+<tree>
+<field name="name"/>
+</tree>
+</field>
+<field name="version_ids" widget="one2many">
+<tree>
+<field name="name"/>
+</tree>
+</field>
+<field name="referencing_dockerlayer_ids" widget="one2many">
+<tree>
+<field name="name"/>
+</tree>
+</field>
 </page>
 <page string="Build results">
 <field name="build_results">
@@ -75,11 +101,59 @@
 <field name="to_build" widget="boolean_toggle" groups="runbot.group_runbot_admin"/>
 <field name="version_ids" widget="many2many_tags"/>
 <field name="project_ids" widget="many2many_tags"/>
-<field name="bundle_ids"/>
+<field name="use_count"/>
 <field name="dockerfile" invisible="True"/>
 </tree>
 </field>
 </record>
+<record id="docker_layer_form" model="ir.ui.view">
+<field name="name">runbot.docker_layer.form</field>
+<field name="model">runbot.docker_layer</field>
+<field name="arch" type="xml">
+<form string="Docker layer">
+<sheet>
+<div class="alert alert-warning" role="alert" invisible="not has_xml_id">This layer is part of the master data and should not be modified.</div>
+<div class="alert alert-warning" role="alert" invisible="not reference_count">This layer is Used by another layer.</div>
+<group>
+<field name="has_xml_id" invisible="1"/>
+<field name="reference_count" invisible="1"/>
+<field name="name" readonly="has_xml_id"/>
+<field name="dockerfile_id" invisible="dockerfile_id" readonly="has_xml_id"/>
+<field name="layer_type" readonly="has_xml_id"/>
+<field name="content" widget="ace" invisible="layer_type not in ('raw', 'template')" readonly="has_xml_id"/>
+<field name="reference_docker_layer_id" invisible="layer_type not in ('reference_layer')" readonly="has_xml_id"/>
+<field name="reference_dockerfile_id" invisible="layer_type != 'reference_file'" readonly="has_xml_id"/>
+<field name="values" widget="runbotjsonb" invisible="layer_type not in ('template', 'reference_layer', 'reference_file')" readonly="has_xml_id"/>
+<field name="packages" widget="ace" invisible="layer_type not in ('template', 'reference_layer', 'reference_file')" readonly="has_xml_id"/>
+<field name="all_referencing_dockerlayer_ids" widget="many2many_tags" readonly="1"/>
+</group>
+<group>
+<field name="rendered"/>
+</group>
+</sheet>
+<div class="oe_chatter">
+<field name="message_follower_ids"/>
+<field name="message_ids"/>
+</div>
+</form>
+</field>
+</record>
+
+<record id="docker_layer_tree" model="ir.ui.view">
+<field name="name">runbot.docker_layer.tree</field>
+<field name="model">runbot.docker_layer</field>
+<field name="arch" type="xml">
+<tree string="Docker Layer">
+<field name="name"/>
+<field name="dockerfile_id"/>
+<field name="layer_type"/>
+<field name="referencing_dockerlayer_ids" column_invisible="True"/>
+<field name="reference_count" string="#refs" decoration-danger="referencing_dockerlayer_ids" decoration-warning="reference_count != 0"/>
+<field name="all_referencing_dockerlayer_ids" string="#referencing" widget="many2many_tags"/>
+<field name="rendered" decoration-warning="layer_type in ('reference_layer', 'reference_file')" decoration-info="layer_type == 'template'" decoration-success="layer_type == 'raw'"/>
+</tree>
+</field>
+</record>

 <record id="docker_build_result_form" model="ir.ui.view">
 <field name="name">runbot.docker_build_result.form</field>
@@ -135,5 +209,11 @@
 <field name="view_mode">tree,form</field>
 </record>

+<record id="open_view_docker_layer_tree" model="ir.actions.act_window">
+<field name="name">Docker Layers</field>
+<field name="res_model">runbot.docker_layer</field>
+<field name="view_mode">tree,form</field>
+</record>
+
 </data>
 </odoo>
@@ -30,8 +30,10 @@

 <menuitem name="Docker" id="menu_dockerfile" parent="runbot_menu_root" sequence="800"/>
 <menuitem name="Docker files" id="menu_dockerfiles" parent="menu_dockerfile" action="open_view_dockerfile_tree" sequence="801"/>
+<menuitem name="Docker layers" id="menu_docker_layers" parent="menu_dockerfile" action="open_view_docker_layer_tree" sequence="801"/>
 <menuitem name="Docker build results" id="menu_docker_results" parent="menu_dockerfile" action="open_view_docker_result_tree" sequence="802"/>
+

 <menuitem name="Manage errors" id="runbot_menu_manage_errors" parent="runbot_menu_root" sequence="900"/>
 <menuitem name="Build errors" id="runbot_menu_build_error_tree" parent="runbot_menu_manage_errors" sequence="10" action="open_view_build_error_tree"/>
 <menuitem name="Error Logs" id="runbot_menu_error_logs" parent="runbot_menu_manage_errors" sequence="20" action="open_view_error_log_tree"/>
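Side note on the generated output asserted in the test above: each template layer carries a values mapping that is substituted into its content (the {$packages}-style placeholders defined on the docker_layer templates) and echoed back as an "# Install ... with values {...}" comment in the resulting Dockerfile. A minimal sketch of that substitution idea, using a hypothetical render_layer helper rather than the actual runbot code:

# Minimal sketch, not the runbot implementation: render a template layer's
# content by replacing each {key} placeholder with its value.
def render_layer(content: str, values: dict) -> str:
    for key, value in values.items():
        content = content.replace('{%s}' % key, str(value))
    return content


pip_template = 'RUN python3 -m pip install --no-cache-dir {$packages}'
print(render_layer(pip_template, {'$packages': 'astroid==2.4.2 pylint==2.5.0'}))
# -> RUN python3 -m pip install --no-cache-dir astroid==2.4.2 pylint==2.5.0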