[IMP] runbot: queue build logs in a local database

Before this commit, build ir_logging records were sent from the build instance
directly to the main runbot ir.logging table. As the number of runbot hosts
increases, this introduces a lot of concurrency:
e.g. 80 hosts running 8 builds each means 640 instances trying to insert
records into the ir.logging table.

With this commit, a dedicated database on the builder host receives the
ir.logging records from the build instances.

The builder regularly empties that table and inserts the logs into the
runbot leader's ir.logging table.
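
In short, each builder periodically does something like the sketch below. This is a
simplified illustration of the mechanism, not the code of this commit: the function
names, the bare psycopg2 wiring and the `leader_ir_logging` stand-in for the leader's
`env['ir.logging']` are illustrative assumptions.

```python
# Simplified sketch: read queued rows from the builder-local log database,
# insert them on the leader, then delete only what was transferred.
# The log_counter limit, error handling and the real Odoo ORM are left out.
import contextlib

import psycopg2


@contextlib.contextmanager
def local_pg_cursor(db_name):
    # commit-on-success cursor on the builder-local postgres database
    cnx = psycopg2.connect(f"dbname={db_name}")
    try:
        yield cnx.cursor()
        cnx.commit()
    finally:
        cnx.close()


def flush_local_logs(leader_ir_logging, logs_db_name="runbot_logs"):
    """Move queued build logs from the local database to the leader."""
    # 1. read everything the build instances logged locally
    with local_pg_cursor(logs_db_name) as cr:
        cr.execute("""
            SELECT id, create_date, name, level, dbname, func, path,
                   line, type, message
              FROM ir_logging
             ORDER BY id
        """)
        cols = [col.name for col in cr.description]
        rows = [dict(zip(cols, row)) for row in cr.fetchall()]
    if not rows:
        return 0

    # 2. insert on the leader first, then delete locally: a failed insert
    #    can only duplicate logs on the next pass, never lose them
    leader_ir_logging.create(
        [{k: v for k, v in row.items() if k != 'id'} for row in rows])
    with local_pg_cursor(logs_db_name) as cr:
        cr.execute("DELETE FROM ir_logging WHERE id IN %s",
                   (tuple(row['id'] for row in rows),))
    return len(rows)
```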
Author: Christophe Monniez, 2022-07-07 16:23:01 +02:00 (committed by xdo)
Parent: 66e37b9323
Commit: 2cad0542f4
19 changed files with 271 additions and 126 deletions

View File

@ -6,7 +6,7 @@
'author': "Odoo SA",
'website': "http://runbot.odoo.com",
'category': 'Website',
'version': '5.1',
'version': '5.2',
'application': True,
'depends': ['base', 'base_automation', 'website'],
'data': [

View File

@ -108,6 +108,16 @@ def local_pgadmin_cursor():
if cnx:
cnx.close()
@contextlib.contextmanager
def local_pg_cursor(db_name):
cnx = None
try:
cnx = psycopg2.connect(f"dbname={db_name}")
yield cnx.cursor()
finally:
if cnx:
cnx.commit()
cnx.close()
def list_local_dbs(additionnal_conditions=None):
additionnal_condition_str = ''

View File

@ -41,6 +41,11 @@
admin_passwd=running_master_password</field>
</record>
<record model="ir.config_parameter" id="runbot.runbot_default_logdb_name">
<field name="key">runbot.logdb_name</field>
<field name="value">runbot_logs</field>
</record>
</data>
<record model="ir.config_parameter" id="runbot.runbot_is_base_regex">

View File

@ -0,0 +1,3 @@
def migrate(cr, version):
cr.execute("DROP TRIGGER IF EXISTS runbot_new_logging ON ir_logging")
cr.execute("DROP FUNCTION IF EXISTS runbot_set_logging_build")

View File

@ -529,7 +529,8 @@ class BuildResult(models.Model):
for _id in self.exists().ids:
additionnal_conditions.append("datname like '%s-%%'" % _id)
existing_db = list_local_dbs(additionnal_conditions=additionnal_conditions)
log_db = self.env['ir.config_parameter'].get_param('runbot.logdb_name')
existing_db = [db for db in list_local_dbs(additionnal_conditions=additionnal_conditions) if db != log_db]
for db, _ in _filter(dest_list=existing_db, label='db'):
self._logger('Removing database')
@ -662,6 +663,8 @@ class BuildResult(models.Model):
def _schedule(self):
"""schedule the build"""
icp = self.env['ir.config_parameter'].sudo()
hosts_by_name = {h.name: h for h in self.env['runbot.host'].search([('name', 'in', self.mapped('host'))])}
hosts_by_build = {b.id: hosts_by_name[b.host] for b in self}
for build in self:
if build.local_state not in ['testing', 'running']:
raise UserError("Build %s is not testing/running: %s" % (build.id, build.local_state))
@ -689,6 +692,8 @@ class BuildResult(models.Model):
continue
else:
build._log('_schedule', 'Docker with state %s not started after 60 seconds, skipping' % _docker_state, level='ERROR')
if hosts_by_build[build.id]._fetch_local_logs(build_ids=build.ids):
continue # avoid making results while there are remaining logs
# No job running, make result and select next job
build_values = {
'job_end': now(),
@ -1015,11 +1020,7 @@ class BuildResult(models.Model):
command.add_config_tuple("xmlrpc_interface", "127.0.0.1")
if grep(config_path, "log-db"):
logdb_uri = self.env['ir.config_parameter'].get_param('runbot.runbot_logdb_uri')
logdb = self.env.cr.dbname
if logdb_uri: # this looks useless
logdb = '%s' % logdb_uri
command.add_config_tuple("log_db", "%s" % logdb)
command.add_config_tuple("log_db", "runbot_logs")
if grep(config_path, 'log-db-level'):
command.add_config_tuple("log_db_level", '25')

View File

@ -5,7 +5,7 @@ import logging
from collections import defaultdict
from ..common import pseudo_markdown
from odoo import models, fields, tools
from odoo import models, fields, tools, api
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
@ -24,59 +24,22 @@ class runbot_event(models.Model):
error_id = fields.Many2one('runbot.build.error', compute='_compute_known_error') # remember to never store this field
dbname = fields.Char(string='Database Name', index=False)
@api.model_create_multi
def create(self, vals_list):
logs_by_build_id = defaultdict(list)
for log in vals_list:
if 'build_id' in log:
logs_by_build_id[log['build_id']].append(log)
def init(self):
parent_class = super(runbot_event, self)
if hasattr(parent_class, 'init'):
parent_class.init()
self._cr.execute("""
CREATE OR REPLACE FUNCTION runbot_set_logging_build() RETURNS TRIGGER AS $runbot_set_logging_build$
BEGIN
IF (NEW.build_id IS NULL AND NEW.dbname IS NOT NULL AND NEW.dbname != current_database()) THEN
NEW.build_id := split_part(NEW.dbname, '-', 1)::integer;
SELECT active_step INTO NEW.active_step_id FROM runbot_build WHERE runbot_build.id = NEW.build_id;
END IF;
IF (NEW.build_id IS NOT NULL) AND (NEW.type = 'server') THEN
DECLARE
counter INTEGER;
BEGIN
UPDATE runbot_build b
SET log_counter = log_counter - 1
WHERE b.id = NEW.build_id;
SELECT log_counter
INTO counter
FROM runbot_build
WHERE runbot_build.id = NEW.build_id;
IF (counter = 0) THEN
NEW.message = 'Log limit reached (full logs are still available in the log file)';
NEW.level = 'SEPARATOR';
NEW.func = '';
NEW.type = 'runbot';
RETURN NEW;
ELSIF (counter < 0) THEN
RETURN NULL;
END IF;
END;
END IF;
IF (NEW.build_id IS NOT NULL AND UPPER(NEW.level) NOT IN ('INFO', 'SEPARATOR')) THEN
BEGIN
UPDATE runbot_build b
SET triggered_result = CASE WHEN UPPER(NEW.level) = 'WARNING' THEN 'warn'
ELSE 'ko'
END
WHERE b.id = NEW.build_id;
END;
END IF;
RETURN NEW;
END;
$runbot_set_logging_build$ language plpgsql;
DROP TRIGGER IF EXISTS runbot_new_logging ON ir_logging;
CREATE TRIGGER runbot_new_logging BEFORE INSERT ON ir_logging
FOR EACH ROW EXECUTE PROCEDURE runbot_set_logging_build();
""")
builds = self.env['runbot.build'].browse(logs_by_build_id.keys())
for build in builds:
build_logs = logs_by_build_id[build.id]
for ir_log in build_logs:
if ir_log['level'].upper() == 'WARNING':
build.triggered_result = 'warn'
elif ir_log['level'].upper() == 'ERROR':
build.triggered_result = 'ko'
return super().create(vals_list)
def _markdown(self):
""" Apply pseudo markdown parser for message.

View File

@ -1,13 +1,18 @@
import logging
import getpass
from collections import defaultdict
from odoo import models, fields, api
from odoo.tools import config
from ..common import fqdn, local_pgadmin_cursor, os
from ..common import fqdn, local_pgadmin_cursor, os, list_local_dbs, local_pg_cursor
from ..container import docker_build
_logger = logging.getLogger(__name__)
forced_host_name = None
class Host(models.Model):
_name = 'runbot.host'
_description = "Host"
@ -52,6 +57,31 @@ class Host(models.Model):
values['disp_name'] = values['name']
return super().create(values)
def _bootstrap_local_logs_db(self):
""" bootstrap a local database that will collect logs from builds """
logs_db_name = self.env['ir.config_parameter'].get_param('runbot.logdb_name')
if logs_db_name not in list_local_dbs():
_logger.info('Logging database %s not found. Creating it ...', logs_db_name)
with local_pgadmin_cursor() as local_cr:
local_cr.execute(f"""CREATE DATABASE "{logs_db_name}" TEMPLATE template0 LC_COLLATE 'C' ENCODING 'unicode'""")
try:
with local_pg_cursor(logs_db_name) as local_cr:
# create_date, type, dbname, name, level, message, path, line, func
local_cr.execute("""CREATE TABLE ir_logging (
id bigserial NOT NULL,
create_date timestamp without time zone,
name character varying NOT NULL,
level character varying,
dbname character varying,
func character varying NOT NULL,
path character varying NOT NULL,
line character varying NOT NULL,
type character varying NOT NULL,
message text NOT NULL);
""")
except Exception as e:
_logger.exception('Failed to create local logs database: %s', e)
def _bootstrap_db_template(self):
""" boostrap template database if needed """
icp = self.env['ir.config_parameter']
@ -72,6 +102,7 @@ class Host(models.Model):
for dir, path in static_dirs.items():
os.makedirs(path, exist_ok=True)
self._bootstrap_db_template()
self._bootstrap_local_logs_db()
def _docker_build(self):
""" build docker images needed by locally pending builds"""
@ -144,3 +175,66 @@ class Host(models.Model):
nb_reserved = self.env['runbot.host'].search_count([('assigned_only', '=', True)])
if nb_reserved < (nb_hosts / 2):
self.assigned_only = True
def _fetch_local_logs(self, build_ids=None):
""" fetch build logs from local database """
logs_db_name = self.env['ir.config_parameter'].get_param('runbot.logdb_name')
try:
with local_pg_cursor(logs_db_name) as local_cr:
res = []
where_clause = 'WHERE build_id IN (%s)' if build_ids else ''
query = f"""
SELECT *
FROM (
SELECT id, create_date, name, level, dbname, func, path, line, type, message, split_part(dbname, '-', 1)::integer as build_id
FROM ir_logging
)
AS ir_logs
{where_clause}
ORDER BY id
"""
local_cr.execute(query, build_ids)
col_names = [col.name for col in local_cr.description]
for row in local_cr.fetchall():
res.append({name:value for name,value in zip(col_names, row)})
return res
except Exception:
return []
def process_logs(self, build_ids=None):
"""move logs from host to the leader"""
ir_logs = self._fetch_local_logs()
logs_by_build_id = defaultdict(list)
for log in ir_logs:
logs_by_build_id[int(log['dbname'].split('-', maxsplit=1)[0])].append(log)
builds = self.env['runbot.build'].browse(logs_by_build_id.keys())
logs_to_send = []
local_log_ids = []
for build in builds.exists():
build_logs = logs_by_build_id[build.id]
for ir_log in build_logs:
local_log_ids.append(ir_log['id'])
ir_log['active_step_id'] = build.active_step.id
ir_log['type'] = 'server'
build.log_counter -= 1
build.flush()
if build.log_counter == 0:
ir_log['level'] = 'SEPARATOR'
ir_log['func'] = ''
ir_log['type'] = 'runbot'
ir_log['message'] = 'Log limit reached (full logs are still available in the log file)'
elif build.log_counter < 0:
continue
ir_log['build_id'] = build.id
logs_to_send.append({k:ir_log[k] for k in ir_log if k != 'id'})
if logs_to_send:
self.env['ir.logging'].create(logs_to_send)
self.env.cr.commit() # we don't want to remove local logs that were not inserted in main runbot db
if local_log_ids:
logs_db_name = self.env['ir.config_parameter'].get_param('runbot.logdb_name')
with local_pg_cursor(logs_db_name) as local_cr:
local_cr.execute("DELETE FROM ir_logging WHERE id in %s", [tuple(local_log_ids)])

View File

@ -16,8 +16,7 @@ class ResConfigSettings(models.TransientModel):
runbot_timeout = fields.Integer('Max allowed step timeout (in seconds)')
runbot_starting_port = fields.Integer('Starting port for running builds')
runbot_max_age = fields.Integer('Max commit age (in days)')
runbot_logdb_uri = fields.Char('Runbot URI for build logs',
help='postgres://user:password@host/db formated uri to give to a build to log in database. Should be a user with limited access rights (ir_logging, runbot_build)')
runbot_logdb_name = fields.Char('Local Logs DB name', default='runbot_logs', config_parameter='runbot.logdb_name')
runbot_update_frequency = fields.Integer('Update frequency (in seconds)')
runbot_template = fields.Char('Postgresql template', help="Postgresql template to use when creating DB's")
runbot_message = fields.Text('Frontend warning message', help="Will be displayed on the frontend when not empty")
@ -62,7 +61,6 @@ class ResConfigSettings(models.TransientModel):
runbot_timeout=int(get_param('runbot.runbot_timeout', default=10000)),
runbot_starting_port=int(get_param('runbot.runbot_starting_port', default=2000)),
runbot_max_age=int(get_param('runbot.runbot_max_age', default=30)),
runbot_logdb_uri=get_param('runbot.runbot_logdb_uri', default=False),
runbot_update_frequency=int(get_param('runbot.runbot_update_frequency', default=10)),
runbot_template=get_param('runbot.runbot_db_template'),
runbot_message=get_param('runbot.runbot_message', default=''),
@ -84,7 +82,6 @@ class ResConfigSettings(models.TransientModel):
set_param("runbot.runbot_timeout", self.runbot_timeout)
set_param("runbot.runbot_starting_port", self.runbot_starting_port)
set_param("runbot.runbot_max_age", self.runbot_max_age)
set_param("runbot.runbot_logdb_uri", self.runbot_logdb_uri)
set_param('runbot.runbot_update_frequency', self.runbot_update_frequency)
set_param('runbot.runbot_db_template', self.runbot_template)
set_param('runbot.runbot_message', self.runbot_message)

View File

@ -42,6 +42,8 @@ class Runbot(models.AbstractModel):
for build in self._get_builds_with_requested_actions(host):
build._process_requested_actions()
self._commit()
host.process_logs()
self._commit()
for build in self._get_builds_to_schedule(host):
build._schedule()
self._commit()

View File

@ -14,3 +14,4 @@ from . import test_runbot
from . import test_commit
from . import test_upgrade
from . import test_dockerfile
from . import test_host

View File

@ -172,6 +172,7 @@ class RunbotCase(TransactionCase):
self.start_patcher('makedirs', 'odoo.addons.runbot.common.os.makedirs', True)
self.start_patcher('mkdir', 'odoo.addons.runbot.common.os.mkdir', True)
self.start_patcher('local_pgadmin_cursor', 'odoo.addons.runbot.common.local_pgadmin_cursor', False) # avoid to create databases
self.start_patcher('host_local_pg_cursor', 'odoo.addons.runbot.models.host.local_pg_cursor')
self.start_patcher('isdir', 'odoo.addons.runbot.common.os.path.isdir', True)
self.start_patcher('isfile', 'odoo.addons.runbot.common.os.path.isfile', True)
self.start_patcher('docker_run', 'odoo.addons.runbot.container._docker_run')

View File

@ -211,15 +211,12 @@ class TestBuildResult(RunbotCase):
self.assertEqual(modules_to_test, sorted(['good_module', 'bad_module', 'other_good', 'l10n_be', 'hwgood', 'hw_explicit', 'other_mod_1', 'other_mod_2']))
def test_build_cmd_log_db(self, ):
""" test that the logdb connection URI is taken from the .odoorc file """
uri = 'postgres://someone:pass@somewhere.com/db'
self.env['ir.config_parameter'].sudo().set_param("runbot.runbot_logdb_uri", uri)
""" test that the log_db parameter is set in the .odoorc file """
build = self.Build.create({
'params_id': self.server_params.id,
})
cmd = build._cmd(py_version=3)
self.assertIn('log_db = %s' % uri, cmd.get_config())
self.assertIn('log_db = runbot_logs', cmd.get_config())
def test_build_cmd_server_path_no_dep(self):
""" test that the server path and addons path """
@ -242,7 +239,8 @@ class TestBuildResult(RunbotCase):
'/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt',
'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/requirements.txt',
'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/server.py',
'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/tools/config.py'
'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/tools/config.py',
'/tmp/runbot_test/static/sources/server/dfdfcfcf0000ffffffffffffffffffffffffffff/openerp/sql_db.py'
])
if file == '/tmp/runbot_test/static/sources/addons/d0d0caca0000ffffffffffffffffffffffffffff/requirements.txt':
return False

View File

@ -15,6 +15,7 @@ class TestCron(RunbotCase):
def setUp(self):
super(TestCron, self).setUp()
self.start_patcher('list_local_dbs_patcher', 'odoo.addons.runbot.models.host.list_local_dbs', ['runbot_logs'])
self.start_patcher('_get_cron_period', 'odoo.addons.runbot.models.runbot.Runbot._get_cron_period', 2)
@patch('time.sleep', side_effect=sleep)

View File

@ -4,57 +4,6 @@ from .common import RunbotCase
class TestIrLogging(RunbotCase):
def simulate_log(self, build, func, message, level='INFO'):
""" simulate ir_logging from an external build """
dest = '%s-fake-dest' % build.id
val = ('server', dest, 'test', level, message, 'test', '0', func)
self.cr.execute("""
INSERT INTO ir_logging(create_date, type, dbname, name, level, message, path, line, func)
VALUES (NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s)
""", val)
def test_ir_logging(self):
build = self.Build.create({
'active_step': self.env.ref('runbot.runbot_build_config_step_test_all').id,
'params_id': self.base_params.id,
})
build.log_counter = 10
# Test that an ir_logging is created and a the trigger set the build_id
self.simulate_log(build, 'test function', 'test message')
log_line = self.env['ir.logging'].search([('func', '=', 'test function'), ('message', '=', 'test message'), ('level', '=', 'INFO')])
self.assertEqual(len(log_line), 1, "A build log event should have been created")
self.assertEqual(log_line.build_id, build)
self.assertEqual(log_line.active_step_id, self.env.ref('runbot.runbot_build_config_step_test_all'), 'The active step should be set on the log line')
# Test that a warn log line sets the build in warn
self.simulate_log(build, 'test function', 'test message', level='WARNING')
build.invalidate_cache()
self.assertEqual(build.triggered_result, 'warn', 'A warning log should sets the build in warn')
# Test that a error log line sets the build in ko
self.simulate_log(build, 'test function', 'test message', level='ERROR')
build.invalidate_cache()
self.assertEqual(build.triggered_result, 'ko', 'An error log should sets the build in ko')
self.assertEqual(7, build.log_counter, 'server lines should decrement the build log_counter')
build.log_counter = 10
# Test the log limit
for i in range(11):
self.simulate_log(build, 'limit function', 'limit message')
log_lines = self.env['ir.logging'].search([('build_id', '=', build.id), ('type', '=', 'server'), ('func', '=', 'limit function'), ('message', '=', 'limit message'), ('level', '=', 'INFO')])
self.assertGreater(len(log_lines), 7, 'Trigger should have created logs with appropriate build id')
self.assertLess(len(log_lines), 10, 'Trigger should prevent insert more lines of logs than log_counter')
last_log_line = self.env['ir.logging'].search([('build_id', '=', build.id)], order='id DESC', limit=1)
self.assertIn('Log limit reached', last_log_line.message, 'Trigger should modify last log message')
# Test that the _log method is still able to add logs
build._log('runbot function', 'runbot message')
log_lines = self.env['ir.logging'].search([('type', '=', 'runbot'), ('name', '=', 'odoo.runbot'), ('func', '=', 'runbot function'), ('message', '=', 'runbot message'), ('level', '=', 'INFO')])
self.assertEqual(len(log_lines), 1, '_log should be able to add logs from the runbot')
def test_markdown(self):
log = self.env['ir.logging'].create({
'name': 'odoo.runbot',

runbot/tests/test_host.py (new file, 113 lines)
View File

@ -0,0 +1,113 @@
import logging
from .common import RunbotCase
from datetime import datetime, timedelta
_logger = logging.getLogger(__name__)
def fetch_local_logs_return_value(nb_logs=10, message='', log_type='server', level='INFO', build_dest='1234567-master-all'):
log_date = datetime(2022, 8, 17, 21, 55)
logs = []
for i in range(nb_logs):
logs += [{
'id': i,
'create_date': log_date,
'name': 'odoo.modules.loading',
'level': level,
'dbname': build_dest,
'func': 'runbot',
'path': '/data/build/odoo/odoo/netsvc.py',
'line': '274',
'type': log_type,
'message': '75 modules loaded in 0.92s, 717 queries (+1 extra)' if message == '' else message,
}]
log_date += timedelta(seconds=20)
return logs
class TestHost(RunbotCase):
def setUp(self):
super().setUp()
self.test_host = self.env['runbot.host'].create({'name': 'test_host'})
self.server_commit = self.Commit.create({
'name': 'dfdfcfcf0000ffffffffffffffffffffffffffff',
'repo_id': self.repo_server.id
})
self.addons_commit = self.Commit.create({
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
'repo_id': self.repo_addons.id,
})
self.server_params = self.base_params.copy({'commit_link_ids': [
(0, 0, {'commit_id': self.server_commit.id})
]})
self.addons_params = self.base_params.copy({'commit_link_ids': [
(0, 0, {'commit_id': self.server_commit.id}),
(0, 0, {'commit_id': self.addons_commit.id})
]})
self.start_patcher('find_patcher', 'odoo.addons.runbot.common.find', 0)
self.start_patcher('host_bootstrap', 'odoo.addons.runbot.models.host.Host._bootstrap', None)
def test_build_logs(self):
build = self.Build.create({
'params_id': self.server_params.id,
'port': '1234567',
'active_step': self.env.ref('runbot.runbot_build_config_step_test_all').id,
'log_counter': 20,
})
# check that local logs are inserted in leader ir.logging
logs = fetch_local_logs_return_value(build_dest=build.dest)
self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', logs)
self.test_host.process_logs()
self.patchers['host_local_pg_cursor'].assert_called()
self.assertEqual(
self.env['ir.logging'].search_count([
('build_id', '=', build.id),
('active_step_id', '=', self.env.ref('runbot.runbot_build_config_step_test_all').id)
]),
10,
)
# check that a warn log sets the build in warning
logs = fetch_local_logs_return_value(nb_logs=1, build_dest=build.dest, level='WARNING')
self.patchers['fetch_local_logs'].return_value = logs
self.test_host.process_logs()
self.patchers['host_local_pg_cursor'].assert_called()
self.assertEqual(
self.env['ir.logging'].search_count([
('build_id', '=', build.id),
('active_step_id', '=', self.env.ref('runbot.runbot_build_config_step_test_all').id),
('level', '=', 'WARNING')
]),
1,
)
self.assertEqual(build.triggered_result, 'warn', 'A warning log should set the build in warn')
# now check that error logs sets the build in ko
logs = fetch_local_logs_return_value(nb_logs=1, build_dest=build.dest, level='ERROR')
self.patchers['fetch_local_logs'].return_value = logs
self.test_host.process_logs()
self.patchers['host_local_pg_cursor'].assert_called()
self.assertEqual(
self.env['ir.logging'].search_count([
('build_id', '=', build.id),
('active_step_id', '=', self.env.ref('runbot.runbot_build_config_step_test_all').id),
('level', '=', 'ERROR')
]),
1,
)
self.assertEqual(build.triggered_result, 'ko', 'An error log should set the build in ko')
build.log_counter = 10
# Test log limit
logs = fetch_local_logs_return_value(nb_logs=11, message='test log limit', build_dest=build.dest)
self.patchers['fetch_local_logs'].return_value = logs
self.test_host.process_logs()
self.patchers['host_local_pg_cursor'].assert_called()

View File

@ -18,10 +18,13 @@ class TestSchedule(RunbotCase):
'project_id': self.project,
'config_id': self.env.ref('runbot.runbot_build_config_default').id,
})
host = self.env['runbot.host'].create({'name': 'runbotxx'}) # the host needs to exist in _schedule()
build = self.Build.create({
'local_state': 'testing',
'port': '1234',
'host': 'runbotxx',
'host': host.name,
'job_start': datetime.datetime.now(),
'active_step': self.env.ref('runbot.runbot_build_config_step_run').id,
'params_id': params.id
@ -31,6 +34,7 @@ class TestSchedule(RunbotCase):
build._schedule() # too fast, docker not started
self.assertEqual(build.local_state, 'testing')
self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', []) # the local logs have to be empty
build.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started
build._schedule()
self.assertEqual(build.local_state, 'done')

View File

@ -273,6 +273,7 @@ class TestUpgradeFlow(RunbotCase):
host = self.env['runbot.host']._get_current()
upgrade_current_build.host = host.name
upgrade_current_build._init_pendings(host)
self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', []) # the local logs have to be empty
upgrade_current_build._schedule()
self.assertEqual(upgrade_current_build.local_state, 'done')
self.assertEqual(len(upgrade_current_build.children_ids), 4)

View File

@ -46,6 +46,8 @@
<field name="runbot_is_base_regex" style="width: 55%;"/>
<label for="runbot_forwardport_author" class="col-xs-3 o_light_label" style="width: 40%;"/>
<field name="runbot_forwardport_author" style="width: 55%;"/>
<label for="runbot_logdb_name" class="col-xs-3 o_light_label" style="width: 40%;"/>
<field name="runbot_logdb_name" style="width: 15%;"/>
</div>
</div>
</div>
@ -62,8 +64,6 @@
</div>
</div>
</div>
<label for="runbot_logdb_uri" class="col-xs-3 o_light_label" style="width: 60%;"/>
<field name="runbot_logdb_uri" style="width: 100%;"/>
<label for="runbot_default_odoorc" class="col-xs-3 o_light_label" style="width: 60%;"/>
<field name="runbot_default_odoorc" style="width: 100%;"/>
<label for="runbot_message" class="col-xs-3 o_light_label" style="width: 60%;"/>

View File

@ -1,9 +1,11 @@
#!/usr/bin/python3
from tools import RunbotClient, run
import logging
from tools import RunbotClient, run
_logger = logging.getLogger(__name__)
class BuilderClient(RunbotClient):
def on_start(self):