[REF] runbot: various refactoring
The initial motivation is to remove the flush when a log_counter is written. This flush was initially useful when the limit was enforced in a psql trigger, but it ended up having the side effect of flushing everything before starting the docker. This limited concurrent updates after starting the docker, but we still had no guarantee that the transaction was committed after starting the docker. The case where the docker was started but the transaction was not committed was not handled well and led to an infinite loop of trying to start a docker (while the docker was already started).

This refactoring returns the docker start as a callable to the scheduler so that the scheduler can commit before starting the docker. To achieve this, it is ideal to have only one method that can return a callable in the _scheduler loop. This is done by removing the run_job call from the _init_pendings method. All satellite methods, like _make_results, are also modified and adapted to write directly: the old way was technical debt, a useless optimization from pre-v13. Other pieces of code are moved around to prepare for future changes, mainly to make the last commit easier to revert if needed.

[FIX] runbot: adapt tests to previous refactoring
commit d2f9330043
parent 688900edb1
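The heart of the change can be sketched as standalone Python. This is a minimal illustration only: the names make_docker_starter and scheduler_loop are invented for this sketch and are not part of runbot. The point it demonstrates is the one the message describes — the docker start is captured in a closure, and the scheduler commits before invoking it.

# Minimal sketch of the "return a callable" pattern introduced below.
# Assumption: commit() persists the build state; print() stands in for docker_run().

def make_docker_starter(cmd, build_dir):
    """Capture the docker arguments in a closure; nothing starts yet."""
    def start_docker():
        print('docker run %s in %s' % (cmd, build_dir))  # placeholder for a real container start
    return start_docker


def scheduler_loop(builds, commit):
    processed = 0
    for build in builds:
        starter = build.get('starter')  # _schedule() would return this callable (or False)
        commit()  # the transaction is committed *before* the container starts
        if callable(starter):
            starter()  # a crash past this point can no longer lose the committed state
            processed += 1
    return processed


if __name__ == '__main__':
    builds = [{'starter': make_docker_starter('odoo-bin -i base', '/tmp/build_1')}]
    scheduler_loop(builds, commit=lambda: print('commit'))

With this ordering, if the process dies between the commit and the docker start, the build state is already persisted and the next loop turn can recover, instead of looping on a started docker whose state was never committed.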
@@ -159,8 +159,7 @@ class BuildResult(models.Model):
     global_state = fields.Selection(make_selection(state_order), string='Status', compute='_compute_global_state', store=True, recursive=True)
     local_state = fields.Selection(make_selection(state_order), string='Build Status', default='pending', required=True, index=True)
     global_result = fields.Selection(make_selection(result_order), string='Result', compute='_compute_global_result', store=True, recursive=True)
-    local_result = fields.Selection(make_selection(result_order), string='Build Result')
-    triggered_result = fields.Selection(make_selection(result_order), string='Triggered Result') # triggered by db only
+    local_result = fields.Selection(make_selection(result_order), string='Build Result', default='ok')

     requested_action = fields.Selection([('wake_up', 'To wake up'), ('deathrow', 'To kill')], string='Action requested', index=True)

     # web infos
@@ -345,9 +344,26 @@ class BuildResult(models.Model):
                     values.pop('local_result')
                 else:
                     raise ValidationError('Local result cannot be set to a less critical level')
+
+        init_global_results = self.mapped('global_result')
+        init_global_states = self.mapped('global_state')
         res = super(BuildResult, self).write(values)
-        if 'log_counter' in values: # not 100% usefull but more correct ( see test_ir_logging)
-            self.flush()
+        for init_global_result, build in zip(init_global_results, self):
+            if init_global_result != build.global_result:
+                build._github_status()
+
+        for init_global_state, build in zip(init_global_states, self):
+            if not build.parent_id and init_global_state not in ('done', 'running') and build.global_state in ('done', 'running'):
+                build._github_status()
+
+        if values.get('global_state') in ('done', 'running'):
+            for build in self:
+                if not build.parent_id and build.global_state not in ('done', 'running'):
+                    build._github_status()
+
         return res

     def _add_child(self, param_values, orphan=False, description=False, additionnal_commit_links=False):
@@ -382,12 +398,6 @@ class BuildResult(models.Model):
             return 'warning'
         return 'ko' # ?

-    def update_build_end(self):
-        for build in self:
-            build.build_end = now()
-            if build.parent_id and build.parent_id.local_state in ('running', 'done'):
-                build.parent_id.update_build_end()
-
     @api.depends('params_id.version_id.name')
     def _compute_dest(self):
         for build in self:
@@ -599,34 +609,20 @@ class BuildResult(models.Model):
         self.ensure_one()
         return '%s_%s' % (self.dest, self.active_step.name)

-    def _init_pendings(self, host):
-        for build in self:
-            if build.local_state != 'pending':
-                raise UserError("Build %s is not pending" % build.id)
-            if build.host != host.name:
-                raise UserError("Build %s does not have correct host" % build.id)
-            # allocate port and schedule first job
-            values = {
-                'port': self._find_port(),
-                'job_start': now(),
-                'build_start': now(),
-                'job_end': False,
-            }
-            values.update(build._next_job_values())
-            build.write(values)
-            if not build.active_step:
-                build._log('_schedule', 'No job in config, doing nothing')
-                build.local_result = 'warn'
-                continue
-            try:
-                build._log('_schedule', 'Init build environment with config %s ' % build.params_id.config_id.name)
-                os.makedirs(build._path('logs'), exist_ok=True)
-            except Exception:
-                _logger.exception('Failed initiating build %s', build.dest)
-                build._log('_schedule', 'Failed initiating build')
-                build._kill(result='ko')
-                continue
-            build._run_job()
+    def _init_pendings(self):
+        self.ensure_one()
+        build = self
+        build.port = self._find_port()
+        build.job_start = now()
+        build.build_start = now()
+        build.job_end = False
+        build._log('_schedule', 'Init build environment with config %s ' % build.params_id.config_id.name)
+        try:
+            os.makedirs(build._path('logs'), exist_ok=True)
+        except Exception:
+            _logger.exception('Failed initiating build %s', build.dest)
+            build._log('_schedule', 'Failed initiating build')
+            build._kill(result='ko')

     def _process_requested_actions(self):
         for build in self:
@@ -638,12 +634,15 @@ class BuildResult(models.Model):
                 continue

             if build.requested_action == 'wake_up':
-                if docker_state(build._get_docker_name(), build._path()) == 'RUNNING':
+                if build.local_state != 'done':
+                    build.requested_action = False
+                    build._log('wake_up', 'Impossible to wake-up, build is not done', log_type='markdown', level='SEPARATOR')
+                elif not os.path.exists(build._path()):
+                    build.requested_action = False
+                    build._log('wake_up', 'Impossible to wake-up, **build dir does not exists anymore**', log_type='markdown', level='SEPARATOR')
+                elif docker_state(build._get_docker_name(), build._path()) == 'RUNNING':
                     build.write({'requested_action': False, 'local_state': 'running'})
                     build._log('wake_up', 'Waking up failed, **docker is already running**', log_type='markdown', level='SEPARATOR')
-                elif not os.path.exists(build._path()):
-                    build.write({'requested_action': False, 'local_state': 'done'})
-                    build._log('wake_up', 'Impossible to wake-up, **build dir does not exists anymore**', log_type='markdown', level='SEPARATOR')
                 else:
                     try:
                         log_path = build._path('logs', 'wake_up.txt')
@@ -674,45 +673,39 @@ class BuildResult(models.Model):
     def _schedule(self):
         """schedule the build"""
         icp = self.env['ir.config_parameter'].sudo()
-        hosts_by_name = {h.name: h for h in self.env['runbot.host'].search([('name', 'in', self.mapped('host'))])}
-        hosts_by_build = {b.id: hosts_by_name[b.host] for b in self}
-        for build in self:
-            if build.local_state not in ['testing', 'running']:
-                raise UserError("Build %s is not testing/running: %s" % (build.id, build.local_state))
-            if build.local_state == 'testing':
-                # failfast in case of docker error (triggered in database)
-                if build.triggered_result and not build.active_step.ignore_triggered_result:
-                    worst_result = self._get_worst_result([build.triggered_result, build.local_result])
-                    if worst_result != build.local_result:
-                        build.local_result = build.triggered_result
-                        build._github_status() # failfast
-            # check if current job is finished
+        self.ensure_one()
+        build = self
+        if build.local_state not in ['testing', 'running', 'pending']:
+            return False
+        # check if current job is finished
+        if build.local_state == 'pending':
+            build._init_pendings()
+        else:
             _docker_state = docker_state(build._get_docker_name(), build._path())
             if _docker_state == 'RUNNING':
                 timeout = min(build.active_step.cpu_limit, int(icp.get_param('runbot.runbot_timeout', default=10000)))
                 if build.local_state != 'running' and build.job_time > timeout:
                     build._log('_schedule', '%s time exceeded (%ss)' % (build.active_step.name if build.active_step else "?", build.job_time))
                     build._kill(result='killed')
-                continue
+                return False
             elif _docker_state in ('UNKNOWN', 'GHOST') and (build.local_state == 'running' or build.active_step._is_docker_step()): # todo replace with docker_start
                 docker_time = time.time() - dt2time(build.docker_start or build.job_start)
                 if docker_time < 5:
-                    continue
+                    return False
                 elif docker_time < 60:
                     _logger.info('container "%s" seems too take a while to start :%s' % (build.job_time, build._get_docker_name()))
-                    continue
+                    return False
                 else:
                     build._log('_schedule', 'Docker with state %s not started after 60 seconds, skipping' % _docker_state, level='ERROR')
-            if hosts_by_build[build.id]._fetch_local_logs(build_ids=build.ids):
-                continue # avoid to make results with remaining logs
+            if self.env['runbot.host']._fetch_local_logs(build_ids=build.ids):
+                return True # avoid to make results with remaining logs
             # No job running, make result and select next job
-            build_values = {
-                'job_end': now(),
-                'docker_start': False,
-            }
+            build.job_end = now()
+            build.docker_start = False
            # make result of previous job
             try:
-                results = build.active_step._make_results(build)
+                build.active_step._make_results(build)
             except Exception as e:
                 if isinstance(e, RunbotException):
                     message = e.args[0][:300000]
@@ -720,50 +713,71 @@ class BuildResult(models.Model):
                     message = 'An error occured while computing results of %s:\n %s' % (build.job, str(e).replace('\\n', '\n').replace("\\'", "'")[:10000])
                 _logger.exception(message)
                 build._log('_make_results', message, level='ERROR')
-                results = {'local_result': 'ko'}
-
-            build_values.update(results)
+                build.local_result = 'ko'

             # compute statistics before starting next job
             build.active_step._make_stats(build)

             build.active_step.log_end(build)

-            build_values.update(build._next_job_values()) # find next active_step or set to done
-
-            ending_build = build.local_state not in ('done', 'running') and build_values.get('local_state') in ('done', 'running')
-            if ending_build:
-                build.update_build_end()
-
-            build.write(build_values)
-            if ending_build:
-                if not build.local_result: # Set 'ok' result if no result set (no tests job on build)
-                    build.local_result = 'ok'
-                    build._logger("No result set, setting ok by default")
-                build._github_status()
-            build._run_job()
+        step_ids = self.params_id.config_id.step_ids()
+        if not step_ids: # no job to do, build is done
+            self.active_step = False
+            self.local_state = 'done'
+            build._log('_schedule', 'No job in config, doing nothing')
+            build.local_result = 'warn'
+            return False
+
+        if not self.active_step and self.local_state != 'pending': # wakeup docker finished
+            build.active_step = False
+            build.local_state = 'done'
+            return False
+
+        if not self.active_step:
+            next_index = 0
+        else:
+            if self.active_step not in step_ids:
+                self._log('run', 'Config was modified and current step does not exists anymore, skipping.', level='ERROR')
+                self.active_step = False
+                self.local_state = 'done'
+                self.local_result = 'ko'
+                return False
+            next_index = step_ids.index(self.active_step) + 1
+
+        while True:
+            if next_index >= len(step_ids): # final job, build is done
+                self.active_step = False
+                self.local_state = 'done'
+                return False
+            new_step = step_ids[next_index] # job to do, state is job_state (testing or running)
+            if new_step.domain_filter and not self.filtered_domain(safe_eval(new_step.domain_filter)):
+                self._log('run', '**Skipping** step ~~%s~~ from config **%s**' % (new_step.name, self.params_id.config_id.name), log_type='markdown', level='SEPARATOR')
+                next_index += 1
+                continue
+            break
+        build.active_step = new_step.id
+        build.local_state = new_step._step_state()

+        return build._run_job()

     def _run_job(self):
-        # run job
-        for build in self:
+        self.ensure_one()
+        build = self
         if build.local_state != 'done':
             build._logger('running %s', build.active_step.name)
             os.makedirs(build._path('logs'), exist_ok=True)
             os.makedirs(build._path('datadir'), exist_ok=True)
             try:
-                build.active_step._run(build) # run should be on build?
+                return build.active_step._run(build) # run should be on build?
             except TransactionRollbackError:
                 raise
             except Exception as e:
                 if isinstance(e, RunbotException):
                     message = e.args[0]
                 else:
                     message = '%s failed running step %s:\n %s' % (build.dest, build.job, str(e).replace('\\n', '\n').replace("\\'", "'"))
                 _logger.exception(message)
                 build._log("run", message, level='ERROR')
                 build._kill(result='ko')

     def _docker_run(self, cmd=None, ro_volumes=None, **kwargs):
         self.ensure_one()
@@ -793,7 +807,10 @@ class BuildResult(models.Model):
         user = getpass.getuser()
         ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc')
         kwargs.pop('build_dir', False) # todo check python steps
-        docker_run(cmd=cmd, build_dir=self._path(), ro_volumes=ro_volumes, **kwargs)
+        build_dir = self._path()
+
+        def start_docker():
+            docker_run(cmd=cmd, build_dir=build_dir, ro_volumes=ro_volumes, **kwargs)
+        return start_docker

     def _path(self, *l, **kw):
         """Return the repo build path"""
@@ -1061,36 +1078,6 @@ class BuildResult(models.Model):
             'build_id': self.id
         })

-    def _next_job_values(self):
-        self.ensure_one()
-        step_ids = self.params_id.config_id.step_ids()
-        if not step_ids: # no job to do, build is done
-            return {'active_step': False, 'local_state': 'done'}
-
-        if not self.active_step and self.local_state != 'pending':
-            # means that a step has been run manually without using config
-            return {'active_step': False, 'local_state': 'done'}
-
-        if not self.active_step:
-            next_index = 0
-        else:
-            if self.active_step not in step_ids:
-                self._log('run', 'Config was modified and current step does not exists anymore, skipping.', level='ERROR')
-                return {'active_step': False, 'local_state': 'done', 'local_result': self._get_worst_result([self.local_result, 'ko'])}
-            next_index = step_ids.index(self.active_step) + 1
-
-        while True:
-            if next_index >= len(step_ids): # final job, build is done
-                return {'active_step': False, 'local_state': 'done'}
-            new_step = step_ids[next_index] # job to do, state is job_state (testing or running)
-            if new_step.domain_filter and not self.filtered_domain(safe_eval(new_step.domain_filter)):
-                self._log('run', '**Skipping** step ~~%s~~ from config **%s**' % (new_step.name, self.params_id.config_id.name), log_type='markdown', level='SEPARATOR')
-                next_index += 1
-                continue
-            break
-        return {'active_step': new_step.id, 'local_state': new_step._step_state()}
-
     def _get_py_version(self):
         """return the python name to use from build batch"""
         (server_commit, server_file) = self._get_server_info()
@@ -166,7 +166,6 @@ class ConfigStep(models.Model):
     # python
     python_code = fields.Text('Python code', tracking=True, default=PYTHON_DEFAULT)
     python_result_code = fields.Text('Python code for result', tracking=True, default=PYTHON_DEFAULT)
-    ignore_triggered_result = fields.Boolean('Ignore error triggered in logs', tracking=True, default=False)
     running_job = fields.Boolean('Job final state is running', default=False, help="Docker won't be killed if checked")
     # create_build
     create_config_ids = fields.Many2many('runbot.build.config', 'runbot_build_config_step_ids_create_config_ids_rel', string='New Build Configs', tracking=True, index=True)
@@ -275,14 +274,14 @@ class ConfigStep(models.Model):
         url = f"{log_url}/runbot/static/build/{build.dest}/logs/{self.name}.txt"
         log_link = f'[@icon-file-text]({url})'
         build._log('run', 'Starting step **%s** from config **%s** %s' % (self.name, build.params_id.config_id.name, log_link), log_type='markdown', level='SEPARATOR')
-        self._run_step(build, log_path)
+        return self._run_step(build, log_path)

     def _run_step(self, build, log_path, **kwargs):
         build.log_counter = self.env['ir.config_parameter'].sudo().get_param('runbot.runbot_maxlogs', 100)
         run_method = getattr(self, '_run_%s' % self.job_type)
         docker_params = run_method(build, log_path, **kwargs)
         if docker_params:
-            build._docker_run(**docker_params)
+            return build._docker_run(**docker_params)

     def _run_create_build(self, build, log_path):
         count = 0
@@ -906,23 +905,20 @@ class ConfigStep(models.Model):
         return ['--omit', ','.join(pattern_to_omit)]

     def _make_results(self, build):
-        build_values = {}
         log_time = self._get_log_last_write(build)
         if log_time:
-            build_values['job_end'] = log_time
+            build.job_end = log_time
         if self.job_type == 'python' and self.python_result_code and self.python_result_code != PYTHON_DEFAULT:
-            build_values.update(self._make_python_results(build))
+            build.write(self._make_python_results(build))
         elif self.job_type in ['install_odoo', 'python']:
             if self.coverage:
-                build_values.update(self._make_coverage_results(build))
+                build.write(self._make_coverage_results(build))
             if self.test_enable or self.test_tags:
-                build_values.update(self._make_tests_results(build))
+                build.write(self._make_tests_results(build))
         elif self.job_type == 'test_upgrade':
-            build_values.update(self._make_upgrade_results(build))
+            build.write(self._make_upgrade_results(build))
         elif self.job_type == 'restore':
-            build_values.update(self._make_restore_results(build))
-
-        return build_values
+            build.write(self._make_restore_results(build))

     def _make_python_results(self, build):
         eval_ctx = self.make_python_ctx(build)
@@ -55,7 +55,6 @@ class Bundle(models.Model):
     # extra_info
     for_next_freeze = fields.Boolean('Should be in next freeze')

-
     @api.depends('name')
     def _compute_host_id(self):
         assigned_only = None
@@ -36,10 +36,11 @@ class runbot_event(models.Model):
                 build_logs = logs_by_build_id[build.id]
                 for ir_log in build_logs:
                     ir_log['active_step_id'] = build.active_step.id
-                    if ir_log['level'].upper() == 'WARNING':
-                        build.triggered_result = 'warn'
-                    elif ir_log['level'].upper() == 'ERROR':
-                        build.triggered_result = 'ko'
+                    if build.local_state != 'running':
+                        if ir_log['level'].upper() == 'WARNING':
+                            build.local_result = 'warn'
+                        elif ir_log['level'].upper() == 'ERROR':
+                            build.local_result = 'ko'
         return super().create(vals_list)

     def _markdown(self):
@@ -209,7 +209,7 @@ class Host(models.Model):
             res.append({name:value for name, value in zip(col_names, row)})
        return res

-    def process_logs(self, build_ids=None):
+    def _process_logs(self, build_ids=None):
         """move logs from host to the leader"""
         ir_logs = self._fetch_local_logs()
         logs_by_build_id = defaultdict(list)
@@ -253,6 +253,13 @@ class Host(models.Model):
         with local_pg_cursor(logs_db_name) as local_cr:
             local_cr.execute("DELETE FROM ir_logging WHERE id in %s", [tuple(local_log_ids)])

+    def get_build_domain(self, domain=None):
+        domain = domain or []
+        return [('host', '=', self.name)] + domain
+
+    def get_builds(self, domain, order=None):
+        return self.env['runbot.build'].search(self.get_build_domain(domain), order=order)
+
     def _process_messages(self):
         self.host_message_ids._process()
@@ -39,80 +39,76 @@ class Runbot(models.AbstractModel):
     def _scheduler(self, host):
         self._gc_testing(host)
         self._commit()
-        for build in self._get_builds_with_requested_actions(host):
-            build = build.browse(build.id) # remove preftech ids, manage build one by one
+        processed = 0
+        for build in host.get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
+            processed += 1
             build._process_requested_actions()
             self._commit()
-        host.process_logs()
+        host._process_logs()
         self._commit()
         host._process_messages()
         self._commit()
-        for build in self._get_builds_to_schedule(host):
+        for build in host.get_builds([('local_state', 'in', ['testing', 'running'])]) | self._get_builds_to_init(host):
             build = build.browse(build.id) # remove preftech ids, manage build one by one
-            build._schedule()
+            result = build._schedule()
+            if result:
+                processed += 1
             self._commit()
-        self._assign_pending_builds(host, host.nb_worker, [('build_type', '!=', 'scheduled')])
+            if callable(result):
+                result() # start docker
+        processed += self._assign_pending_builds(host, host.nb_worker, [('build_type', '!=', 'scheduled')])
         self._commit()
-        self._assign_pending_builds(host, host.nb_worker - 1 or host.nb_worker)
+        processed += self._assign_pending_builds(host, host.nb_worker - 1 or host.nb_worker)
         self._commit()
-        self._assign_pending_builds(host, host.nb_worker and host.nb_worker + 1, [('build_type', '=', 'priority')])
+        processed += self._assign_pending_builds(host, host.nb_worker and host.nb_worker + 1, [('build_type', '=', 'priority')])
         self._commit()
-        for build in self._get_builds_to_init(host):
-            build = build.browse(build.id) # remove preftech ids, manage build one by one
-            build._init_pendings(host)
-            self._commit()
         self._gc_running(host)
         self._commit()
         self._reload_nginx()
+        self._commit()
+        return processed

-    def build_domain_host(self, host, domain=None):
-        domain = domain or []
-        return [('host', '=', host.name)] + domain
-
-    def _get_builds_with_requested_actions(self, host):
-        return self.env['runbot.build'].search(self.build_domain_host(host, [('requested_action', 'in', ['wake_up', 'deathrow'])]))
-
-    def _get_builds_to_schedule(self, host):
-        return self.env['runbot.build'].search(self.build_domain_host(host, [('local_state', 'in', ['testing', 'running'])]))
-
     def _assign_pending_builds(self, host, nb_worker, domain=None):
         if host.assigned_only or nb_worker <= 0:
-            return
-        domain_host = self.build_domain_host(host)
-        reserved_slots = self.env['runbot.build'].search_count(domain_host + [('local_state', 'in', ('testing', 'pending'))])
+            return 0
+        reserved_slots = len(host.get_builds([('local_state', 'in', ('testing', 'pending'))]))
         assignable_slots = (nb_worker - reserved_slots)
         if assignable_slots > 0:
             allocated = self._allocate_builds(host, assignable_slots, domain)
             if allocated:
                 _logger.info('Builds %s where allocated to runbot', allocated)
+                return len(allocated)
+        return 0

     def _get_builds_to_init(self, host):
-        domain_host = self.build_domain_host(host)
-        used_slots = self.env['runbot.build'].search_count(domain_host + [('local_state', '=', 'testing')])
+        domain_host = host.get_build_domain()
+        used_slots = len(host.get_builds([('local_state', '=', 'testing')]))
         available_slots = host.nb_worker - used_slots
-        if available_slots <= 0:
-            return self.env['runbot.build']
-        return self.env['runbot.build'].search(domain_host + [('local_state', '=', 'pending')], limit=available_slots)
+        build_to_init = self.env['runbot.build']
+        if available_slots > 0:
+            build_to_init |= self.env['runbot.build'].search(domain_host + [('local_state', '=', 'pending')], limit=available_slots)
+        if available_slots + 1 > 0:
+            build_to_init |= self.env['runbot.build'].search(domain_host + [('local_state', '=', 'pending'), ('build_type', '=', 'priority')], limit=1)
+        return build_to_init

     def _gc_running(self, host):
         running_max = host.get_running_max()
-        domain_host = self.build_domain_host(host)
         Build = self.env['runbot.build']
-        cannot_be_killed_ids = Build.search(domain_host + [('keep_running', '=', True)]).ids
+        cannot_be_killed_ids = host.get_builds([('keep_running', '=', True)]).ids
         sticky_bundles = self.env['runbot.bundle'].search([('sticky', '=', True), ('project_id.keep_sticky_running', '=', True)])
         cannot_be_killed_ids += [
             build.id
             for build in sticky_bundles.mapped('last_batchs.slot_ids.build_id')
             if build.host == host.name
         ][:running_max]
-        build_ids = Build.search(domain_host + [('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
+        build_ids = host.get_builds([('local_state', '=', 'running'), ('id', 'not in', cannot_be_killed_ids)], order='job_start desc').ids
         Build.browse(build_ids)[running_max:]._kill()

     def _gc_testing(self, host):
         """garbage collect builds that could be killed"""
         # decide if we need room
         Build = self.env['runbot.build']
-        domain_host = self.build_domain_host(host)
+        domain_host = host.get_build_domain()
         testing_builds = Build.search(domain_host + [('local_state', 'in', ['testing', 'pending']), ('requested_action', '!=', 'deathrow')])
         used_slots = len(testing_builds)
         available_slots = host.nb_worker - used_slots
@@ -282,11 +278,11 @@ class Runbot(models.AbstractModel):

         return manager.get('sleep', default_sleep)

-    def _scheduler_loop_turn(self, host, default_sleep=5):
-        _logger.info('Scheduling...')
+    def _scheduler_loop_turn(self, host, sleep=5):
         with self.manage_host_exception(host) as manager:
-            self._scheduler(host)
-        return manager.get('sleep', default_sleep)
+            if self._scheduler(host):
+                sleep = 0.1
+        return manager.get('sleep', sleep)

     @contextmanager
     def manage_host_exception(self, host):
@@ -181,7 +181,6 @@ class RunbotCase(TransactionCase):
         self.start_patcher('docker_stop', 'odoo.addons.runbot.container._docker_stop')
         self.start_patcher('docker_get_gateway_ip', 'odoo.addons.runbot.models.build_config.docker_get_gateway_ip', None)

-        self.start_patcher('cr_commit', 'odoo.sql_db.Cursor.commit', None)
         self.start_patcher('repo_commit', 'odoo.addons.runbot.models.runbot.Runbot._commit', None)
         self.start_patcher('_local_cleanup_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_cleanup')
         self.start_patcher('_local_pg_dropdb_patcher', 'odoo.addons.runbot.models.build.BuildResult._local_pg_dropdb')
@@ -194,6 +193,11 @@ class RunbotCase(TransactionCase):

         self.start_patcher('_get_py_version', 'odoo.addons.runbot.models.build.BuildResult._get_py_version', 3)

+        def no_commit(*_args, **_kwargs):
+            _logger.info('Skipping commit')
+
+        self.patch(self.env.cr, 'commit', no_commit)
+
     def start_patcher(self, patcher_name, patcher_path, return_value=DEFAULT, side_effect=DEFAULT, new=DEFAULT):
@@ -4,8 +4,10 @@ import datetime
 from unittest.mock import patch

 from odoo import fields
+from odoo.tests import tagged
 from odoo.exceptions import UserError, ValidationError
 from .common import RunbotCase, RunbotCaseMinimalSetup
+from unittest.mock import MagicMock


 def rev_parse(repo, branch_name):
@@ -18,6 +20,7 @@ def rev_parse(repo, branch_name):
     return head_hash


+@tagged('-at_install', 'post_istall')
 class TestBuildParams(RunbotCaseMinimalSetup):

     def setUp(self):
@@ -171,7 +174,10 @@ class TestBuildResult(RunbotCase):
         # test a bulk write, that one cannot change from 'ko' to 'ok'
         builds = self.Build.browse([build.id, other.id])
         with self.assertRaises(ValidationError):
-            builds.write({'local_result': 'ok'})
+            builds.write({'local_result': 'warn'})
+        # self.assertEqual(build.local_result, 'warn')
+        # self.assertEqual(other.local_result, 'ko')

     def test_markdown_description(self):
         build = self.Build.create({
@@ -331,6 +337,11 @@ class TestBuildResult(RunbotCase):
         build1 = self.Build.create({
             'params_id': self.server_params.id,
         })
+        self.assertEqual('pending', build1.global_state)
+
+        build1.local_state = 'testing'
+        self.assertEqual('testing', build1.global_state)
+
         build1_1 = self.Build.create({
             'params_id': self.server_params.id,
             'parent_id': build1.id,
@@ -339,6 +350,15 @@ class TestBuildResult(RunbotCase):
             'params_id': self.server_params.id,
             'parent_id': build1.id,
         })
+        self.assertEqual('testing', build1.global_state)
+        self.assertEqual('pending', build1_1.global_state)
+        self.assertEqual('pending', build1_2.global_state)
+
+        build1_1.local_state = 'testing'
+        self.assertEqual('testing', build1.global_state)
+        self.assertEqual('testing', build1_1.global_state)
+        self.assertEqual('pending', build1_2.global_state)
+
         build1_1_1 = self.Build.create({
             'params_id': self.server_params.id,
             'parent_id': build1_1.id,
@@ -348,60 +368,132 @@ class TestBuildResult(RunbotCase):
             'parent_id': build1_1.id,
         })

-        def assert_state(global_state, build):
-            self.assertEqual(build.global_state, global_state)
-
-        assert_state('pending', build1)
-        assert_state('pending', build1_1)
-        assert_state('pending', build1_2)
-        assert_state('pending', build1_1_1)
-        assert_state('pending', build1_1_2)
+        self.assertEqual('testing', build1.global_state)
+        self.assertEqual('testing', build1_1.global_state)
+        self.assertEqual('pending', build1_2.global_state)
+        self.assertEqual('pending', build1_1_1.global_state)
+        self.assertEqual('pending', build1_1_2.global_state)
+
+        build1_2.flush()
+        with self.assertQueries(['''UPDATE "runbot_build" SET "global_state"=%s,"local_state"=%s,"write_date"=%s,"write_uid"=%s WHERE id IN %s''']):
+            build1_2.local_state = "testing"
+            build1_2.flush()

-        build1.local_state = 'testing'
-        build1_1.local_state = 'testing'
+        self.assertEqual('testing', build1.global_state)
+        self.assertEqual('testing', build1_2.global_state)
+
+        with self.assertQueries([]): # no change should be triggered
+            build1_2.local_state = "testing"
+
+        # with self.assertQueries(['''UPDATE "runbot_build" SET "global_state"=%s,"local_state"=%s,"write_date"=%s,"write_uid"=%s WHERE id IN %s''']):
         build1.local_state = 'done'
+        build1.flush()
+
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('testing', build1_1.global_state)
+
+        # with self.assertQueries([]): # write the same value, no update should be triggered
+        build1.local_state = 'done'
+        build1.flush()
+
         build1_1.local_state = 'done'

-        assert_state('waiting', build1)
-        assert_state('waiting', build1_1)
-        assert_state('pending', build1_2)
-        assert_state('pending', build1_1_1)
-        assert_state('pending', build1_1_2)
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('testing', build1_2.global_state)
+        self.assertEqual('pending', build1_1_1.global_state)
+        self.assertEqual('pending', build1_1_2.global_state)

         build1_1_1.local_state = 'testing'

-        assert_state('waiting', build1)
-        assert_state('waiting', build1_1)
-        assert_state('pending', build1_2)
-        assert_state('testing', build1_1_1)
-        assert_state('pending', build1_1_2)
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('testing', build1_2.global_state)
+        self.assertEqual('testing', build1_1_1.global_state)
+        self.assertEqual('pending', build1_1_2.global_state)

-        build1_2.local_state = 'testing'
+        with self.assertQueries([]):
+            build1_2.local_state = 'testing'

-        assert_state('waiting', build1)
-        assert_state('waiting', build1_1)
-        assert_state('testing', build1_2)
-        assert_state('testing', build1_1_1)
-        assert_state('pending', build1_1_2)
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('testing', build1_2.global_state)
+        self.assertEqual('testing', build1_1_1.global_state)
+        self.assertEqual('pending', build1_1_2.global_state)

-        build1_2.local_state = 'testing' # writing same state a second time
-
-        assert_state('waiting', build1)
-        assert_state('waiting', build1_1)
-        assert_state('testing', build1_2)
-        assert_state('testing', build1_1_1)
-        assert_state('pending', build1_1_2)
+        build1_2.local_state = 'done'
+        build1_1_1.local_state = 'done'
+        build1_1_2.local_state = 'testing'
+
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('done', build1_2.global_state)
+        self.assertEqual('done', build1_1_1.global_state)
+        self.assertEqual('testing', build1_1_2.global_state)

         build1_1_2.local_state = 'done'

+        self.assertEqual('done', build1.global_state)
+        self.assertEqual('done', build1_1.global_state)
+        self.assertEqual('done', build1_2.global_state)
+        self.assertEqual('done', build1_1_1.global_state)
+        self.assertEqual('done', build1_1_2.global_state)
+
+    def test_rebuild_sub_sub_build(self):
+        build1 = self.Build.create({
+            'params_id': self.server_params.id,
+        })
+        build1.local_state = 'testing'
+        build1_1 = self.Build.create({
+            'params_id': self.server_params.id,
+            'parent_id': build1.id,
+        })
+        build1_1.local_state = 'testing'
+        build1.local_state = 'done'
+        build1_1_1 = self.Build.create({
+            'params_id': self.server_params.id,
+            'parent_id': build1_1.id,
+        })
+        build1_1_1.local_state = 'testing'
+        build1_1.local_state = 'done'
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('testing', build1_1_1.global_state)
+
+        build1_1_1.local_result = 'ko'
         build1_1_1.local_state = 'done'
-        build1_2.local_state = 'done'
-
-        assert_state('done', build1)
-        assert_state('done', build1_1)
-        assert_state('done', build1_2)
-        assert_state('done', build1_1_1)
-        assert_state('done', build1_1_2)
+        self.assertEqual('done', build1.global_state)
+        self.assertEqual('done', build1_1.global_state)
+        self.assertEqual('done', build1_1_1.global_state)
+        self.assertEqual('ko', build1.global_result)
+        self.assertEqual('ko', build1_1.global_result)
+        self.assertEqual('ko', build1_1_1.global_result)
+
+        rebuild1_1_1 = self.Build.create({ # this is a rebuild
+            'params_id': self.server_params.id,
+            'parent_id': build1_1.id,
+        })
+        build1_1_1.orphan_result = True
+
+        self.assertEqual('ok', build1.global_result)
+        self.assertEqual('ok', build1_1.global_result)
+        self.assertEqual('ko', build1_1_1.global_result)
+        self.assertEqual('waiting', build1.global_state)
+        self.assertEqual('waiting', build1_1.global_state)
+        self.assertEqual('done', build1_1_1.global_state)
+        self.assertEqual('pending', rebuild1_1_1.global_state)
+
+        rebuild1_1_1.local_result = 'ok'
+        rebuild1_1_1.local_state = 'done'
+
+        self.assertEqual('ok', build1.global_result)
+        self.assertEqual('ok', build1_1.global_result)
+        self.assertEqual('ko', build1_1_1.global_result)
+        self.assertEqual('ok', rebuild1_1_1.global_result)
+        self.assertEqual('done', build1.global_state)
+        self.assertEqual('done', build1_1.global_state)
+        self.assertEqual('done', build1_1_1.global_state)
+        self.assertEqual('done', rebuild1_1_1.global_state)

 class TestGc(RunbotCaseMinimalSetup):

@@ -446,6 +538,7 @@ class TestGc(RunbotCaseMinimalSetup):
         build_a.write({'local_state': 'testing', 'host': host.name})
         build_b.write({'local_state': 'testing', 'host': 'runbot_yyy'})

+
         # no room needed, verify that nobody got killed
         self.Runbot._gc_testing(host)
         self.assertFalse(build_a.requested_action)
@@ -487,3 +580,34 @@ class TestGc(RunbotCaseMinimalSetup):
         self.assertFalse(build_b.requested_action)
         self.assertFalse(build_a_last.requested_action)
         self.assertFalse(children_b.requested_action)
+
+
+class TestGithubStatus(RunbotCase):
+
+    def setUp(self):
+        super().setUp()
+        self.build = self.Build.create({
+            'params_id': self.base_params.id,
+            'description': 'A nice **description**'
+        })
+
+    def test_change_state(self):
+        self.callcount = 0
+
+        def github_status(build):
+            self.callcount += 1
+
+        with patch('odoo.addons.runbot.models.build.BuildResult._github_status', github_status):
+            self.callcount = 0
+            self.build.local_state = 'testing'
+
+            self.assertEqual(self.callcount, 0, "_github_status shouldn't have been called")
+
+            self.callcount = 0
+            self.build.local_state = 'running'
+
+            self.assertEqual(self.callcount, 1, "_github_status should have been called")
+
+            self.callcount = 0
+            self.build.local_state = 'done'
+            self.assertEqual(self.callcount, 0, "_github_status shouldn't have been called")
@@ -214,7 +214,6 @@ class TestBuildConfigStepCreate(TestBuildConfigStepCommon):
     def test_config_step_create_results(self):
         """ Test child builds are taken into account"""

-
         self.config_step._run_create_build(self.parent_build, '/tmp/essai')
         self.assertEqual(len(self.parent_build.children_ids), 2, 'Two sub-builds should have been generated')

@@ -224,6 +223,7 @@ class TestBuildConfigStepCreate(TestBuildConfigStepCommon):
         child_build.local_result = 'ko'
         self.assertEqual(child_build.global_result, 'ko')

+
         self.assertEqual(self.parent_build.global_result, 'ko')

     def test_config_step_create(self):
@@ -236,6 +236,7 @@ class TestBuildConfigStepCreate(TestBuildConfigStepCommon):
         for child_build in self.parent_build.children_ids:
             self.assertTrue(child_build.orphan_result, 'An orphan result config step should mark the build as orphan_result')
             child_build.local_result = 'ko'
+            # child_build._update_:globals()

         self.assertEqual(self.parent_build.global_result, 'ok')

@@ -455,19 +456,19 @@ class TestBuildConfigStep(TestBuildConfigStepCommon):

         self.patchers['docker_run'].side_effect = docker_run

-        config_step._run_step(self.parent_build, 'dev/null/logpath')
+        config_step._run_step(self.parent_build, 'dev/null/logpath')()

         assert_db_name = 'custom_build'
         parent_build_params = self.parent_build.params_id.copy({'config_data': {'db_name': 'custom_build'}})
         parent_build = self.parent_build.copy({'params_id': parent_build_params.id})
-        config_step._run_step(parent_build, 'dev/null/logpath')
+        config_step._run_step(parent_build, 'dev/null/logpath')()

         config_step = self.ConfigStep.create({
             'name': 'run_test',
             'job_type': 'run_odoo',
             'custom_db_name': 'custom',
         })
-        config_step._run_step(parent_build, 'dev/null/logpath')
+        config_step._run_step(parent_build, 'dev/null/logpath')()

         self.assertEqual(call_count, 3)

@@ -489,7 +490,7 @@ docker_params = dict(cmd=cmd)
             self.assertIn('-d test_database', run_cmd)

         self.patchers['docker_run'].side_effect = docker_run
-        config_step._run_step(self.parent_build, 'dev/null/logpath')
+        config_step._run_step(self.parent_build, 'dev/null/logpath')()
         self.patchers['docker_run'].assert_called_once()
         db = self.env['runbot.database'].search([('name', '=', 'test_database')])
         self.assertEqual(db.build_id, self.parent_build)
@@ -525,7 +526,7 @@ def run():
             call_count += 1

         self.patchers['docker_run'].side_effect = docker_run
-        config_step._run_step(self.parent_build, 'dev/null/logpath')
+        config_step._run_step(self.parent_build, 'dev/null/logpath')()

         self.assertEqual(call_count, 1)

@@ -564,10 +565,14 @@ Initiating shutdown
         })
         logs = []
         with patch('builtins.open', mock_open(read_data=file_content)):
-            result = config_step._make_results(build)
-        self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ok'})
+            config_step._make_results(build)
+        self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
         self.assertEqual(logs, [('INFO', 'Getting results for build %s' % build.dest)])
+        self.assertEqual(build.local_result, 'ok')
         # no shutdown
+        build = self.Build.create({
+            'params_id': self.base_params.id,
+        })
         logs = []
         file_content = """
 Loading stuff
@ -575,26 +580,34 @@ odoo.stuff.modules.loading: Modules loaded.
|
|||||||
Some post install stuff
|
Some post install stuff
|
||||||
"""
|
"""
|
||||||
with patch('builtins.open', mock_open(read_data=file_content)):
|
with patch('builtins.open', mock_open(read_data=file_content)):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'})
|
self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
|
||||||
|
self.assertEqual(build.local_result, 'ko')
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest),
|
('INFO', 'Getting results for build %s' % build.dest),
|
||||||
('ERROR', 'No "Initiating shutdown" found in logs, maybe because of cpu limit.')
|
('ERROR', 'No "Initiating shutdown" found in logs, maybe because of cpu limit.')
|
||||||
])
|
])
|
||||||
# no loaded
|
# no loaded
|
||||||
|
build = self.Build.create({
|
||||||
|
'params_id': self.base_params.id,
|
||||||
|
})
|
||||||
logs = []
|
logs = []
|
||||||
file_content = """
|
file_content = """
|
||||||
Loading stuff
|
Loading stuff
|
||||||
"""
|
"""
|
||||||
with patch('builtins.open', mock_open(read_data=file_content)):
|
with patch('builtins.open', mock_open(read_data=file_content)):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'})
|
self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
|
||||||
|
self.assertEqual(build.local_result, 'ko')
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest),
|
('INFO', 'Getting results for build %s' % build.dest),
|
||||||
('ERROR', 'Modules loaded not found in logs')
|
('ERROR', 'Modules loaded not found in logs')
|
||||||
])
|
])
|
||||||
|
|
||||||
# traceback
|
# traceback
|
||||||
|
build = self.Build.create({
|
||||||
|
'params_id': self.base_params.id,
|
||||||
|
})
|
||||||
logs = []
|
logs = []
|
||||||
file_content = """
|
file_content = """
|
||||||
Loading stuff
|
Loading stuff
|
||||||
@ -607,14 +620,18 @@ File "x.py", line a, in test_
|
|||||||
Initiating shutdown
|
Initiating shutdown
|
||||||
"""
|
"""
|
||||||
with patch('builtins.open', mock_open(read_data=file_content)):
|
with patch('builtins.open', mock_open(read_data=file_content)):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'ko'})
|
self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
|
||||||
|
self.assertEqual(build.local_result, 'ko')
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest),
|
('INFO', 'Getting results for build %s' % build.dest),
|
||||||
('ERROR', 'Error or traceback found in logs')
|
('ERROR', 'Error or traceback found in logs')
|
||||||
])
|
])
|
||||||
|
|
||||||
# warning in logs
|
# warning in logs
|
||||||
|
build = self.Build.create({
|
||||||
|
'params_id': self.base_params.id,
|
||||||
|
})
|
||||||
logs = []
|
logs = []
|
||||||
file_content = """
|
file_content = """
|
||||||
Loading stuff
|
Loading stuff
|
||||||
@ -624,8 +641,9 @@ Some post install stuff
|
|||||||
Initiating shutdown
|
Initiating shutdown
|
||||||
"""
|
"""
|
||||||
with patch('builtins.open', mock_open(read_data=file_content)):
|
with patch('builtins.open', mock_open(read_data=file_content)):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'warn'})
|
self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
|
||||||
|
self.assertEqual(build.local_result, 'warn')
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest),
|
('INFO', 'Getting results for build %s' % build.dest),
|
||||||
('WARNING', 'Warning found in logs')
|
('WARNING', 'Warning found in logs')
|
||||||
@ -634,15 +652,18 @@ Initiating shutdown
|
|||||||
# no log file
|
# no log file
|
||||||
logs = []
|
logs = []
|
||||||
self.patchers['isfile'].return_value = False
|
self.patchers['isfile'].return_value = False
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
|
|
||||||
self.assertEqual(result, {'local_result': 'ko'})
|
self.assertEqual(build.local_result, 'ko')
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest),
|
('INFO', 'Getting results for build %s' % build.dest),
|
||||||
('ERROR', 'Log file not found at the end of test job')
|
('ERROR', 'Log file not found at the end of test job')
|
||||||
])
|
])
|
||||||
|
|
||||||
# no error but build was already in warn
|
# no error but build was already in warn
|
||||||
|
build = self.Build.create({
|
||||||
|
'params_id': self.base_params.id,
|
||||||
|
})
|
||||||
logs = []
|
logs = []
|
||||||
file_content = """
|
file_content = """
|
||||||
Loading stuff
|
Loading stuff
|
||||||
@ -653,11 +674,12 @@ Initiating shutdown
|
|||||||
self.patchers['isfile'].return_value = True
|
self.patchers['isfile'].return_value = True
|
||||||
build.local_result = 'warn'
|
build.local_result = 'warn'
|
||||||
with patch('builtins.open', mock_open(read_data=file_content)):
|
with patch('builtins.open', mock_open(read_data=file_content)):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(logs, [
|
self.assertEqual(logs, [
|
||||||
('INFO', 'Getting results for build %s' % build.dest)
|
('INFO', 'Getting results for build %s' % build.dest)
|
||||||
])
|
])
|
||||||
self.assertEqual(result, {'job_end': '1970-01-01 02:00:00', 'local_result': 'warn'})
|
self.assertEqual(str(build.job_end), '1970-01-01 02:00:00')
|
||||||
|
self.assertEqual(build.local_result, 'warn')
|
||||||
|
|
||||||
@patch('odoo.addons.runbot.models.build_config.ConfigStep._make_tests_results')
|
@patch('odoo.addons.runbot.models.build_config.ConfigStep._make_tests_results')
|
||||||
def test_make_python_result(self, mock_make_tests_results):
|
def test_make_python_result(self, mock_make_tests_results):
|
||||||
@ -672,18 +694,18 @@ Initiating shutdown
|
|||||||
})
|
})
|
||||||
build.local_state = 'testing'
|
build.local_state = 'testing'
|
||||||
self.patchers['isfile'].return_value = False
|
self.patchers['isfile'].return_value = False
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'local_result': 'ok'})
|
self.assertEqual(build.local_result, 'ok')
|
||||||
|
|
||||||
# invalid result code (no return_value set)
|
# invalid result code (no return_value set)
|
||||||
config_step.python_result_code = """a = 2*5\nr = {'a': 'ok'}\nreturn_value = 'ko'"""
|
config_step.python_result_code = """a = 2*5\nr = {'a': 'ok'}\nreturn_value = 'ko'"""
|
||||||
with self.assertRaises(RunbotException):
|
with self.assertRaises(RunbotException):
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
|
|
||||||
# no result defined
|
# no result defined
|
||||||
config_step.python_result_code = ""
|
config_step.python_result_code = ""
|
||||||
mock_make_tests_results.return_value = {'local_result': 'warning'}
|
mock_make_tests_results.return_value = {'local_result': 'warn'}
|
||||||
result = config_step._make_results(build)
|
config_step._make_results(build)
|
||||||
self.assertEqual(result, {'local_result': 'warning'})
|
self.assertEqual(build.local_result, 'warn')
|
||||||
|
|
||||||
# TODO add generic test to copy_paste _run_* in a python step
|
# TODO add generic test to copy_paste _run_* in a python step
|
||||||
|
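All of these hunks follow from one change: `_make_results` now writes `job_end` and `local_result` directly on the build instead of returning a values dict for the caller to apply. A reduced, runnable illustration (the `Build` class below is a stand-in for the runbot model, not its real API):

    # Sketch: the step assigns fields itself; nothing is returned.
    class Build:
        def __init__(self):
            self.job_end = None
            self.local_result = 'ok'

    def make_results(build, shutdown_found):
        build.job_end = '1970-01-01 02:00:00'  # parsed from the log file in runbot
        build.local_result = 'ok' if shutdown_found else 'ko'

    build = Build()
    make_results(build, shutdown_found=False)
    assert build.local_result == 'ko' and build.job_end is not None

This is the "direct write" cleanup the refactoring aims for: with the dict plumbing gone, the tests can assert on the record itself.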
@@ -33,8 +33,10 @@ class TestBuildError(RunbotCase):

     def test_build_scan(self):
         IrLog = self.env['ir.logging']
-        ko_build = self.create_test_build({'local_result': 'ko'})
-        ok_build = self.create_test_build({'local_result': 'ok'})
+        ko_build = self.create_test_build({'local_result': 'ok', 'local_state': 'testing'})
+        ok_build = self.create_test_build({'local_result': 'ok', 'local_state': 'running'})


         error_team = self.BuildErrorTeam.create({
             'name': 'test-error-team',
@@ -56,6 +58,10 @@ class TestBuildError(RunbotCase):
         IrLog.create(log)
         log.update({'build_id': ok_build.id})
         IrLog.create(log)

+        self.assertEqual(ko_build.local_result, 'ko', 'Testing build should have gone ko after error log')
+        self.assertEqual(ok_build.local_result, 'ok', 'Running build should not have gone ko after error log')

         ko_build._parse_logs()
         ok_build._parse_logs()
         build_error = self.BuildError.search([('build_ids', 'in', [ko_build.id])])
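The reworked test starts both builds with `local_result='ok'` and lets the `ir.logging` write itself flip the result, so what it really pins down is a state-dependent rule. As a pure function (an assumption read off the test, not copied from the implementation):

    # Sketch: an error log only degrades a build that is still testing.
    def result_after_error_log(local_state, local_result):
        if local_state == 'testing':
            return 'ko'
        return local_result  # a running build keeps its result

    assert result_after_error_log('testing', 'ok') == 'ko'
    assert result_after_error_log('running', 'ok') == 'ok'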
@@ -65,7 +65,7 @@ class TestHost(RunbotCase):
         # check that local logs are inserted in leader ir.logging
         logs = fetch_local_logs_return_value(build_dest=build.dest)
         self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', logs)
-        self.test_host.process_logs()
+        self.test_host._process_logs()
         self.patchers['host_local_pg_cursor'].assert_called()
         self.assertEqual(
             self.env['ir.logging'].search_count([
@@ -78,7 +78,7 @@ class TestHost(RunbotCase):
         # check that a warn log sets the build in warning
         logs = fetch_local_logs_return_value(nb_logs=1, build_dest=build.dest, level='WARNING')
         self.patchers['fetch_local_logs'].return_value = logs
-        self.test_host.process_logs()
+        self.test_host._process_logs()
         self.patchers['host_local_pg_cursor'].assert_called()
         self.assertEqual(
             self.env['ir.logging'].search_count([
@@ -88,12 +88,12 @@ class TestHost(RunbotCase):
             ]),
             1,
         )
-        self.assertEqual(build.triggered_result, 'warn', 'A warning log should sets the build in warn')
+        self.assertEqual(build.local_result, 'warn', 'A warning log should sets the build in warn')

         # now check that error logs sets the build in ko
         logs = fetch_local_logs_return_value(nb_logs=1, build_dest=build.dest, level='ERROR')
         self.patchers['fetch_local_logs'].return_value = logs
-        self.test_host.process_logs()
+        self.test_host._process_logs()
         self.patchers['host_local_pg_cursor'].assert_called()
         self.assertEqual(
             self.env['ir.logging'].search_count([
@@ -103,11 +103,11 @@ class TestHost(RunbotCase):
             ]),
             1,
         )
-        self.assertEqual(build.triggered_result, 'ko', 'An error log should sets the build in ko')
+        self.assertEqual(build.local_result, 'ko', 'An error log should sets the build in ko')

         build.log_counter = 10
         # Test log limit
         logs = fetch_local_logs_return_value(nb_logs=11, message='test log limit', build_dest=build.dest)
         self.patchers['fetch_local_logs'].return_value = logs
-        self.test_host.process_logs()
+        self.test_host._process_logs()
         self.patchers['host_local_pg_cursor'].assert_called()
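`process_logs` becomes the private `_process_logs`, and with the intermediate `triggered_result` field removed the assertions now target `local_result` directly. A hypothetical sketch of the level-to-result mapping these assertions imply (the severity order and the merge rule are assumed, not taken from the diff):

    # Sketch: a processed log line can only make the result more severe.
    ORDER = ['ok', 'warn', 'ko']
    LEVEL_TO_RESULT = {'WARNING': 'warn', 'ERROR': 'ko'}

    def merge_result(current, level):
        candidate = LEVEL_TO_RESULT.get(level, 'ok')
        return max(current or 'ok', candidate, key=ORDER.index)

    assert merge_result('ok', 'WARNING') == 'warn'
    assert merge_result('warn', 'ERROR') == 'ko'
    assert merge_result('ko', 'WARNING') == 'ko'  # never downgraded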
@@ -23,6 +23,7 @@ class TestSchedule(RunbotCase):

         build = self.Build.create({
             'local_state': 'testing',
+            'global_state': 'testing',
             'port': '1234',
             'host': host.name,
             'job_start': datetime.datetime.now(),
@@ -33,9 +34,10 @@ class TestSchedule(RunbotCase):
         self.assertEqual(build.local_state, 'testing')
         build._schedule() # too fast, docker not started
         self.assertEqual(build.local_state, 'testing')
+        self.assertEqual(build.local_result, 'ok')

         self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', []) # the local logs have to be empty
         build.write({'job_start': datetime.datetime.now() - datetime.timedelta(seconds=70)}) # docker never started
         build._schedule()
         self.assertEqual(build.local_state, 'done')
-        self.assertEqual(build.local_result, 'ok')
+        self.assertEqual(build.local_result, 'ko')
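This hunk pins down the fix for the infinite-loop case: a testing build whose docker never appears is now ended in 'ko' instead of being retried forever. A runnable toy of that rule (the 60-second grace period is inferred from the test's 70-second offset, not read from the implementation):

    # Sketch: give the docker a grace period, then fail the build.
    def schedule_outcome(seconds_since_job_start, docker_exists, grace=60):
        if docker_exists:
            return ('testing', 'ok')
        if seconds_since_job_start <= grace:
            return ('testing', 'ok')  # too fast, docker simply not up yet
        return ('done', 'ko')         # docker never started: fail the build

    assert schedule_outcome(5, False) == ('testing', 'ok')
    assert schedule_outcome(70, False) == ('done', 'ko')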
@@ -1,6 +1,6 @@
 import getpass
 import logging
-import getpass
+from unittest.mock import patch, mock_open
 from odoo.exceptions import UserError
 from odoo.tools import mute_logger
 from .common import RunbotCase
@@ -270,9 +270,9 @@ class TestUpgradeFlow(RunbotCase):
         batch = self.master_bundle._force()
         batch._prepare()
         upgrade_current_build = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade_server).build_id
-        host = self.env['runbot.host']._get_current()
-        upgrade_current_build.host = host.name
-        upgrade_current_build._init_pendings(host)
+        #host = self.env['runbot.host']._get_current()
+        #upgrade_current_build.host = host.name
+        upgrade_current_build._schedule()
         self.start_patcher('fetch_local_logs', 'odoo.addons.runbot.models.host.Host._fetch_local_logs', []) # the local logs have to be empty
         upgrade_current_build._schedule()
         self.assertEqual(upgrade_current_build.local_state, 'done')
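Throughout this file, `_init_pendings(host)` disappears in favour of `_schedule()`, which hands the docker-starting callable back to its caller so that a commit can happen before the container starts. A runnable toy of the resulting scheduler loop (everything except the `_schedule` name is invented for the sketch):

    # Sketch: gather callables, commit, only then start the dockers.
    class FakeBuild:
        def _schedule(self):
            return lambda: print('docker started')

    def scheduler_loop(builds, commit):
        callbacks = [cb for build in builds if callable(cb := build._schedule())]
        commit()  # persist every state change first
        for callback in callbacks:
            callback()  # a crash before this point can no longer orphan a docker

    scheduler_loop([FakeBuild(), FakeBuild()], commit=lambda: print('commit'))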
@@ -296,9 +296,8 @@ class TestUpgradeFlow(RunbotCase):

         # upgrade repos tests
         upgrade_build = batch.slot_ids.filtered(lambda slot: slot.trigger_id == self.trigger_upgrade).build_id
-        host = self.env['runbot.host']._get_current()
-        upgrade_build.host = host.name
-        upgrade_build._init_pendings(host)
+        #upgrade_build.host = host.name
+        upgrade_build._schedule()
         upgrade_build._schedule()
         self.assertEqual(upgrade_build.local_state, 'done')
         self.assertEqual(len(upgrade_build.children_ids), 2)
@@ -337,9 +336,8 @@ class TestUpgradeFlow(RunbotCase):
         batch = self.master_bundle._force(self.nightly_category.id)
         batch._prepare()
         upgrade_nightly = batch.slot_ids.filtered(lambda slot: slot.trigger_id == trigger_upgrade_addons_nightly).build_id
-        host = self.env['runbot.host']._get_current()
-        upgrade_nightly.host = host.name
-        upgrade_nightly._init_pendings(host)
+        #upgrade_nightly.host = host.name
+        upgrade_nightly._schedule()
         upgrade_nightly._schedule()
         to_version_builds = upgrade_nightly.children_ids
         self.assertEqual(upgrade_nightly.local_state, 'done')
@@ -352,10 +350,15 @@ class TestUpgradeFlow(RunbotCase):
             to_version_builds.mapped('params_id.upgrade_from_build_id.params_id.version_id.name'),
             []
         )
-        to_version_builds.host = host.name
-        to_version_builds._init_pendings(host)
-        to_version_builds._schedule()
-        self.assertEqual(to_version_builds.mapped('local_state'), ['done']*4)
+        #to_version_builds.host = host.name
+        for build in to_version_builds:
+            build._schedule() # starts builds
+            self.assertEqual(build.local_state, 'testing')
+            build._schedule() # makes result and end build
+            self.assertEqual(build.local_state, 'done')

+        self.assertEqual(to_version_builds.mapped('global_state'), ['done', 'waiting', 'waiting', 'waiting'], 'One build have no child, other should wait for children')

         from_version_builds = to_version_builds.children_ids
         self.assertEqual(
             [
@@ -367,10 +370,15 @@ class TestUpgradeFlow(RunbotCase):
             ],
             ['11.0->12.0', 'saas-11.3->12.0', '12.0->13.0', 'saas-12.3->13.0', '13.0->master', 'saas-13.1->master', 'saas-13.2->master', 'saas-13.3->master']
         )
-        from_version_builds.host = host.name
-        from_version_builds._init_pendings(host)
-        from_version_builds._schedule()
-        self.assertEqual(from_version_builds.mapped('local_state'), ['done']*8)
+        #from_version_builds.host = host.name
+        for build in from_version_builds:
+            build._schedule()
+            self.assertEqual(build.local_state, 'testing')
+            build._schedule()
+            self.assertEqual(build.local_state, 'done')

+        self.assertEqual(from_version_builds.mapped('global_state'), ['waiting'] * 8)

         db_builds = from_version_builds.children_ids
         self.assertEqual(len(db_builds), 40)

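The recordset-wide calls are unrolled into per-build loops that call `_schedule()` twice: once to start the job and once to make the result and end the build. That two-call convention as a runnable toy (states simplified; in the tests the docker state is mocked to 'END' so the second call can finish):

    # Sketch: first call starts, second call finalizes.
    class ToyBuild:
        def __init__(self):
            self.local_state = 'pending'

        def _schedule(self):
            if self.local_state == 'pending':
                self.local_state = 'testing'
                return lambda: None  # would start the docker
            self.local_state = 'done'  # reached once the docker has ended

    build = ToyBuild()
    build._schedule()
    assert build.local_state == 'testing'
    build._schedule()
    assert build.local_state == 'done'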
@@ -405,61 +413,74 @@ class TestUpgradeFlow(RunbotCase):
             [b.params_id.dump_db.db_suffix for b in b133_master],
             ['account', 'l10n_be', 'l10n_ch', 'mail', 'stock'] # is this order ok?
         )
+        current_build = db_builds[0]
+        for current_build in db_builds:
+            self.start_patcher('docker_state', 'odoo.addons.runbot.models.build.docker_state', 'END')

-        first_build = db_builds[0]
+            suffix = current_build.params_id.dump_db.db_suffix
+            source_dest = current_build.params_id.dump_db.build_id.dest

-        self.start_patcher('docker_state', 'odoo.addons.runbot.models.build.docker_state', 'END')
+            def docker_run_restore(cmd, *args, **kwargs):
+                dump_url = f'http://host.runbot.com/runbot/static/build/{source_dest}/logs/{source_dest}-{suffix}.zip'
+                zip_name = f'{source_dest}-{suffix}.zip'
+                db_name = f'{current_build.dest}-{suffix}'
+                self.assertEqual(
+                    str(cmd).split(' && '),
+                    [
+                        'mkdir /data/build/restore',
+                        'cd /data/build/restore',
+                        f'wget {dump_url}',
+                        f'unzip -q {zip_name}',
+                        'echo "### restoring filestore"',
+                        f'mkdir -p /data/build/datadir/filestore/{db_name}',
+                        f'mv filestore/* /data/build/datadir/filestore/{db_name}',
+                        'echo "### restoring db"',
+                        f'psql -q {db_name} < dump.sql',
+                        'cd /data/build',
+                        'echo "### cleaning"',
+                        'rm -r restore',
+                        'echo "### listing modules"',
+                        f'psql {db_name} -c "select name from ir_module_module where state = \'installed\'" -t -A > /data/build/logs/restore_modules_installed.txt',
+                        'echo "### restore" "successful"'
+                    ]
+                )
+            self.patchers['docker_run'].side_effect = docker_run_restore
+            #current_build.host = host.name
+            current_build._schedule()()
+            self.patchers['docker_run'].assert_called()

-        def docker_run_restore(cmd, *args, **kwargs):
-            source_dest = first_build.params_id.dump_db.build_id.dest
-            dump_url='http://host.runbot.com/runbot/static/build/%s/logs/%s-account.zip' % (source_dest, source_dest)
-            zip_name='%s-account.zip' % source_dest
-            db_name='%s-master-account' % str(first_build.id).zfill(5)
-            self.assertEqual(
-                str(cmd).split(' && '),
-                [
-                    'mkdir /data/build/restore',
-                    'cd /data/build/restore',
-                    f'wget {dump_url}',
-                    f'unzip -q {zip_name}',
-                    'echo "### restoring filestore"',
-                    f'mkdir -p /data/build/datadir/filestore/{db_name}',
-                    f'mv filestore/* /data/build/datadir/filestore/{db_name}',
-                    'echo "### restoring db"',
-                    f'psql -q {db_name} < dump.sql',
-                    'cd /data/build',
-                    'echo "### cleaning"',
-                    'rm -r restore',
-                    'echo "### listing modules"',
-                    f'psql {db_name} -c "select name from ir_module_module where state = \'installed\'" -t -A > /data/build/logs/restore_modules_installed.txt',
-                    'echo "### restore" "successful"'
-                ]
-            )
-        self.patchers['docker_run'].side_effect = docker_run_restore
-        first_build.host = host.name
-        first_build._init_pendings(host)
-        self.patchers['docker_run'].assert_called()
+            def docker_run_upgrade(cmd, *args, ro_volumes=False, **kwargs):
+                user = getpass.getuser()
+                self.assertTrue(ro_volumes.pop(f'/home/{user}/.odoorc').startswith('/tmp/runbot_test/static/build/'))
+                self.assertEqual(
+                    list(ro_volumes.keys()), [
+                        '/data/build/addons',
+                        '/data/build/server',
+                        '/data/build/upgrade',
+                    ],
+                    "other commit should have been added automaticaly"
+                )
+                self.assertEqual(
+                    str(cmd),
+                    'python3 server/server.py {addons_path} --no-xmlrpcs --no-netrpc -u all -d {db_name} --stop-after-init --max-cron-threads=0'.format(
+                        addons_path='--addons-path addons,server/addons,server/core/addons',
+                        db_name=f'{current_build.dest}-{suffix}')
+                )
+            self.patchers['docker_run'].side_effect = docker_run_upgrade
+            current_build._schedule()()

-        def docker_run_upgrade(cmd, *args, ro_volumes=False, **kwargs):
-            user = getpass.getuser()
-            self.assertTrue(ro_volumes.pop(f'/home/{user}/.odoorc').startswith('/tmp/runbot_test/static/build/'))
-            self.assertEqual(
-                ro_volumes, {
-                    '/data/build/addons': '/tmp/runbot_test/static/sources/addons/addons120',
-                    '/data/build/server': '/tmp/runbot_test/static/sources/server/server120',
-                    '/data/build/upgrade': '/tmp/runbot_test/static/sources/upgrade/123abc789',
-                },
-                "other commit should have been added automaticaly"
-            )
-            self.assertEqual(
-                str(cmd),
-                'python3 server/server.py {addons_path} --no-xmlrpcs --no-netrpc -u all -d {db_name} --stop-after-init --max-cron-threads=0'.format(
-                    addons_path='--addons-path addons,server/addons,server/core/addons',
-                    db_name='%s-master-account' % str(first_build.id).zfill(5))
-            )
-        self.patchers['docker_run'].side_effect = docker_run_upgrade
-        first_build._schedule()
-        self.assertEqual(self.patchers['docker_run'].call_count, 2)
+            with patch('builtins.open', mock_open(read_data='')):
+                current_build._schedule()
+            self.assertEqual(current_build.local_state, 'done')
+            self.assertEqual(current_build.global_state, 'done')
+            # self.assertEqual(current_build.global_result, 'ok')
+
+        self.assertEqual(self.patchers['docker_run'].call_count, 80)
+
+        self.assertEqual(from_version_builds.mapped('global_state'), ['done'] * 8)
+        self.assertEqual(to_version_builds.mapped('global_state'), ['done'] * 4)

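The loop above drives each database build through restore and upgrade by swapping `side_effect` functions on the patched `docker_run`; each side effect asserts on the exact command it receives, and the docker is only started through the callable returned by `_schedule()`. The pattern reduced to a standalone toy:

    # Sketch: a side_effect that validates the command passed to the mock.
    from unittest.mock import MagicMock

    docker_run = MagicMock()

    def docker_run_restore(cmd, *args, **kwargs):
        assert 'psql' in cmd, 'the restore command should pipe the dump into psql'

    docker_run.side_effect = docker_run_restore
    docker_run('psql -q some-db < dump.sql')  # in the tests: current_build._schedule()()
    docker_run.assert_called_once()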
         # test_build_references
         batch = self.master_bundle._force()
@@ -520,12 +541,12 @@ class TestUpgradeFlow(RunbotCase):
         batch13 = bundle_13._force()
         batch13._prepare()
         upgrade_complement_build_13 = batch13.slot_ids.filtered(lambda slot: slot.trigger_id == trigger_upgrade_complement).build_id
-        upgrade_complement_build_13.host = host.name
+        # upgrade_complement_build_13.host = host.name
         self.assertEqual(upgrade_complement_build_13.params_id.config_id, config_upgrade_complement)
         for db in ['base', 'all', 'no-demo-all']:
             upgrade_complement_build_13.database_ids = [(0, 0, {'name': '%s-%s' % (upgrade_complement_build_13.dest, db)})]

-        upgrade_complement_build_13._init_pendings(host)
+        upgrade_complement_build_13._schedule()

         self.assertEqual(len(upgrade_complement_build_13.children_ids), 5)
         master_child = upgrade_complement_build_13.children_ids[0]
@@ -534,6 +555,7 @@ class TestUpgradeFlow(RunbotCase):
         self.assertEqual(master_child.params_id.config_id, self.test_upgrade_config)
         self.assertEqual(master_child.params_id.upgrade_to_build_id.params_id.version_id.name, 'master')


 class TestUpgrade(RunbotCase):

     def test_exceptions_in_env(self):
@@ -68,7 +68,6 @@
                     <field name="requested_action" groups="base.group_no_one"/>
                     <field name="local_result"/>
                     <field name="global_result"/>
-                    <field name="triggered_result" groups="base.group_no_one"/>
                     <field name="host"/>
                     <field name="host_id"/>
                     <field name="job_start" groups="base.group_no_one"/>
@@ -61,7 +61,6 @@
                     <field name="python_code" widget="ace" options="{'mode': 'python'}"/>
                     <field name="python_result_code" widget="ace" options="{'mode': 'python'}"/>
                     <field name="running_job"/>
-                    <field name="ignore_triggered_result"/>
                 </group>
                 <group string="Test settings" attrs="{'invisible': [('job_type', 'not in', ('python', 'install_odoo'))]}">
                     <field name="create_db" groups="base.group_no_one"/>