Mirror of https://github.com/odoo/runbot.git

[REL] adapt for 16.0

commit 45104b635f, parent d3f998f88c
@@ -108,16 +108,15 @@ class BuildParameters(models.Model):
         params.commit_ids = params.commit_link_ids.commit_id

     @api.model_create_multi
-    def create(self, values_list):
+    def create(self, vals_list):
         records = self.browse()
-        for values in values_list:
-            params = self.new(values)
+        for vals in vals_list:
+            params = self.new(vals)
             record = self._find_existing(params.fingerprint)
             if record:
                 records |= record
             else:
-                values = self._convert_to_write(params._cache)
-                records |= super().create(values)
+                records |= super().create(self._convert_to_write(params._cache))
         return records

     def _find_existing(self, fingerprint):
@@ -214,7 +213,7 @@ class BuildResult(models.Model):
     # -> build_link ?

     parent_id = fields.Many2one('runbot.build', 'Parent Build', index=True)
-    parent_path = fields.Char('Parent path', index=True)
+    parent_path = fields.Char('Parent path', index=True, unaccent=False)
     top_parent = fields.Many2one('runbot.build', compute='_compute_top_parent')
     ancestors = fields.Many2many('runbot.build', compute='_compute_ancestors')
     # should we add a has children stored boolean?
@@ -815,6 +814,7 @@ class BuildResult(models.Model):
         ro_volumes[f'/home/{user}/.odoorc'] = self._path('.odoorc')
         kwargs.pop('build_dir', False) # todo check python steps
         build_dir = self._path()
+        self.env.flush_all()
         def start_docker():
             docker_run(cmd=cmd, build_dir=build_dir, ro_volumes=ro_volumes, **kwargs)
         return start_docker
@@ -59,9 +59,9 @@ class Config(models.Model):
     group = fields.Many2one('runbot.build.config', 'Configuration group', help="Group of config's and config steps")
     group_name = fields.Char('Group name', related='group.name')

-    @api.model_create_single
-    def create(self, values):
-        res = super(Config, self).create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        res = super(Config, self).create(vals_list)
         res._check_step_ids_order()
         return res

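For reference, the decorator change above (and the similar ones in the following hunks) follows the batch-create convention that Odoo 16.0 applies consistently: create() receives a list of value dicts and returns the recordset of all created records. A minimal sketch of the target shape, using a hypothetical model that is not part of this commit:

    from odoo import api, fields, models

    class ExampleModel(models.Model):
        _name = 'example.model'  # hypothetical model, for illustration only
        _description = 'Batch create example'

        name = fields.Char()

        @api.model_create_multi
        def create(self, vals_list):
            # vals_list is a list of value dicts; adjust entries, then delegate once
            for vals in vals_list:
                vals.setdefault('name', 'unnamed')
            return super().create(vals_list)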
@@ -85,19 +85,21 @@ class Config(models.Model):
         return [ordered_step.step_id for ordered_step in self.step_order_ids.sorted('sequence')]

     def _check_step_ids_order(self):
-        install_job = False
-        step_ids = self.step_ids()
-        for step in step_ids:
-            if step.job_type == 'install_odoo':
-                install_job = True
-            if step.job_type == 'run_odoo':
-                if step != step_ids[-1]:
-                    raise UserError('Jobs of type run_odoo should be the last one')
-        if not install_job:
-            raise UserError('Jobs of type run_odoo should be preceded by a job of type install_odoo')
-        self._check_recustion()
+        for record in self:
+            install_job = False
+            step_ids = record.step_ids()
+            for step in step_ids:
+                if step.job_type == 'install_odoo':
+                    install_job = True
+                if step.job_type == 'run_odoo':
+                    if step != step_ids[-1]:
+                        raise UserError('Jobs of type run_odoo should be the last one')
+            if not install_job:
+                raise UserError('Jobs of type run_odoo should be preceded by a job of type install_odoo')
+            record._check_recustion()

     def _check_recustion(self, visited=None):
         self.ensure_one()
         visited = visited or []
         recursion = False
         if self in visited:
@@ -236,10 +238,11 @@ class ConfigStep(models.Model):
         copy._write({'protected': False})
         return copy

-    @api.model_create_single
-    def create(self, values):
-        self._check(values)
-        return super(ConfigStep, self).create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        for vals in vals_list:
+            self._check(vals)
+        return super().create(vals_list)

     def write(self, values):
         self._check(values)
@@ -398,8 +401,6 @@ class ConfigStep(models.Model):

         docker_name = build._get_docker_name()
         build_port = build.port
-        self.env.cr.commit() # commit before docker run to be 100% sure that db state is consistent with dockers
-        self.invalidate_cache()
         self.env['runbot.runbot']._reload_nginx()
         return dict(cmd=cmd, log_path=log_path, container_name=docker_name, exposed_ports=[build_port, build_port + 1], ro_volumes=exports, env_variables=env_variables)

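Dropping the commit and invalidate_cache() here pairs with the build.py hunk above: the step no longer starts the container in the cursor-sensitive section; it returns a start_docker closure, and the scheduler commits before calling it (see the runbot.py hunk further down, where result() is invoked after self._commit()). A hedged sketch of that hand-off, with simplified, invented names:

    def make_start_docker(cmd, docker_run, **kwargs):
        # Illustrative only: wrap the docker invocation in a closure so the caller
        # can commit the transaction before the container actually starts.
        def start_docker():
            docker_run(cmd=cmd, **kwargs)
        return start_docker

    # caller side, roughly what the scheduler does:
    #   result = run_step()          # returns the closure instead of starting docker
    #   env.cr.commit()              # db state consistent before the container runs
    #   if callable(result):
    #       result()                 # start docker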
@@ -1146,12 +1147,13 @@ class ConfigStepOrder(models.Model):
     def _onchange_step_id(self):
         self.sequence = self.step_id.default_sequence

-    @api.model_create_single
-    def create(self, values):
-        if 'sequence' not in values and values.get('step_id'):
-            values['sequence'] = self.env['runbot.build.config.step'].browse(values.get('step_id')).default_sequence
-        if self.pool._init: # do not duplicate entry on install
-            existing = self.search([('sequence', '=', values.get('sequence')), ('config_id', '=', values.get('config_id')), ('step_id', '=', values.get('step_id'))])
-            if existing:
-                return
-        return super(ConfigStepOrder, self).create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        for vals in vals_list:
+            if 'sequence' not in vals and vals.get('step_id'):
+                vals['sequence'] = self.env['runbot.build.config.step'].browse(vals.get('step_id')).default_sequence
+            if self.pool._init: # do not duplicate entry on install
+                existing = self.search([('sequence', '=', vals.get('sequence')), ('config_id', '=', vals.get('config_id')), ('step_id', '=', vals.get('step_id'))])
+                if existing:
+                    return
+        return super().create(vals_list)
@@ -47,9 +47,9 @@ class BuildError(models.Model):
     children_build_ids = fields.Many2many('runbot.build', compute='_compute_children_build_ids', string='Children builds')
     error_history_ids = fields.Many2many('runbot.build.error', compute='_compute_error_history_ids', string='Old errors', context={'active_test': False})
     first_seen_build_id = fields.Many2one('runbot.build', compute='_compute_first_seen_build_id', string='First Seen build')
-    first_seen_date = fields.Datetime(string='First Seen Date', related='first_seen_build_id.create_date')
+    first_seen_date = fields.Datetime(string='First Seen Date', related='first_seen_build_id.create_date', depends=['first_seen_build_id'])
     last_seen_build_id = fields.Many2one('runbot.build', compute='_compute_last_seen_build_id', string='Last Seen build', store=True)
-    last_seen_date = fields.Datetime(string='Last Seen Date', related='last_seen_build_id.create_date', store=True)
+    last_seen_date = fields.Datetime(string='Last Seen Date', related='last_seen_build_id.create_date', store=True, depends=['last_seen_build_id'])
     test_tags = fields.Char(string='Test tags', help="Comma separated list of test_tags to use to reproduce/remove this error", tracking=True)

     @api.constrains('test_tags')
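The depends= arguments added above override the dependencies Odoo infers from the related path, tying recomputation of the related Datetime to the computed Many2one itself. A small illustration with invented model and field names:

    from odoo import fields, models

    class ExampleError(models.Model):
        _name = 'example.error'  # hypothetical model, for illustration only
        _description = 'Related depends example'

        build_id = fields.Many2one('runbot.build', string='Build')
        # depends= replaces the auto-inferred dependency on the related path,
        # so the value follows reassignments of build_id itself
        build_date = fields.Datetime(related='build_id.create_date', depends=['build_id'])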
@@ -163,6 +163,8 @@ class Bundle(models.Model):
     @api.depends_context('category_id')
     def _compute_last_done_batch(self):
         if self:
+            self.env['runbot.batch'].flush_model()
+            self.env['runbot.bundle'].flush_model()
             # self.env['runbot.batch'].flush()
         for bundle in self:
             bundle.last_done_batch = False
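flush() on records and models is replaced in 16.0 by the scoped calls used throughout this commit. A rough, illustrative mapping (not taken from the diff):

    def flush_examples(env, records):
        # Illustrative only: the 16.0 flush entry points used in this commit.
        records.flush_recordset()    # push pending updates of these records to the database
        records.flush_model()        # push pending updates of the whole model
        env.flush_all()              # push every pending update (used before docker_run above)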
@@ -15,9 +15,12 @@ class Database(models.Model):
         for record in self:
             record.db_suffix = record.name.replace('%s-' % record.build_id.dest, '')

-    @api.model_create_single
-    def create(self, values):
-        res = self.search([('name', '=', values['name']), ('build_id', '=', values['build_id'])])
-        if res:
-            return res
-        return super().create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        records = self.browse()
+        for vals in vals_list:
+            res = self.search([('name', '=', vals['name']), ('build_id', '=', vals['build_id'])])
+            if res:
+                records |= res
+            else:
+                records |= super().create(vals)
@@ -1,7 +1,7 @@
 import logging
 import re
 from odoo import models, fields, api
-from odoo.addons.base.models.qweb import QWebException
+from odoo.addons.base.models.ir_qweb import QWebException

 _logger = logging.getLogger(__name__)

@@ -37,7 +37,7 @@ class Dockerfile(models.Model):
     def _compute_dockerfile(self):
         for rec in self:
             try:
-                res = rec.template_id.sudo()._render() if rec.template_id else ''
+                res = rec.template_id._render_template(rec.template_id.id) if rec.template_id else ''
                 rec.dockerfile = re.sub(r'^\s*$', '', res, flags=re.M).strip()
             except QWebException:
                 rec.dockerfile = ''
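The two hunks above track the 16.0 reshuffle of QWeb in odoo/odoo: the exception class now lives in ir_qweb, and the Dockerfile template is rendered through the view's _render_template() as shown in the diff. A minimal usage sketch under those assumptions, with an invented helper name:

    from odoo.addons.base.models.ir_qweb import QWebException  # 16.0 import path

    def render_or_empty(view):
        # illustrative helper, not part of the commit
        try:
            return view._render_template(view.id)
        except QWebException:
            return ''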
@@ -56,11 +56,12 @@ class Host(models.Model):
         for host in self:
             host.build_ids = self.env['runbot.build'].search([('host', '=', host.name), ('local_state', '!=', 'done')])

-    @api.model_create_single
-    def create(self, values):
-        if 'disp_name' not in values:
-            values['disp_name'] = values['name']
-        return super().create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        for vals in vals_list:
+            if 'disp_name' not in vals:
+                vals['disp_name'] = vals['name']
+        return super().create(vals_list)

     def _bootstrap_local_logs_db(self):
         """ bootstrap a local database that will collect logs from builds """
@@ -19,8 +19,8 @@ class Project(models.Model):
     dummy_bundle_id = fields.Many2one('runbot.bundle', string='Dummy bundle')

     @api.model_create_multi
-    def create(self, create_values):
-        projects = super().create(create_values)
+    def create(self, vals_list):
+        projects = super().create(vals_list)
         base_bundle_values = []
         dummy_bundle_values = []
         for project in projects:
@@ -276,12 +276,12 @@ class Repo(models.Model):
     def set_hook_time(self, value):
         for repo in self:
             self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id})
-        self.invalidate_cache()
+        self.invalidate_recordset(['hook_time'])

     def set_ref_time(self, value):
         for repo in self:
             self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id})
-        self.invalidate_cache()
+        self.invalidate_recordset(['get_ref_time'])

     def _gc_times(self):
         self.env.cr.execute("""
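invalidate_cache() is replaced in 16.0 by scoped invalidation; the hunks above and the test changes below pick the record- or model-level variant as appropriate. A rough, illustrative mapping:

    def invalidation_examples(env, records):
        # Illustrative only: the 16.0 cache-invalidation entry points used in this commit.
        records.invalidate_recordset(['hook_time'])  # drop cached values of given fields on these records
        records.invalidate_model()                   # drop the whole model's cache
        env.invalidate_all()                         # drop everything (not used in this diff)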
@@ -21,7 +21,7 @@ from odoo.modules.module import get_module_resource

 _logger = logging.getLogger(__name__)

 # after this point, not realy a repo buisness

 class Runbot(models.AbstractModel):
     _name = 'runbot.runbot'
     _description = 'Base runbot model'
@@ -41,6 +41,7 @@ class Runbot(models.AbstractModel):
             self._commit()
         processed = 0
         for build in host.get_builds([('requested_action', 'in', ['wake_up', 'deathrow'])]):
+            build = build.browse(build.id)
             processed += 1
             build._process_requested_actions()
             self._commit()
@@ -56,6 +57,7 @@ class Runbot(models.AbstractModel):
                 self._commit()
             if callable(result):
                 result()  # start docker
                 self._commit()
         processed += self._assign_pending_builds(host, host.nb_worker, [('build_type', '!=', 'scheduled')])
         self._commit()
         processed += self._assign_pending_builds(host, host.nb_worker - 1 or host.nb_worker)
@@ -43,14 +43,15 @@ class RunbotTeam(models.Model):
     skip_team_pr = fields.Boolean('Skip team pr', help="Don't add codeowner if pr was created by a member of the team", tracking=True)
     skip_fw_pr = fields.Boolean('Skip forward-port pr', help="Don't add codeowner if pr is a forwardport, even when forced pushed", tracking=True)

-    @api.model_create_single
-    def create(self, values):
-        if 'dashboard_id' not in values or values['dashboard_id'] == False:
-            dashboard = self.env['runbot.dashboard'].search([('name', '=', values['name'])])
-            if not dashboard:
-                dashboard = dashboard.create({'name': values['name']})
-            values['dashboard_id'] = dashboard.id
-        return super().create(values)
+    @api.model_create_multi
+    def create(self, vals_list):
+        for vals in vals_list:
+            if 'dashboard_id' not in vals or vals['dashboard_id'] == False:
+                dashboard = self.env['runbot.dashboard'].search([('name', '=', vals['name'])])
+                if not dashboard:
+                    dashboard = dashboard.create({'name': vals['name']})
+                vals['dashboard_id'] = dashboard.id
+        return super().create(vals_list)

     @api.model
     def _get_team(self, file_path, repos=None):
@@ -201,13 +201,14 @@ class RunbotCase(TransactionCase):

     def start_patcher(self, patcher_name, patcher_path, return_value=DEFAULT, side_effect=DEFAULT, new=DEFAULT):

-        def stop_patcher_wrapper():
-            self.stop_patcher(patcher_name)
+        if patcher_name in self.patcher_objects:
+            raise Exception(f'Patcher {patcher_name} already started')

         patcher = patch(patcher_path, new=new)
+        self.patcher_objects[patcher_name] = patcher
         if not hasattr(patcher, 'is_local'):
             res = patcher.start()
-            self.addCleanup(stop_patcher_wrapper)
+            self.addCleanup(patcher.stop)
             self.patchers[patcher_name] = res
-            self.patcher_objects[patcher_name] = patcher
             if side_effect != DEFAULT:
@@ -50,8 +50,8 @@ class TestBranchRelations(RunbotCase):
         create_base('13.0')
         create_base('saas-13.1')
         self.last = create_base('saas-13.2')
-        self.env['runbot.bundle'].flush()
-        self.env['runbot.version'].flush()
+        self.env['runbot.bundle'].flush_model()
+        self.env['runbot.version'].flush_model()

     def test_relations_master_dev(self):
         b = self.Branch.create({
@@ -300,7 +300,7 @@ class TestBuildResult(RunbotCase):
         self.assertEqual(child_delta.days, 24)

         # test the real _local_cleanup method
-        self.stop_patcher('_local_cleanup_patcher')
+        self.patcher_objects['_local_cleanup_patcher'].stop()
         self.start_patcher('build_local_pgadmin_cursor_patcher', 'odoo.addons.runbot.models.build.local_pgadmin_cursor')
         self.start_patcher('build_path_patcher', 'odoo.addons.runbot.models.build.Path')
         dbname = '%s-foobar' % build.dest
@@ -374,10 +374,10 @@ class TestBuildResult(RunbotCase):
         self.assertEqual('pending', build1_1_1.global_state)
         self.assertEqual('pending', build1_1_2.global_state)

-        build1_2.flush()
+        build1_2.flush_recordset()
         with self.assertQueries(['''UPDATE "runbot_build" SET "global_state"=%s,"local_state"=%s,"write_date"=%s,"write_uid"=%s WHERE id IN %s''']):
             build1_2.local_state = "testing"
-            build1_2.flush()
+            build1_2.flush_recordset()

         self.assertEqual('testing', build1.global_state)
         self.assertEqual('testing', build1_2.global_state)
@@ -387,14 +387,14 @@ class TestBuildResult(RunbotCase):

         # with self.assertQueries(['''UPDATE "runbot_build" SET "global_state"=%s,"local_state"=%s,"write_date"=%s,"write_uid"=%s WHERE id IN %s''']):
         build1.local_state = 'done'
-        build1.flush()
+        build1.flush_recordset()

         self.assertEqual('waiting', build1.global_state)
         self.assertEqual('testing', build1_1.global_state)

         # with self.assertQueries([]): # write the same value, no update should be triggered
         build1.local_state = 'done'
-        build1.flush()
+        build1.flush_recordset()

         build1_1.local_state = 'done'

@@ -214,13 +214,12 @@ class TestBuildError(RunbotCase):
         self.additionnal_setup()
         bundle = self.env['runbot.bundle'].search([('project_id', '=', self.project.id)])
         bundle.last_batch.state = 'done'
-        bundle.flush()
         bundle._compute_last_done_batch() # force the recompute
         self.assertTrue(bool(bundle.last_done_batch.exists()))
         # simulate a failed build that we want to monitor
         failed_build = bundle.last_done_batch.slot_ids[0].build_id
         failed_build.global_result = 'ko'
-        failed_build.flush()
+        failed_build.flush_recordset()

         team = self.env['runbot.team'].create({'name': 'Test team'})
         dashboard = self.env['runbot.dashboard.tile'].create({
@@ -55,7 +55,7 @@ some garbage
 nothing to see here
 """
         self.start_patcher(
-            "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
+            "stats_file_exists", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
         )
         with patch("builtins.open", mock_open(read_data=file_content)):
             self.config_step._make_stats(self.build)
@@ -98,7 +98,7 @@ chocolate 15
         self.StatRegex.create({"name": "chocolate_count", "regex": r"(?P<key>chocolate) (?P<value>\d+)"})

         self.start_patcher(
-            "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
+            "stats_file_exists", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
         )
         with patch("builtins.open", mock_open(read_data=file_content)):
             self.config_step._make_stats(self.build)
@@ -127,7 +127,7 @@ chocolate 15
         log_data += noise_lines * 10000

         self.start_patcher(
-            "isdir", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
+            "stats_file_exists", "odoo.addons.runbot.models.build_stat_regex.os.path.exists", True
         )
         with patch("builtins.open", mock_open(read_data=log_data)):
             self.config_step._make_stats(self.build)
@@ -54,12 +54,15 @@ class TestCommitStatus(HttpCase):

         # 3. test that a non-existsing commit_status returns a 404
         # 3.1 find a non existing commit status id
-        non_existing_id = self.env['runbot.commit.status'].browse(50000).exists() or 50000
-        while self.env['runbot.commit.status'].browse(non_existing_id).exists():
-            non_existing_id += 1
+        non_existing_commit_status = self.env['runbot.commit.status'].create({
+            'commit_id': self.server_commit.id,
+            'context': 'ci/test',
+            'state': 'failure',
+        })
+        non_existing_commit_status.unlink()

         self.authenticate('runbot_admin', 'admin')
-        response = self.url_open('/runbot/commit/resend/%s' % non_existing_id)
+        response = self.url_open('/runbot/commit/resend/%s' % non_existing_commit_status.id)
         self.assertEqual(response.status_code, 404)

         #4.1 Test that a status not sent (with not sent_date) can be manually resend
@@ -297,7 +297,7 @@ class TestRepo(RunbotCaseMinimalSetup):
         self.assertEqual(repo1[field_name], 1.3)
         self.assertEqual(repo2[field_name], 1.4)

-        self.Repo.invalidate_cache()
+        self.Repo.invalidate_model()
         self.assertEqual(repo1[field_name], 1.3)
         self.assertEqual(repo2[field_name], 1.4)

@@ -414,7 +414,7 @@ class TestIdentityFile(RunbotCase):
     def test_identity_file(self):
         """test that the identity file is used in git command"""

-        self.stop_patcher('git_patcher')
+        self.patcher_objects['git_patcher'].stop()
         self.start_patcher('check_output_patcher', 'odoo.addons.runbot.models.repo.subprocess.check_output', new=self.check_output_helper())

         self.repo_server.identity_file = 'fake_identity'
@@ -466,8 +466,8 @@ class TestRepoScheduler(RunbotCase):
         host = self.env['runbot.host']._get_current()
         self.Runbot._scheduler(host)

-        build.invalidate_cache()
-        scheduled_build.invalidate_cache()
+        build.invalidate_recordset()
+        scheduled_build.invalidate_recordset()
         self.assertFalse(build.host)
         self.assertFalse(scheduled_build.host)

@ -265,7 +265,7 @@ class TestUpgradeFlow(RunbotCase):
|
||||
|
||||
with self.assertRaises(UserError):
|
||||
self.step_upgrade_server.job_type = 'install_odoo'
|
||||
self.trigger_upgrade_server.flush(['upgrade_step_id'])
|
||||
self.trigger_upgrade_server.flush_recordset(['upgrade_step_id'])
|
||||
|
||||
batch = self.master_bundle._force()
|
||||
batch._prepare()
|
||||
@@ -414,9 +414,9 @@ class TestUpgradeFlow(RunbotCase):
             ['account', 'l10n_be', 'l10n_ch', 'mail', 'stock'] # is this order ok?
         )
-        current_build = db_builds[0]
-        for current_build in db_builds:
-            self.start_patcher('docker_state', 'odoo.addons.runbot.models.build.docker_state', 'END')
+        self.start_patcher('docker_state', 'odoo.addons.runbot.models.build.docker_state', 'END')
+        for current_build in db_builds:
             suffix = current_build.params_id.dump_db.db_suffix
             source_dest = current_build.params_id.dump_db.build_id.dest

@@ -18,7 +18,7 @@
                         <field name="sequence" widget="handle"/>
                     </tree>
                 </field>
-                <field name="protected" groups="base.group_no_one"/>
+                <field name="protected"/>
                 <field name="group" groups="base.group_no_one"/>
             </group>
         </sheet>
@@ -44,7 +44,7 @@
                 <field name="domain_filter"/>
                 <field name="job_type"/>
                 <field name="make_stats"/>
-                <field name="protected" groups="base.group_no_one"/>
+                <field name="protected"/>
                 <field name="default_sequence" groups="base.group_no_one"/>
                 <field name="group" groups="base.group_no_one"/>
             </group>
@@ -43,7 +43,7 @@
         <field name="model">runbot.error.log</field>
         <field name="arch" type="xml">
             <tree string="Build Errors">
-                <button name="action_goto_build" type="object" icon="fa-external-link "/>
+                <button name="action_goto_build" type="object" icon="fa-external-link" title="View build"/>
                 <field name="build_id"/>
                 <field name="bundle_ids" widget="many2many_tags"/>
                 <field name="log_create_date"/>
@@ -5,7 +5,7 @@ from odoo.tools import mute_logger
 import logging
 _logger = logging.getLogger(__name__)

 # after this point, not realy a repo buisness

 class Runbot(models.AbstractModel):
     _inherit = 'runbot.runbot'

@@ -64,7 +64,7 @@ class Runbot(models.AbstractModel):
                     'is_pr': True,
                 })
                 count += 1
-                branch.flush()
+                branch.flush_recordset()

             if 'partial' in bundle.name:
                 break