Mirror of https://github.com/odoo/runbot.git, synced 2025-03-15 23:45:44 +07:00
[IMP] runbot: adapt tests to 13.0
This commit is contained in:
  parent 1dcea28f45
  commit 4e694ffefa
@@ -54,7 +54,7 @@ def time2str(t):

 def dt2time(datetime):
     """Convert datetime to time"""
-    return time.mktime(time.strptime(datetime, DEFAULT_SERVER_DATETIME_FORMAT))
+    return time.mktime(datetime.timetuple())


 def now():
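The dt2time() change reflects that by Odoo 13 the ORM hands Datetime values to Python code as datetime objects rather than strings, so the strptime parsing step is no longer needed. A minimal sketch of the two conversions (the sample value is invented; the format constant matches Odoo's DEFAULT_SERVER_DATETIME_FORMAT):

    import time
    from datetime import datetime

    DEFAULT_SERVER_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'

    # pre-13.0 style: the field value arrives as a string and must be parsed first
    value_as_str = '2019-10-01 12:30:00'
    ts_old = time.mktime(time.strptime(value_as_str, DEFAULT_SERVER_DATETIME_FORMAT))

    # 13.0 style: the field value is already a datetime, so timetuple() is enough
    value_as_dt = datetime(2019, 10, 1, 12, 30, 0)
    ts_new = time.mktime(value_as_dt.timetuple())

    assert ts_old == ts_new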
@@ -31,7 +31,7 @@ class RunbotHook(http.Controller):

         # force update of dependencies to in case a hook is lost
         if not payload or event == 'push' or (event == 'pull_request' and payload.get('action') in ('synchronize', 'opened', 'reopened')):
-            (repo | repo.dependency_ids).write({'hook_time': time.time()})
+            (repo | repo.dependency_ids).set_hook_time(time.time())
         else:
             _logger.debug('Ignoring unsupported hook %s %s', event, payload.get('action', ''))
         return ""
@@ -55,7 +55,7 @@ class runbot_branch(models.Model):
             self.env.cr.execute("select id from runbot_branch where sticky = 't' and repo_id = any(%s) and %s like name||'%%'", (repo_ids, branch.name or ''))
             branch.closest_sticky = self.browse(self.env.cr.fetchone())

-    @api.depends('closest_sticky.previous_version')
+    @api.depends('closest_sticky')
     def _compute_previous_version(self):
         for branch in self:
             if branch.closest_sticky == branch:
@@ -67,11 +67,12 @@ class runbot_branch(models.Model):
             else:
                 branch.previous_version = branch.closest_sticky.previous_version

-    @api.depends('previous_version', 'closest_sticky.intermediate_stickies')
+    @api.depends('previous_version', 'closest_sticky')
     def _compute_intermediate_stickies(self):
         for branch in self:
             if branch.closest_sticky == branch:
                 if not branch.previous_version:
                     branch.intermediate_stickies = [(5, 0, 0)]
                     continue
                 repo_ids = (branch.repo_id | branch.repo_id.duplicate_id).ids
                 domain = [('id', '>', branch.previous_version.id), ('sticky', '=', True), ('branch_name', '!=', 'master'), ('repo_id', 'in', repo_ids)]
@@ -288,6 +289,6 @@ class runbot_branch(models.Model):
         for branch in self:
             if not branch.rebuild_requested:
                 branch.rebuild_requested = True
-                branch.repo_id.write({'hook_time': time.time()})
+                branch.repo_id.set_hook_time(time.time())
             else:
                 branch.rebuild_requested = False
@@ -185,7 +185,7 @@ class runbot_build(models.Model):
         if record.parent_id:
             record.parent_id._update_nb_children(new_state, old_state)

-    @api.depends('real_build.active_step')
+    @api.depends('active_step', 'duplicate_id.active_step')
     def _compute_job(self):
         for build in self:
             build.job = build.real_build.active_step.name
@@ -320,6 +320,8 @@ class runbot_build(models.Model):
         res = super(runbot_build, self).write(values)
         for build in self:
             assert bool(not build.duplicate_id) ^ (build.local_state == 'duplicate')  # don't change duplicate state without removing duplicate id.
+        if 'log_counter' in values:  # not 100% usefull but more correct ( see test_ir_logging)
+            self.flush()
         return res

     def update_build_end(self):
@@ -36,7 +36,7 @@ class RunbotBuildError(models.Model):
     parent_id = fields.Many2one('runbot.build.error', 'Linked to')
     child_ids = fields.One2many('runbot.build.error', 'parent_id', string='Child Errors', context={'active_test': False})
     children_build_ids = fields.Many2many('runbot.build', compute='_compute_children_build_ids', string='Children builds')
-    error_history_ids = fields.One2many('runbot.build.error', compute='_compute_error_history_ids', string='Old errors')
+    error_history_ids = fields.Many2many('runbot.build.error', compute='_compute_error_history_ids', string='Old errors', context={'active_test': False})
     first_seen_build_id = fields.Many2one('runbot.build', compute='_compute_first_seen_build_id', string='First Seen build')
     first_seen_date = fields.Datetime(string='First Seen Date', related='first_seen_build_id.create_date')
     last_seen_build_id = fields.Many2one('runbot.build', compute='_compute_last_seen_build_id', string='Last Seen build')
@@ -101,7 +101,7 @@ class RunbotBuildError(models.Model):
         for build_error in self:
             build_error.first_seen_build_id = build_error.children_build_ids and build_error.children_build_ids[-1] or False

-    @api.depends('fingerprint')
+    @api.depends('fingerprint', 'child_ids.fingerprint')
     def _compute_error_history_ids(self):
         for error in self:
             fingerprints = [error.fingerprint] + [rec.fingerprint for rec in error.child_ids]
@@ -105,20 +105,15 @@ class runbot_repo(models.Model):
         for repo in self:
             repo.hook_time = times.get(repo.id, 0)

-    def write(self, values):
-        # hooktime and reftime table are here to avoid sql update on repo.
-        # using inverse will still trigger write_date and write_uid update.
-        # this hack allows to avoid that
-
-        hook_time = values.pop('hook_time', None)
-        get_ref_time = values.pop('get_ref_time', None)
+    def set_hook_time(self, value):
         for repo in self:
-            if hook_time:
-                self.env['runbot.repo.hooktime'].create({'time': hook_time, 'repo_id': repo.id})
-            if get_ref_time:
-                self.env['runbot.repo.reftime'].create({'time': get_ref_time, 'repo_id': repo.id})
-        if values:
-            super().write(values)
+            self.env['runbot.repo.hooktime'].create({'time': value, 'repo_id': repo.id})
+        self.invalidate_cache()
+
+    def set_ref_time(self, value):
+        for repo in self:
+            self.env['runbot.repo.reftime'].create({'time': value, 'repo_id': repo.id})
+        self.invalidate_cache()

     def _gc_times(self):
         self.env.cr.execute("""
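The removed write() override and the new set_hook_time()/set_ref_time() helpers carry the same idea: the timestamp goes into a separate hooktime/reftime table so the runbot.repo row itself is never updated (an ORM write would also bump write_date/write_uid, as the removed comment notes). A plain-Python sketch of that pattern, with names and storage invented here purely for illustration:

    from collections import defaultdict

    # stand-in for the runbot.repo.hooktime side table
    _hooktimes = defaultdict(list)

    class Repo:
        """Illustrative repo record; not runbot's ORM model."""

        def __init__(self, repo_id):
            self.id = repo_id

        def set_hook_time(self, value):
            # an INSERT into the side table; the repo "row" is never rewritten,
            # so nothing like write_date/write_uid gets touched
            _hooktimes[self.id].append(value)

        @property
        def hook_time(self):
            # the computed field reads the newest entry back (0 if none yet)
            times = _hooktimes[self.id]
            return times[-1] if times else 0

    repo = Repo(1)
    repo.set_hook_time(1.1)
    repo.set_hook_time(1.3)
    assert repo.hook_time == 1.3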
@@ -284,7 +279,7 @@ class runbot_repo(models.Model):

         get_ref_time = round(self._get_fetch_head_time(), 4)
         if not self.get_ref_time or get_ref_time > self.get_ref_time:
-            self.get_ref_time = get_ref_time
+            self.set_ref_time(get_ref_time)
             fields = ['refname', 'objectname', 'committerdate:iso8601', 'authorname', 'authoremail', 'subject', 'committername', 'committeremail']
             fmt = "%00".join(["%(" + field + ")" for field in fields])
             git_refs = self._git(['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/heads', 'refs/pull'])
@@ -777,4 +772,4 @@ class HookTime(models.Model):
     _log_access = False

     time = fields.Float('Time')
-    repo_id = fields.Many2one('runbot.repo', 'Repository', required=True, ondelete='cascade')
+    repo_id = fields.Many2one('runbot.repo', 'Repository', required=True, ondelete='cascade')
@@ -41,7 +41,7 @@
             return false;
         });
     });
-    $(function() {
-        new Clipboard('.clipbtn');
-    });
+    //$(function() {
+    //    new Clipboard('.clipbtn');
+    //});
 })(jQuery);
@@ -39,6 +39,8 @@ class RunbotCase(TransactionCase):
         self.start_patcher('docker_build', 'odoo.addons.runbot.models.build.docker_build')
         self.start_patcher('docker_ps', 'odoo.addons.runbot.models.repo.docker_ps', [])
         self.start_patcher('docker_stop', 'odoo.addons.runbot.models.repo.docker_stop')
+        self.start_patcher('cr_commit', 'odoo.sql_db.Cursor.commit', None)
+        self.start_patcher('repo_commit', 'odoo.addons.runbot.models.repo.runbot_repo._commit', None)

     def start_patcher(self, patcher_name, patcher_path, return_value=Dummy, side_effect=Dummy):
         patcher = patch(patcher_path)
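The two added patchers stub out cursor and repo commits for every test built on RunbotCase, so code paths that would normally commit never hit the real database. A simplified, standalone sketch of the start_patcher() idea using unittest.mock (the Dummy sentinel and cleanup handling are reduced to the essentials and are not copied from runbot):

    from unittest.mock import patch

    class Dummy:
        """Sentinel meaning 'argument not provided'."""

    class PatcherMixin:
        """Illustrative helper; runbot's RunbotCase keeps more bookkeeping."""

        def __init__(self):
            self.patchers = {}
            self._cleanups = []

        def addCleanup(self, func):
            # unittest.TestCase provides this; stored here so the sketch is standalone
            self._cleanups.append(func)

        def start_patcher(self, patcher_name, patcher_path, return_value=Dummy, side_effect=Dummy):
            # patch the target, keep the mock, and register the stop for cleanup
            patcher = patch(patcher_path)
            mock = patcher.start()
            self.addCleanup(patcher.stop)
            if return_value is not Dummy:
                mock.return_value = return_value
            if side_effect is not Dummy:
                mock.side_effect = side_effect
            self.patchers[patcher_name] = mock

    # usage: patch time.time the same way the tests patch Cursor.commit with None
    import time
    case = PatcherMixin()
    case.start_patcher('fake_time', 'time.time', return_value=12345.0)
    assert time.time() == 12345.0
    for cleanup in reversed(case._cleanups):
        cleanup()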
@@ -734,6 +734,10 @@ class TestClosestBranch(RunbotCase):
             'repo_id': self.community_repo.id,
             'name': 'refs/pull/123456'
         })
+
+        # trigger compute and ensure that mock_github is used. (using correct side effect would work too)
+        self.assertEqual(server_pr.pull_head_name, 'foo-dev:bar_branch')
+
         mock_github.return_value = {
             'head': {'label': 'foo-dev:foobar_branch'},
             'base': {'ref': '10.0'},
@@ -743,6 +747,8 @@ class TestClosestBranch(RunbotCase):
             'repo_id': self.enterprise_repo.id,
             'name': 'refs/pull/789101'
         })
+        self.assertEqual(addons_pr.pull_head_name, 'foo-dev:foobar_branch')
+        closest = addons_pr._get_closest_branch(self.community_repo.id)
         self.assertEqual((self.branch_odoo_10, 'pr_target'), addons_pr._get_closest_branch(self.community_repo.id))

     def test_closest_branch_05_master(self):
@@ -92,6 +92,7 @@ class TestBuildError(RunbotCase):
         self.assertIn(ko_build_new, new_build_error.build_ids, 'The parsed build with a re-apearing error should generate a new runbot.build.error')
+        self.assertIn(build_error, new_build_error.error_history_ids, 'The old error should appear in history')


     def test_build_error_links(self):
         build_a = self.create_test_build({'local_result': 'ko'})
         build_b = self.create_test_build({'local_result': 'ko'})
@@ -111,8 +112,7 @@ class TestBuildError(RunbotCase):
         # test that the random bug is parent when linking errors
         all_errors = error_a | error_b
         all_errors.link_errors()

-        self.assertIn(error_b.child_ids, error_a, 'Random error should be the parent')
+        self.assertEqual(error_b.child_ids, error_a, 'Random error should be the parent')

         # Test that changing bug resolution is propagated to children
         error_b.active = True
@@ -176,20 +176,22 @@ class Test_Repo(RunbotCase):

         _logger.info('Create pending builds took: %ssec', (time.time() - inserted_time))


+    @common.warmup
     def test_times(self):
-        def _test_times(model, field_name):
+        def _test_times(model, setter, field_name):
             repo1 = self.Repo.create({'name': 'bla@example.com:foo/bar'})
             repo2 = self.Repo.create({'name': 'bla@example.com:foo2/bar2'})
-            count = self.cr.sql_log_count
-            repo1[field_name] = 1.1
-            self.assertEqual(self.cr.sql_log_count - count, 1, "Only one insert should have been triggered")
-            repo2[field_name] = 1.2
+            with self.assertQueryCount(1):
+                getattr(repo1, setter)(1.1)
+            getattr(repo2, setter)(1.2)
             self.assertEqual(len(self.env[model].search([])), 2)
             self.assertEqual(repo1[field_name], 1.1)
             self.assertEqual(repo2[field_name], 1.2)

-            repo1[field_name] = 1.3
-            repo2[field_name] = 1.4
+            getattr(repo1, setter)(1.3)
+            getattr(repo2, setter)(1.4)

             self.assertEqual(len(self.env[model].search([])), 4)
             self.assertEqual(repo1[field_name], 1.3)
@@ -205,8 +207,8 @@ class Test_Repo(RunbotCase):
             self.assertEqual(repo1[field_name], 1.3)
             self.assertEqual(repo2[field_name], 1.4)

-        _test_times('runbot.repo.hooktime', 'hook_time')
-        _test_times('runbot.repo.reftime', 'get_ref_time')
+        _test_times('runbot.repo.hooktime', 'set_hook_time', 'hook_time')
+        _test_times('runbot.repo.reftime', 'set_ref_time', 'get_ref_time')

@@ -239,11 +241,9 @@ class Test_Github(TransactionCase):

 class Test_Repo_Scheduler(RunbotCase):

-    def setUp(self ):
+    def setUp(self):
         # as the _scheduler method commits, we need to protect the database
         registry = odoo.registry()
         registry.enter_test_mode()
         self.addCleanup(registry.leave_test_mode)
         super(Test_Repo_Scheduler, self).setUp()

         self.fqdn_patcher = patch('odoo.addons.runbot.models.host.fqdn')
@@ -261,6 +261,7 @@ class Test_Repo_Scheduler(RunbotCase):
     @patch('odoo.addons.runbot.models.build.runbot_build._schedule')
     @patch('odoo.addons.runbot.models.build.runbot_build._init_pendings')
     def test_repo_scheduler(self, mock_init_pendings, mock_schedule, mock_kill):
+        self.env['ir.config_parameter'].set_param('runbot.runbot_workers', 6)
         builds = []
         # create 6 builds that are testing on the host to verify that
@@ -305,7 +306,3 @@ class Test_Repo_Scheduler(RunbotCase):
         self.Build.search([('name', '=', 'a')]).write({'local_state': 'done'})

         self.foo_repo._scheduler(host)
-        build.invalidate_cache()
-        scheduled_build.invalidate_cache()
-        self.assertEqual(build.host, 'host.runbot.com')
-        self.assertFalse(scheduled_build.host)
@@ -11,8 +11,6 @@ class TestSchedule(RunbotCase):

     def setUp(self):
         # entering test mode to avoid that the _schedule method commits records
         registry = odoo.registry()
         registry.enter_test_mode()
         self.addCleanup(registry.leave_test_mode)
         super(TestSchedule, self).setUp()

         self.repo = self.Repo.create({'name': 'bla@example.com:foo/bar'})
@@ -121,7 +121,7 @@
                     <filter string="Fixed" name="fixed_errors" domain="[('active', '=', False)]"/>
                     <filter string="Not Fixed" name="not_fixed_errors" domain="[('active', '=', True)]"/>
                     <separator/>
-                    <filter string="Not Asigned" name="not_assigned_errors" domain="[('responsible', '=', False)]"/>
+                    <filter string="Not Assigned" name="not_assigned_errors" domain="[('responsible', '=', False)]"/>
                 </search>
             </field>
         </record>
@@ -82,21 +82,19 @@
                     <field name="name"/>
                     <field name="global_state"/>
                     <field name="dest"/>
-                    <separator/>
-                    <filter string="Pending" domain="[('global_state','=', 'pending')]"/>
-                    <filter string="Testing" domain="[('global_state','in', ('testing', 'waiting'))]"/>
-                    <filter string="Running" domain="[('global_state','=', 'running')]"/>
-                    <filter string="Done" domain="[('global_state','=','done')]"/>
-                    <filter string="Duplicate" domain="[('local_state','=', 'duplicate')]"/>
+                    <separator />
+                    <filter string="Pending" name='pending' domain="[('global_state','=', 'pending')]"/>
+                    <filter string="Testing" name='testing' domain="[('global_state','in', ('testing', 'waiting'))]"/>
+                    <filter string="Running" name='running' domain="[('global_state','=', 'running')]"/>
+                    <filter string="Done" name='done' domain="[('global_state','=','done')]"/>
+                    <filter string="Duplicate" name='duplicate' domain="[('local_state','=', 'duplicate')]"/>
                     <group expand="0" string="Group By...">
-                        <filter string="Repo" domain="[]" context="{'group_by':'repo_id'}"/>
-                        <filter string="Branch" domain="[]" context="{'group_by':'branch_id'}"/>
-                        <filter string="Status" domain="[]" context="{'group_by':'global_state'}"/>
-                        <filter string="Result" domain="[]" context="{'group_by':'global_result'}"/>
-                        <filter string="Start" domain="[]" context="{'group_by':'job_start'}"/>
-                        <filter string="Host" domain="[]" context="{'group_by':'host'}"/>
-                        <filter string="Create Date" domain="[]" context="{'group_by':'create_date'}"/>
+                        <filter string="Repo" name='repo' domain="[]" context="{'group_by':'repo_id'}"/>
+                        <filter string="Branch" name='branch' domain="[]" context="{'group_by':'branch_id'}"/>
+                        <filter string="Status" name='status' domain="[]" context="{'group_by':'global_state'}"/>
+                        <filter string="Result" name='result' domain="[]" context="{'group_by':'global_result'}"/>
+                        <filter string="Start" name='start' domain="[]" context="{'group_by':'job_start'}"/>
+                        <filter string="Host" name='host' domain="[]" context="{'group_by':'host'}"/>
+                        <filter string="Create Date" name='create_date' domain="[]" context="{'group_by':'create_date'}"/>
                     </group>
                 </search>
             </field>
@@ -105,7 +103,6 @@
             <field name="name">Builds</field>
             <field name="type">ir.actions.act_window</field>
             <field name="res_model">runbot.build</field>
-            <field name="view_type">form</field>
             <field name="view_mode">tree,form,graph,pivot</field>
         </record>
         <menuitem id="menu_build" action="action_build" parent="runbot_menu_root"/>
@@ -113,7 +113,7 @@
                 <search string="Search config">
                     <field name="name"/>
                     <field name="group_name"/>
-                    <filter string="Is in a group" domain="[(['group', '!=', False])]"/>
+                    <filter string="Is in a group" name='is_in_group' domain="[(['group', '!=', False])]"/>
                     <filter string="No step's defined" name="no_step" domain="[(['step_order_ids', '=', False])]"/>
                 </search>
             </field>
@@ -126,12 +126,12 @@
                 <search string="Search config step">
                     <field name="name"/>
                     <field name="group_name"/>
-                    <filter string="Install job" domain="[(['job_type', '=', 'install_odoo'])]"/>
-                    <filter string="Run job" domain="[(['job_type', '=', 'run_odoo'])]"/>
-                    <filter string="Python job" domain="[(['job_type', '=', 'python'])]"/>
-                    <filter string="Create job" domain="[(['job_type', '=', 'create_build'])]"/>
+                    <filter string="Install job" name='install_job' domain="[(['job_type', '=', 'install_odoo'])]"/>
+                    <filter string="Run job" name='run_job' domain="[(['job_type', '=', 'run_odoo'])]"/>
+                    <filter string="Python job" name='python_job' domain="[(['job_type', '=', 'python'])]"/>
+                    <filter string="Create job" name='create_job' domain="[(['job_type', '=', 'create_build'])]"/>
                     <separator/>
-                    <filter string="Is in a group" domain="[(['group', '!=', False])]"/>
+                    <filter string="Is in a group" name='is_in_group' domain="[(['group', '!=', False])]"/>
                     <separator/>
                     <filter string="No config defined" name="no_step" domain="[(['step_order_ids', '=', False])]"/>
                 </search>
@@ -30,7 +30,6 @@
             <field name="name">Generate Multi Build Config</field>
             <field name="type">ir.actions.act_window</field>
             <field name="res_model">runbot.build.config.multi.wizard</field>
-            <field name="view_type">form</field>
             <field name="view_mode">form</field>
             <field name="view_id" ref="runbot_multi_build_wizard_form"/>
             <field name="target">new</field>