Mirror of https://github.com/odoo/runbot.git (synced 2025-03-15 15:35:46 +07:00)
[IMP] *: trigger-ify task queue type crons
These are pretty simple to convert as they are straightforward: an item is added to a work queue (table), then a cron regularly scans through the table, executing the items and deleting them. That means the cron trigger can just be added on `create` and things should work out fine.

There are just two wrinkles in the port_forward cron:

- It can be requeued in the future, so it needs a conditional trigger-ing in `write`.
- It is disabled during freeze (maybe something to change); as a result triggers don't enqueue at all, so we need to trigger immediately after the freeze, when the cron is re-enabled, to process anything enqueued in the meantime.
This commit is contained in:
parent 57a82235d9
commit 029957dbeb
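For context, the conversion described above boils down to the pattern sketched below: a work-queue model whose `create` (and, for requeueable items, `write`) pokes its processing cron through `ir.cron._trigger`. This is only an illustrative sketch, not code from this commit; `work.queue` and `module.process_queue_cron` are placeholder names, and the real changes are in the diff that follows.

    # Illustrative sketch of the trigger-ified work-queue pattern; the model
    # name 'work.queue' and the cron XML id 'module.process_queue_cron' are
    # placeholders, not records that exist in this codebase.
    from odoo import api, fields, models

    class WorkQueue(models.Model):
        _name = 'work.queue'
        _description = 'work queue processed by a triggered cron'

        retry_after = fields.Datetime(required=True, default='1900-01-01 01:01:01')

        @api.model_create_multi
        def create(self, vals_list):
            # a new item means there is work to do: wake the processing cron now
            self.env.ref('module.process_queue_cron')._trigger()
            return super().create(vals_list)

        def write(self, vals):
            # a requeued item only needs the cron once its retry time is reached
            if retry := vals.get('retry_after'):
                self.env.ref('module.process_queue_cron')._trigger(
                    fields.Datetime.to_datetime(retry))
            return super().write(vals)

With the triggers in place the crons no longer have to poll every minute just in case, which is why the diff below relaxes their intervals to 6 hours and drops them from the tests' default_crons lists.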
@@ -4,8 +4,8 @@
         <field name="model_id" ref="model_forwardport_batches"/>
         <field name="state">code</field>
         <field name="code">model._process()</field>
-        <field name="interval_number">1</field>
-        <field name="interval_type">minutes</field>
+        <field name="interval_number">6</field>
+        <field name="interval_type">hours</field>
         <field name="numbercall">-1</field>
         <field name="doall" eval="False"/>
         <field name="priority">43</field>
@@ -16,8 +16,8 @@
         <field name="model_id" ref="model_forwardport_updates"/>
         <field name="state">code</field>
         <field name="code">model._process()</field>
-        <field name="interval_number">1</field>
-        <field name="interval_type">minutes</field>
+        <field name="interval_number">6</field>
+        <field name="interval_type">hours</field>
         <field name="numbercall">-1</field>
         <field name="doall" eval="False"/>
         <field name="priority">46</field>
@@ -39,7 +39,7 @@
         <field name="model_id" ref="model_forwardport_branch_remover"/>
         <field name="state">code</field>
         <field name="code">model._process()</field>
-        <field name="interval_number">1</field>
+        <field name="interval_number">6</field>
         <field name="interval_type">hours</field>
         <field name="numbercall">-1</field>
         <field name="doall" eval="False"/>
@@ -77,6 +77,16 @@ class ForwardPortTasks(models.Model, Queue):
     retry_after = fields.Datetime(required=True, default='1900-01-01 01:01:01')
     pr_id = fields.Many2one('runbot_merge.pull_requests')
 
+    def create(self, vals_list):
+        self.env.ref('forwardport.port_forward')._trigger()
+        return super().create(vals_list)
+
+    def write(self, vals):
+        if retry := vals.get('retry_after'):
+            self.env.ref('forwardport.port_forward')\
+                ._trigger(fields.Datetime.to_datetime(retry))
+        return super().write(vals)
+
     def _search_domain(self):
         return super()._search_domain() + [
             ('retry_after', '<=', fields.Datetime.to_string(fields.Datetime.now())),
@@ -260,6 +270,10 @@ class UpdateQueue(models.Model, Queue):
     original_root = fields.Many2one('runbot_merge.pull_requests')
     new_root = fields.Many2one('runbot_merge.pull_requests')
 
+    def create(self, vals_list):
+        self.env.ref('forwardport.updates')._trigger()
+        return super().create(vals_list)
+
     def _process_item(self):
         previous = self.new_root
         sentry_sdk.set_tag("update-root", self.new_root.display_name)
@@ -345,6 +359,10 @@ class DeleteBranches(models.Model, Queue):
 
     pr_id = fields.Many2one('runbot_merge.pull_requests')
 
+    def create(self, vals_list):
+        self.env.ref('forwardport.remover')._trigger(datetime.now() - MERGE_AGE)
+        return super().create(vals_list)
+
     def _search_domain(self):
         cutoff = getattr(builtins, 'forwardport_merged_before', None) \
              or fields.Datetime.to_string(datetime.now() - MERGE_AGE)
@@ -22,5 +22,7 @@ class FreezeWizard(models.Model):
     def unlink(self):
         r = super().unlink()
         if not (self.env.context.get('forwardport_keep_disabled') or self.search_count([])):
-            self.env.ref('forwardport.port_forward').active = True
+            cron = self.env.ref('forwardport.port_forward')
+            cron.active = True
+            cron._trigger() # process forward ports enqueued during the freeze period
         return r
@@ -9,10 +9,7 @@ def default_crons():
     return [
         'runbot_merge.merge_cron',
         'runbot_merge.staging_cron',
-        'forwardport.port_forward',
-        'forwardport.updates',
         'runbot_merge.check_linked_prs_status',
-        'runbot_merge.feedback_cron',
     ]
 
 # public_repo — necessary to leave comments
@@ -123,7 +123,7 @@ def test_straightforward_flow(env, config, make_repo, users):
     prod.post_status(pr1.head, 'success', 'legal/cla')
 
     env.run_crons()
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
 
     pr0_, pr1_, pr2 = env['runbot_merge.pull_requests'].search([], order='number')
 
@@ -329,8 +329,8 @@ def test_empty(env, config, make_repo, users):
     assert project.fp_github_name == users['other']
 
     # check reminder
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
 
     awaiting = (
         users['other'],
@@ -374,7 +374,7 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
     # check that this stops if we close the PR
     with prod:
         fail_pr.close()
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
     assert pr1.comments == [
         (users['reviewer'], 'hansen r+'),
         seen(env, pr1, users),
@@ -604,7 +604,7 @@ def test_disapproval(env, config, make_repo, users):
     # oh no, pr1 has an error!
     with prod:
         pr1.post_comment('hansen r-', token=config['role_other']['token'])
-    env.run_crons('runbot_merge.feedback_cron')
+    env.run_crons(None)
     assert pr1_id.state == 'validated', "pr1 should not be approved anymore"
     assert pr2_id.state == 'ready', "pr2 should not be affected"
 
@@ -899,7 +899,7 @@ class TestClosing:
         prod.post_status(pr1_id.head, 'success', 'legal/cla')
         prod.post_status(pr1_id.head, 'success', 'ci/runbot')
         env.run_crons()
-        env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron')
+        env.run_crons('forwardport.reminder')
 
         assert env['runbot_merge.pull_requests'].search([], order='number') == pr0_id | pr1_id,\
             "closing the PR should suppress the FP sequence"
@@ -695,7 +695,7 @@ def test_retarget_after_freeze(env, config, make_repo, users):
         port_pr.base = 'bprime'
     assert port_id.target == new_branch
 
-    env.run_crons('forwardport.port_forward')
+    env.run_crons(None)
     assert not job.exists(), "job should have succeeded and apoptosed"
 
     # since the PR was "already forward-ported" to the new branch it should not
@@ -816,11 +816,7 @@ def test_freeze(env, config, make_repo, users):
-    # re-enable forward-port cron after freeze
-    _, cron_id = env['ir.model.data'].check_object_reference('forwardport', 'port_forward', context={'active_test': False})
-    env['ir.cron'].browse([cron_id]).active = True
-
     # run crons to process the feedback, run a second time in case of e.g.
     # forward porting
     env.run_crons()
-    env.run_crons()
+    env.run_crons('forwardport.port_forward')
 
     assert release_id.state == 'merged'
     assert not env['runbot_merge.pull_requests'].search([
@@ -896,7 +892,7 @@ def test_missing_magic_ref(env, config, make_repo):
     # check that the batch is still here and targeted for the future
     req = env['forwardport.batches'].search([])
     assert len(req) == 1
-    assert req.retry_after > datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
+    assert req.retry_after > datetime.utcnow().isoformat(" ", "seconds")
     # reset retry_after
     req.retry_after = '1900-01-01 01:01:01'
 
@@ -905,7 +901,7 @@ def test_missing_magic_ref(env, config, make_repo):
     [c2] = prod.make_commits(a_head.id, Commit('y', tree={'x': '0'}))
     assert c2 != c
     pr_id.head = c2
-    env.run_crons()
+    env.run_crons(None)
 
     fp_id = env['runbot_merge.pull_requests'].search([('source_id', '=', pr_id.id)])
     assert fp_id
@@ -1161,7 +1157,7 @@ def test_reminder_detached(env, config, make_repo, users):
     pr_c = prod.get_pr(pr_c_id.number)
 
     # region sanity check
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
 
     assert pr_b.comments == [
         seen(env, pr_b, users),
@@ -1195,7 +1191,7 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
 
     # region check detached
     pr_c_id.write({'parent_id': False, 'detach_reason': 'because'})
-    env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
+    env.run_crons('forwardport.reminder', context={'forwardport_updated_before': FAKE_PREV_WEEK})
 
     assert pr_b.comments[2:] == [
         (users['user'], "@%s @%s child PR %s was modified / updated and has become a normal PR. This PR (and any of its parents) will need to be merged independently as approvals won't cross." % (
@@ -26,8 +26,8 @@
         <field name="model_id" ref="model_runbot_merge_pull_requests_feedback"/>
         <field name="state">code</field>
         <field name="code">model._send()</field>
-        <field name="interval_number">1</field>
-        <field name="interval_type">minutes</field>
+        <field name="interval_number">6</field>
+        <field name="interval_type">hours</field>
         <field name="numbercall">-1</field>
         <field name="doall" eval="False"/>
         <field name="priority">60</field>
@@ -48,8 +48,8 @@
         <field name="model_id" ref="model_runbot_merge_fetch_job"/>
         <field name="state">code</field>
         <field name="code">model._check(True)</field>
-        <field name="interval_number">1</field>
-        <field name="interval_type">minutes</field>
+        <field name="interval_number">6</field>
+        <field name="interval_type">hours</field>
         <field name="numbercall">-1</field>
         <field name="doall" eval="False"/>
         <field name="priority">10</field>
@@ -1774,6 +1774,12 @@ class Feedback(models.Model):
         help="Token field (from repo's project) to use to post messages"
     )
 
+    @api.model_create_multi
+    def create(self, vals_list):
+        # any time a feedback is created, it can be sent
+        self.env.ref('runbot_merge.feedback_cron')._trigger()
+        return super().create(vals_list)
+
     def _send(self):
         ghs = {}
         to_remove = []
@@ -2424,6 +2430,11 @@ class FetchJob(models.Model):
     number = fields.Integer(required=True, group_operator=None)
     closing = fields.Boolean(default=False)
 
+    @api.model_create_multi
+    def create(self, vals_list):
+        self.env.ref('runbot_merge.fetch_prs_cron')._trigger()
+        return super().create(vals_list)
+
     def _check(self, commit=False):
         """
         :param bool commit: commit after each fetch has been executed
@@ -7,16 +7,12 @@ def module():
 @pytest.fixture
 def default_crons():
     return [
-        # env['runbot_merge.project']._check_fetch()
-        'runbot_merge.fetch_prs_cron',
         # env['runbot_merge.project']._check_stagings()
         'runbot_merge.merge_cron',
         # env['runbot_merge.project']._create_stagings()
         'runbot_merge.staging_cron',
         # env['runbot_merge.pull_requests']._check_linked_prs_statuses()
         'runbot_merge.check_linked_prs_status',
-        # env['runbot_merge.pull_requests.feedback']._send()
-        'runbot_merge.feedback_cron',
     ]
 
 @pytest.fixture
@@ -3534,7 +3534,6 @@ class TestUnknownPR:
         prx.post_comment('hansen r+', config['role_reviewer']['token'])
         env.run_crons(
             'runbot_merge.fetch_prs_cron',
-            'runbot_merge.feedback_cron',
         )
 
         assert prx.comments == [
@@ -3559,7 +3558,6 @@ class TestUnknownPR:
         prx.post_review('APPROVE', 'hansen r+', config['role_reviewer']['token'])
         env.run_crons(
             'runbot_merge.fetch_prs_cron',
-            'runbot_merge.feedback_cron',
        )
 
         # FIXME: either split out reviews in local or merge reviews & comments in remote