Mirror of https://github.com/odoo/runbot.git
[FIX] *: fix forward port inserter if last branch is disabled
In the case where the last branch (before the branch being frozen) is disabled, the forward-port inserter screws up and fails to correctly create the intermediate forward-ports from the new branch.

Also, when disabling a branch, if there are FW PRs which target that branch and have not been forward-ported further, automatically forward-port them as if the branch had been disabled when they were created; this should limit data loss and confusion.

Also change the message set on PRs when disabling a branch: because of user conflicts in the test setup, the message about a branch being disabled would close the PRs, which would then orphan the followup, leading to unexpected / inconsistent behaviour.

Fixes #665
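For orientation, a condensed sketch of the flow this commit adds to Project.write() (method and variable names are taken from the diff below; this is a simplified outline, not the verbatim implementation):

    # simplified outline of the new Project.write() flow (see the diff below)
    def write(self, vals):
        # look at branches both active and inactive, so disabling one
        # doesn't look like the sequence changed
        self_ = self.with_context(active_test=False)
        previously_active_branches = {p: p.branch_ids.filtered('active') for p in self_}
        branches_before = {p: p._forward_port_ordered() for p in self_}

        r = super().write(vals)
        # forward-port PRs left stranded on branches that were just disabled
        self_._followup_prs(previously_active_branches)
        # create intermediate forward-ports for branches inserted mid-sequence
        self_._insert_intermediate_prs(branches_before)
        return r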
This commit is contained in:
parent: ac4047ec2d
commit: 7948e59c51
forwardport/changelog/2022-10/new-branch.md (new file, 1 line)
@@ -0,0 +1 @@
+FIX: creation of forward-port PRs for new (freeze) branches in edge cases
@@ -26,7 +26,6 @@ import re
 import subprocess
 import tempfile
 import typing
-from functools import partial

 import dateutil.relativedelta
 import requests
@@ -89,14 +88,58 @@ class Project(models.Model):
             raise UserError(_("The forward-port bot needs a primary email set up."))

     def write(self, vals):
-        Branches = self.env['runbot_merge.branch']
         # check on branches both active and inactive so disabling branches doesn't
         # make it look like the sequence changed.
         self_ = self.with_context(active_test=False)
+        previously_active_branches = {project: project.branch_ids.filtered('active') for project in self_}
         branches_before = {project: project._forward_port_ordered() for project in self_}

         r = super().write(vals)
-        for p in self_:
+        self_._followup_prs(previously_active_branches)
+        self_._insert_intermediate_prs(branches_before)
+        return r
+
+    def _followup_prs(self, previously_active_branches):
+        """If a branch has been disabled and had PRs without a followup (e.g.
+        because no CI or CI failed), create followup, as if the branch had been
+        originally disabled (and thus skipped over)
+        """
+        PRs = self.env['runbot_merge.pull_requests']
+        for p in self:
+            actives = previously_active_branches[p]
+            for deactivated in p.branch_ids.filtered(lambda b: not b.active) & actives:
+                # if a PR targets a deactivated branch, and that's not its limit,
+                # and it doesn't have a child (e.g. CI failed), enqueue a forward
+                # port as if the now deactivated branch had been skipped over (which
+                # is the normal fw behaviour)
+                extant = PRs.search([
+                    ('target', '=', deactivated.id),
+                    ('source_id.limit_id', '!=', deactivated.id),
+                    ('state', 'not in', ('closed', 'merged')),
+                ])
+                for p in extant.with_context(force_fw=True):
+                    next_target = p.source_id._find_next_target(p)
+                    # should not happen since we already filtered out limits
+                    if not next_target:
+                        continue
+
+                    # check if it has a descendant in the next branch, if so skip
+                    if PRs.search_count([
+                        ('source_id', '=', p.source_id.id),
+                        ('target', '=', next_target.id)
+                    ]):
+                        continue
+
+                    # otherwise enqueue a followup
+                    p._schedule_fp_followup()
+
+    def _insert_intermediate_prs(self, branches_before):
+        """If new branches have been added to the sequence inbetween existing
+        branches (mostly a freeze inserted before the main branch), fill in
+        forward-ports for existing sequences
+        """
+        Branches = self.env['runbot_merge.branch']
+        for p in self:
             # check if the branches sequence has been modified
             bbefore = branches_before[p]
             bafter = p._forward_port_ordered()

@@ -153,7 +196,6 @@ class Project(models.Model):
                     }).id,
                     'source': 'insert',
                 })
-        return r

     def _forward_port_ordered(self, domain=()):
         Branches = self.env['runbot_merge.branch']
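Both of the new hooks are reached through ordinary writes on a project's branch_ids, as the updated test further below does. A minimal illustration of the two trigger paths (branch_b_id is a placeholder identifier; the field values mirror the ones used in the test):

    # deactivating a branch: _followup_prs() picks up forward-port PRs stuck on it
    project.write({'branch_ids': [(1, branch_b_id, {'active': False})]})

    # inserting a new branch into the sequence (e.g. during a freeze):
    # _insert_intermediate_prs() fills in the missing forward-ports
    project.write({'branch_ids': [
        (0, False, {'name': 'new', 'fp_sequence': 1, 'fp_target': True}),
    ]})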
@@ -435,7 +477,7 @@ class PullRequests(models.Model):
             if not pr.parent_id:
                 _logger.info('-> no parent %s (%s)', pr.display_name, pr.parent_id)
                 continue
-            if self.source_id.fw_policy != 'skipci' and pr.state not in ['validated', 'ready']:
+            if not self.env.context.get('force_fw') and self.source_id.fw_policy != 'skipci' and pr.state not in ['validated', 'ready']:
                 _logger.info('-> wrong state %s (%s)', pr.display_name, pr.state)
                 continue

@@ -460,7 +502,7 @@ class PullRequests(models.Model):
             # check if batch-mate are all valid
             mates = batch.prs
             # wait until all of them are validated or ready
-            if any(pr.source_id.fw_policy != 'skipci' and pr.state not in ('validated', 'ready') for pr in mates):
+            if not self.env.context.get('force_fw') and any(pr.source_id.fw_policy != 'skipci' and pr.state not in ('validated', 'ready') for pr in mates):
                 _logger.info("-> not ready (%s)", [(pr.display_name, pr.state) for pr in mates])
                 continue
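The force_fw context key added to both guards above is what lets _followup_prs() enqueue a followup for a PR whose CI never passed: the stranded PRs are browsed with with_context(force_fw=True), and _schedule_fp_followup() then skips its readiness checks for exactly those records. A minimal sketch of the interaction, condensed from the two hunks above rather than quoted verbatim:

    # in _followup_prs(): put the stranded PRs in a context carrying the flag
    for p in extant.with_context(force_fw=True):
        p._schedule_fp_followup()

    # in _schedule_fp_followup(): the guard only blocks when the flag is absent
    if not self.env.context.get('force_fw') and pr.state not in ('validated', 'ready'):
        continue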
@@ -432,14 +432,13 @@ def test_new_intermediate_branch(env, config, make_repo):
     original_c_tree = prod.read_tree(prod.commit('c'))
     prs = []
     with prod:
-        for i in ['0', '1', '2']:
+        for i in ['0', '1']:
             prod.make_commits('a', Commit(i, tree={i:i}), ref='heads/branch%s' % i)
             pr = prod.make_pr(target='a', head='branch%s' % i)
             prs.append(pr)
             validate(pr.head)
             pr.post_comment('hansen r+', config['role_reviewer']['token'])
-        # cancel validation of PR2
-        prod.post_status(prs[2].head, 'failure', 'ci/runbot')
+
         # also add a PR targeting b forward-ported to c, in order to check
         # for an insertion right after the source
         prod.make_commits('b', Commit('x', tree={'x': 'x'}), ref='heads/branchx')
@@ -455,10 +454,7 @@ def test_new_intermediate_branch(env, config, make_repo):
     # should have merged pr1, pr2 and prx and created their forward ports, now
     # validate pr0's FP so the c-targeted FP is created
     PRs = env['runbot_merge.pull_requests']
-    pr0_id = PRs.search([
-        ('repository.name', '=', prod.name),
-        ('number', '=', prs[0].number),
-    ])
+    pr0_id = to_pr(env, prs[0])
     pr0_fp_id = PRs.search([
         ('source_id', '=', pr0_id.id),
     ])
@@ -471,8 +467,12 @@ def test_new_intermediate_branch(env, config, make_repo):
     assert original0, "Could not find FP of PR0 to C"
     assert original0.target.name == 'c'

+    pr1_id = to_pr(env, prs[1])
+    pr1_fp_id = PRs.search([('source_id', '=', pr1_id.id)])
+    assert pr1_fp_id.target.name == 'b'
+
     # also check prx's fp
-    prx_id = PRs.search([('repository.name', '=', prod.name), ('number', '=', prx.number)])
+    prx_id = to_pr(env, prx)
     prx_fp_id = PRs.search([('source_id', '=', prx_id.id)])
     assert prx_fp_id
     assert prx_fp_id.target.name == 'c'
@@ -487,77 +487,85 @@ def test_new_intermediate_branch(env, config, make_repo):
     project.write({
         'branch_ids': [
             (1, currents['a'], {'fp_sequence': 3}),
-            (1, currents['b'], {'fp_sequence': 2}),
-            (0, False, {'name': 'new', 'fp_sequence': 1, 'fp_target': True}),
+            (1, currents['b'], {'fp_sequence': 2, 'active': False}),
             (1, currents['c'], {'fp_sequence': 0})
         ]
     })
     env.run_crons()
-    descendants = PRs.search([('source_id', '=', pr0_id.id)])
-    new0 = descendants - pr0_fp_id - original0
+    project.write({
+        'branch_ids': [
+            (0, False, {'name': 'new', 'fp_sequence': 1, 'fp_target': True}),
+        ]
+    })
+    env.run_crons()
+
+    # created an intermediate PR for 0 and x
+    desc0 = PRs.search([('source_id', '=', pr0_id.id)])
+    new0 = desc0 - pr0_fp_id - original0
     assert len(new0) == 1
     assert new0.parent_id == pr0_fp_id
+    assert new0.target.name == 'new'
     assert original0.parent_id == new0

     descx = PRs.search([('source_id', '=', prx_id.id)])
     newx = descx - prx_fp_id
     assert len(newx) == 1
     assert newx.parent_id == prx_id
+    assert newx.target.name == 'new'
     assert prx_fp_id.parent_id == newx

-    # finish up: merge pr1 and pr2, ensure all the content is present in both
-    # "new" (the newly inserted branch) and "c" (the tippity tip)
-    with prod: # validate pr2
-        prod.post_status(prs[2].head, 'success', 'ci/runbot')
-    env.run_crons()
-    # merge pr2
-    with prod:
-        validate('staging.a')
-    env.run_crons()
+    # created followups for 1
+    # earliest followup is followup from deactivating a branch, creates fp in
+    # n+1 = c (from b), then inserting a new branch between b and c should
+    # create a bridge forward port
+    _, pr1_c, pr1_new = PRs.search([('source_id', '=', pr1_id.id)], order='number')
+    assert pr1_c.target.name == 'c'
+    assert pr1_new.target.name == 'new'
+    assert pr1_c.parent_id == pr1_new
+    assert pr1_new.parent_id == pr1_fp_id

     # ci on pr1/pr2 fp to b
-    sources = [
-        env['runbot_merge.pull_requests'].search([
-            ('repository.name', '=', prod.name),
-            ('number', '=', pr.number),
-        ]).id
-        for pr in prs
-    ]
+    sources = [to_pr(env, pr).id for pr in prs]
     sources.append(prx_id.id)
     # CI all the forward port PRs (shouldn't hurt to re-ci the forward port of
     # prs[0] to b aka pr0_fp_id
-    for target in ['b', 'new', 'c']:
+    for target in ['new', 'c']:
         fps = PRs.search([('source_id', 'in', sources), ('target.name', '=', target)])
         with prod:
             for fp in fps:
                 validate(fp.head)
         env.run_crons()
-    # now fps should be the last PR of each sequence, and thus r+-able
+    # now fps should be the last PR of each sequence, and thus r+-able (via
+    # fwbot so preceding PR is also r+'d)
     with prod:
         for pr in fps:
             assert pr.target.name == 'c'
             prod.get_pr(pr.number).post_comment(
                 '%s r+' % project.fp_github_name,
                 config['role_reviewer']['token'])
-    assert all(p.state == 'merged' for p in PRs.browse(sources)), \
+    assert all(p.state == 'merged' for p in PRs.browse(sources)),\
         "all sources should be merged"
-    assert all(p.state == 'ready' for p in PRs.search([('id', 'not in', sources)])),\
-        "All PRs except sources should be ready"
+    assert all(p.state == 'ready' for p in PRs.search([('source_id', '!=', False), ('target.name', '!=', 'b')])), \
+        "All PRs except sources and prs on disabled branch should be ready"
     env.run_crons()
+
+    assert len(env['runbot_merge.stagings'].search([])) == 2,\
+        "enabled branches should have been staged"
     with prod:
-        for target in ['b', 'new', 'c']:
-            validate('staging.' + target)
+        for target in ['new', 'c']:
+            validate(f'staging.{target}')
     env.run_crons()
-    assert all(p.state == 'merged' for p in PRs.search([])), \
-        "All PRs should be merged now"
+    assert all(p.state == 'merged' for p in PRs.search([('target.name', '!=', 'b')])), \
+        "All PRs except disabled branch should be merged now"

     assert prod.read_tree(prod.commit('c')) == {
         **original_c_tree,
-        '0': '0', '1': '1', '2': '2', # updates from PRs
+        '0': '0', '1': '1', # updates from PRs
         'x': 'x',
     }, "check that C got all the updates"
     assert prod.read_tree(prod.commit('new')) == {
         **original_c_tree,
-        '0': '0', '1': '1', '2': '2', # updates from PRs
+        '0': '0', '1': '1', # updates from PRs
         'x': 'x',
     }, "check that new got all the updates (should be in the same state as c really)"
@@ -265,7 +265,7 @@ class Branch(models.Model):
         self.env['runbot_merge.pull_requests.feedback'].create([{
             'repository': pr.repository.id,
             'pull_request': pr.number,
-            'message': f'{pr.ping()}the target branch {pr.target.name!r} has been disabled, you may want to close this PR.',
+            'message': f'Hey {pr.ping()}the target branch {pr.target.name!r} has been disabled, you may want to close this PR.',
         } for pr in self.prs])
         return True
@@ -63,7 +63,7 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
     assert pr.comments == [
         (users['reviewer'], "hansen r+"),
         seen(env, pr, users),
-        (users['user'], "@%(user)s @%(reviewer)s the target branch 'other' has been disabled, you may want to close this PR." % users),
+        (users['user'], "Hey @%(user)s @%(reviewer)s the target branch 'other' has been disabled, you may want to close this PR." % users),
     ]

     with repo: