[FIX] runbot_merge: followup detached PRs when disabling branches

Although the handling of forward ports on disabled branches was
improved in 94fe0329b4 in order to avoid losing them or needing to
port them manually, because that handling goes through
`_schedule_fp_followup` some of the checks that method performs were
missed, most notably that it only ports batches when no PRs are
detached.

This is an issue if we need to force the port because of a branch
being deactivated: the forward-port could have stopped there due to a
conflict, in which case it's always going to be detached.

Thus the `force_fw` flag should also override the parenting state
check.
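Concretely, the intended gating is roughly the following (a minimal
standalone sketch, not the actual Odoo models; `Batch`, `PR`,
`is_forward_port` and `detached` are hypothetical stand-ins for the
batch-level `parent_id` and the per-PR `parent_id` checks):

```python
from dataclasses import dataclass, field

@dataclass
class PR:
    name: str
    detached: bool = False  # a conflict during forward-port detaches the PR

@dataclass
class Batch:
    is_forward_port: bool  # stand-in for batch.parent_id being set
    prs: list[PR] = field(default_factory=list)

def should_follow_up(batch: Batch, *, force_fw: bool = False) -> bool:
    # a followup only ever makes sense for a forward-port batch
    if not batch.is_forward_port:
        return False
    # normally a detached PR (e.g. after a conflict) blocks the followup,
    # but a forced port (branch being deactivated) must go through anyway
    return force_fw or not any(pr.detached for pr in batch.prs)

# a batch whose forward-port stopped on a conflict: its PR is detached
conflicted = Batch(is_forward_port=True, prs=[PR("repo#42", detached=True)])
assert not should_follow_up(conflicted)             # regular followups skip it
assert should_follow_up(conflicted, force_fw=True)  # forced port proceeds
```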

Also while at it, make `force_fw` a regular flag: I don't understand
why I made it a context value in the first place, it's only passed
from one location, and that location directly calls the one function
which uses it...
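For illustration, the change in plumbing is roughly this (a simplified
sketch using a plain class rather than an Odoo recordset; `Batches`
and its context handling are stand-ins for the real environment
context mechanics):

```python
class Batches:
    def __init__(self, context=None):
        self.context = context or {}

    def with_context(self, **ctx):
        # before: the flag rode along in the environment context...
        return Batches({**self.context, **ctx})

    def _schedule_fp_followup_old(self):
        # ...and was fished back out by the one function that cares
        return self.context.get('force_fw', False)

    def _schedule_fp_followup(self, *, force_fw=False):
        # after: the single caller passes it explicitly as a keyword
        return force_fw

batches = Batches()
assert batches.with_context(force_fw=True)._schedule_fp_followup_old()
assert batches._schedule_fp_followup(force_fw=True)
```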

Fixes #897
Xavier Morel 2024-06-28 12:22:23 +02:00
parent 318e55337c
commit 0206d5f977
3 changed files with 29 additions and 13 deletions


@@ -83,12 +83,11 @@ class Project(models.Model):
# and has not already been forward ported
and Batch.search_count([('parent_id', '=', b.id)]) == 0
)
ported |= extant.prs.filtered(lambda p: p._find_next_target())
# enqueue a forward port as if the now deactivated branch had
# been skipped over (which is the normal fw behaviour)
for b in extant.with_context(force_fw=True):
# otherwise enqueue a followup
b._schedule_fp_followup()
# PRs may have different limits in the same batch so only notify
# those which actually needed porting
ported |= extant._schedule_fp_followup(force_fw=True)\
.prs.filtered(lambda p: p._find_next_target())
if not ported:
return


@@ -957,7 +957,7 @@ def test_disable_branch_with_batches(env, config, make_repo, users):
})
# endregion
# region set up forward ported batch
# region set up forward ported batches
with repo, fork, repo2, fork2:
fork.make_commits("a", Commit("x", tree={"x": "1"}), ref="heads/x")
pr1_a = repo.make_pr(title="X", target="a", head=f"{fork.owner}:x")
@@ -968,6 +968,11 @@ def test_disable_branch_with_batches(env, config, make_repo, users):
pr2_a = repo2.make_pr(title="X", target="a", head=f"{fork2.owner}:x")
pr2_a.post_comment("hansen r+", config['role_reviewer']['token'])
repo2.post_status(pr2_a.head, "success")
fork.make_commits("a", Commit("y", tree={"y": "1"}), ref="heads/y")
pr3_a = repo.make_pr(title="Y", target="a", head=f"{fork.owner}:y")
pr3_a.post_comment("hansen r+", config['role_reviewer']['token'])
repo.post_status(pr3_a.head, 'success')
# remove just pr2 from the forward ports (maybe?)
pr2_a_id = to_pr(env, pr2_a)
pr2_a_id.limit_id = branch_b.id
@@ -975,7 +980,6 @@ def test_disable_branch_with_batches(env, config, make_repo, users):
assert pr2_a_id.limit_id == branch_b
# endregion
with repo, repo2:
repo.post_status('staging.a', 'success')
repo2.post_status('staging.a', 'success')
@@ -984,10 +988,15 @@ def test_disable_branch_with_batches(env, config, make_repo, users):
PullRequests = env['runbot_merge.pull_requests']
pr1_b_id = PullRequests.search([('parent_id', '=', to_pr(env, pr1_a).id)])
pr2_b_id = PullRequests.search([('parent_id', '=', pr2_a_id.id)])
pr3_b_id = PullRequests.search([('parent_id', '=', to_pr(env, pr3_a).id)])
assert pr1_b_id.parent_id
assert pr1_b_id.state == 'opened'
assert pr2_b_id.parent_id
assert pr2_b_id.state == 'opened'
assert pr3_b_id.parent_id
assert pr3_b_id.state == 'opened'
# detach pr3 (?)
pr3_b_id.write({'parent_id': False, 'detach_reason': 'because'})
b_id = proj.branch_ids.filtered(lambda b: b.name == 'b')
proj.write({
@@ -995,8 +1004,10 @@ def test_disable_branch_with_batches(env, config, make_repo, users):
})
env.run_crons()
assert not b_id.active
assert PullRequests.search_count([]) == 5, "should have ported pr1 but not pr2"
assert PullRequests.search([], order="number DESC", limit=1).parent_id == pr1_b_id
# pr1_a, pr1_b, pr1_c, pr2_a, pr2_b, pr3_a, pr3_b, pr3_c
assert PullRequests.search_count([]) == 8, "should have ported pr1 and pr3 but not pr2"
assert PullRequests.search_count([('parent_id', '=', pr1_b_id.id)])
assert PullRequests.search_count([('parent_id', '=', pr3_b_id.id)])
assert repo.get_pr(pr1_b_id.number).comments == [
seen(env, repo.get_pr(pr1_b_id.number), users),


@@ -415,7 +415,7 @@ class Batch(models.Model):
new_batch._schedule_fp_followup()
return new_batch
def _schedule_fp_followup(self):
def _schedule_fp_followup(self, *, force_fw=False):
_logger = logging.getLogger(__name__).getChild('forwardport.next')
# if the PR has a parent and is CI-validated, enqueue the next PR
scheduled = self.browse(())
@@ -424,10 +424,16 @@
_logger.info('Checking if forward-port %s (%s)', batch, prs)
# in case of conflict or update individual PRs will "lose" their
# parent, which should prevent forward porting
if not (batch.parent_id and all(p.parent_id for p in batch.prs)):
#
# even if we force_fw, a *followup* should still only be for forward
# ports so check that the batch has a parent (which should be the
# same thing as all the PRs having a source, kinda, but cheaper,
# it's not entirely true as technically the user could have added a
# PR to the forward ported batch
if not (batch.parent_id and force_fw or all(p.parent_id for p in batch.prs)):
_logger.info('-> no parent %s (%s)', batch, prs)
continue
if not self.env.context.get('force_fw') and batch.source.fw_policy != 'skipci' \
if not force_fw and batch.source.fw_policy != 'skipci' \
and (invalid := batch.prs.filtered(lambda p: p.state not in ['validated', 'ready'])):
_logger.info(
'-> wrong state %s (%s)',