[FIX] *: behaviour around branch deactivation & fw maintenance

Test and refine the handling of batch forward ports around branch
deactivation, especially with differential limits (PRs of a batch
having different forward-port limits). Notably, fix an error in the
conversion of the FW process to batches: individual PR limits were
not correctly taken into account during forward port unless *all* the
PRs of the batch had reached theirs, even though per-PR limits are a
primary motivation for the change.
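
As a rough sketch of the intended per-PR behaviour (the helper name
is made up; the calls mirror the Batch hunks further down), each PR
of a batch is now checked against its own limit, so some PRs can stop
while the others keep getting ported:

    def _split_batch_by_next_target(batch):
        # PRs whose limit has been reached simply stop here; the rest
        # are grouped by the branch they should be ported to next.
        targets = {}
        stopped = []
        for pr in batch.prs:
            next_target = pr._find_next_target()  # None once the PR's limit is hit
            if next_target:
                targets.setdefault(next_target, []).append(pr)
            else:
                stopped.append(pr)
        return targets, stopped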

Partial forward porting should now work correctly, and the detection
and handling of a differential next target should be more robust to
boot.
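
A condensed sketch of the deactivation path (the wrapper name is
hypothetical; the search domain and filtering come from the Project
hunk below): for each branch being deactivated, find the unmerged
forward-port batches that still have somewhere to go and schedule
their follow-up as if the branch had simply been skipped over.

    def _port_batches_of_deactivated_branch(self, deactivated):
        Batch = self.env['runbot_merge.batch']
        extant = Batch.search([
            ('parent_id', '!=', False),
            ('target', '=', deactivated.id),
            ('prs.limit_id', '!=', deactivated.id),
            ('merge_date', '=', False),
        ]).filtered(
            # has a next target and has not already been forward ported
            lambda b: b._find_next_target()
            and not Batch.search_count([('parent_id', '=', b.id)])
        )
        for batch in extant.with_context(force_fw=True):
            batch._schedule_fp_followup()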

Significantly rework the interplay between batches and PRs being
closed, in order to maintain the sequencing / consistency of forward
port sequences: previously a batch would get deleted once all its PRs
were closed, but that is an issue when the batch is part of a forward
port sequence, as information is then lost.

Instead, detach the PRs from the batch as before, but skip unlinking
the batch if it has historical value (a parent or child
batch). Currently the batch's state is a bit weird as it doesn't get
merged, but...
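
Condensed from the Batch.unlink override added below: a batch may
only be deleted once it has no PRs left and no surviving parent or
child batch, so the forward-port history remains reconstructible.

    def unlink(self):
        unlinkable = self.filtered(lambda b: not (
            b.prs                                                 # still has PRs
            or (b.parent_id - self)                               # surviving parent batch
            or (self.search([('parent_id', '=', b.id)]) - self)   # surviving child batch
        ))
        return super(Batch, unlinkable).unlink()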

While at it, significantly simplify `_try_closing`: it turns out to
carry a ton of incidental / historical complexity from old attempts
at fixing concurrency issues, which should not be necessary anymore
and in fact actively interferes with the new and more compute-heavy
state of things.
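
The rough shape of the simplified `_try_closing` (condensed from the
last hunks below): take a row lock to avoid racing a concurrent
merge, then let `write` handle unsetting the reviewer and detaching
the batch instead of hand-rolled SQL and an explicit commit.

    def _try_closing(self, by):
        # ignore if the PR is already being updated in a separate
        # transaction (most likely being merged?)
        self.env.cr.execute('''
        SELECT batch_id FROM runbot_merge_pull_requests
        WHERE id = %s AND state != 'merged' AND state != 'closed'
        FOR UPDATE SKIP LOCKED;
        ''', [self.id])
        if not self.env.cr.fetchone():
            return False
        self.unstage("closed by %s", by)
        # `write` automatically unsets reviewer & batch when closing
        self.write({'closed': True, 'reviewed_by': False, 'batch_id': False})
        return True
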
Xavier Morel 2024-02-23 13:46:37 +01:00
parent 124d1212a2
commit 94fe0329b4
4 changed files with 338 additions and 74 deletions


@ -26,6 +26,7 @@ import requests
from odoo import models, fields, api
from odoo.exceptions import UserError
from odoo.osv import expression
from odoo.tools.misc import topological_sort, groupby
from odoo.tools.appdirs import user_cache_dir
from odoo.addons.base.models.res_partner import Partner
@ -62,35 +63,43 @@ class Project(models.Model):
originally disabled (and thus skipped over)
"""
Batch = self.env['runbot_merge.batch']
ported = self.env['runbot_merge.pull_requests']
for p in self:
actives = previously_active_branches[p]
for deactivated in p.branch_ids.filtered(lambda b: not b.active) & actives:
# if a PR targets a deactivated branch, and that's not its limit,
# and it doesn't have a child (e.g. CI failed), enqueue a forward
# port as if the now deactivated branch had been skipped over (which
# is the normal fw behaviour)
# if a non-merged batch targets a deactivated branch which is
# not its limit
extant = Batch.search([
('parent_id', '!=', False),
('target', '=', deactivated.id),
# FIXME: this should probably be *all* the PRs of the batch
# if at least one of the PRs has a different limit
('prs.limit_id', '!=', deactivated.id),
('merge_date', '=', False),
])
]).filtered(lambda b:\
# and has a next target (should already be a function of
# the search but doesn't hurt)
b._find_next_target() \
# and has not already been forward ported
and Batch.search_count([('parent_id', '=', b.id)]) == 0
)
ported |= extant.prs.filtered(lambda p: p._find_next_target())
# enqueue a forward port as if the now deactivated branch had
# been skipped over (which is the normal fw behaviour)
for b in extant.with_context(force_fw=True):
next_target = b._find_next_target()
# should not happen since we already filtered out limits
if not next_target:
continue
# check if it has a descendant in the next branch, if so skip
if Batch.search_count([
('parent_id', '=', b.id),
('target', '=', next_target.id)
]):
continue
# otherwise enqueue a followup
b._schedule_fp_followup()
if not ported:
return
for feedback in self.env['runbot_merge.pull_requests.feedback'].search(expression.OR(
[('repository', '=', p.repository.id), ('pull_request', '=', p.number)]
for p in ported
)):
# FIXME: better signal
if 'disabled' in feedback.message:
feedback.message += '\n\nAs this was not its limit, it will automatically be forward ported to the next active branch.'
def _insert_intermediate_prs(self, branches_before):
"""If new branches have been added to the sequence inbetween existing
branches (mostly a freeze inserted before the main branch), fill in


@ -918,3 +918,253 @@ def test_missing_magic_ref(env, config, make_repo):
# what they are (rather than e.g. diff the HEAD it branch with the target)
# as a result it doesn't forwardport our fake, we'd have to reset the PR's
# branch for that to happen
def test_disable_branch_with_batches(env, config, make_repo, users):
"""We want to avoid losing pull requests, so when deactivating a branch,
if there are *forward port* batches targeting that branch which have not
been forward ported yet port them over, as if their source had been merged
after the branch was disabled (thus skipped over)
"""
proj, repo, fork = make_basic(env, config, make_repo, fp_token=True, fp_remote=True)
proj.repo_ids.required_statuses = "default"
branch_b = env['runbot_merge.branch'].search([('name', '=', 'b')])
assert branch_b
# region repo2 creation & setup
repo2 = make_repo('proj2')
with repo2:
[a, b, c] = repo2.make_commits(
None,
Commit("a", tree={"f": "a"}),
Commit("b", tree={"g": "b"}),
Commit("c", tree={"h": "c"}),
)
repo2.make_ref("heads/a", a)
repo2.make_ref("heads/b", b)
repo2.make_ref("heads/c", c)
fork2 = repo2.fork()
repo2_id = env['runbot_merge.repository'].create({
"project_id": proj.id,
"name": repo2.name,
"required_statuses": "default",
"fp_remote_target": fork2.name,
})
env['res.partner'].search([
('github_login', '=', config['role_reviewer']['user'])
]).write({
'review_rights': [(0, 0, {'repository_id': repo2_id.id, 'review': True})]
})
env['res.partner'].search([
('github_login', '=', config['role_self_reviewer']['user'])
]).write({
'review_rights': [(0, 0, {'repository_id': repo2_id.id, 'self_review': True})]
})
# endregion
# region set up forward ported batch
with repo, fork, repo2, fork2:
fork.make_commits("a", Commit("x", tree={"x": "1"}), ref="heads/x")
pr1_a = repo.make_pr(title="X", target="a", head=f"{fork.owner}:x")
pr1_a.post_comment("hansen r+", config['role_reviewer']['token'])
repo.post_status(pr1_a.head, "success")
fork2.make_commits("a", Commit("x", tree={"x": "1"}), ref="heads/x")
pr2_a = repo2.make_pr(title="X", target="a", head=f"{fork2.owner}:x")
pr2_a.post_comment("hansen r+", config['role_reviewer']['token'])
repo2.post_status(pr2_a.head, "success")
# remove just pr2 from the forward ports (maybe?)
pr2_a_id = to_pr(env, pr2_a)
pr2_a_id.limit_id = branch_b.id
env.run_crons()
assert pr2_a_id.limit_id == branch_b
# endregion
with repo, repo2:
repo.post_status('staging.a', 'success')
repo2.post_status('staging.a', 'success')
env.run_crons()
PullRequests = env['runbot_merge.pull_requests']
pr1_b_id = PullRequests.search([('parent_id', '=', to_pr(env, pr1_a).id)])
pr2_b_id = PullRequests.search([('parent_id', '=', pr2_a_id.id)])
assert pr1_b_id.parent_id
assert pr1_b_id.state == 'opened'
assert pr2_b_id.parent_id
assert pr2_b_id.state == 'opened'
b_id = proj.branch_ids.filtered(lambda b: b.name == 'b')
proj.write({
'branch_ids': [(1, b_id.id, {'active': False})]
})
env.run_crons()
assert not b_id.active
assert PullRequests.search_count([]) == 5, "should have ported pr1 but not pr2"
assert PullRequests.search([], order="number DESC", limit=1).parent_id == pr1_b_id
assert repo.get_pr(pr1_b_id.number).comments == [
seen(env, repo.get_pr(pr1_b_id.number), users),
(users['user'], "This PR targets b and is part of the forward-port chain. Further PRs will be created up to c.\n\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n"),
(users['user'], "@{user} @{reviewer} the target branch 'b' has been disabled, you may want to close this PR.\n\nAs this was not its limit, it will automatically be forward ported to the next active branch.".format_map(users)),
]
assert repo2.get_pr(pr2_b_id.number).comments == [
seen(env, repo2.get_pr(pr2_b_id.number), users),
(users['user'], """\
@{user} @{reviewer} this PR targets b and is the last of the forward-port chain.
To merge the full chain, use
> @hansen r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""".format_map(users)),
(users['user'], "@{user} @{reviewer} the target branch 'b' has been disabled, you may want to close this PR.".format_map(users)),
]
def test_disable_multitudes(env, config, make_repo, users, setreviewers):
"""Ensure that deactivation ports can jump over other deactivated branches.
"""
# region setup
repo = make_repo("bob")
project = env['runbot_merge.project'].create({
"name": "bob",
"github_token": config['github']['token'],
"github_prefix": "hansen",
"fp_github_token": config['github']['token'],
"fp_github_name": "herbert",
"branch_ids": [
(0, 0, {'name': 'a', 'sequence': 90}),
(0, 0, {'name': 'b', 'sequence': 80}),
(0, 0, {'name': 'c', 'sequence': 70}),
(0, 0, {'name': 'd', 'sequence': 60}),
],
"repo_ids": [(0, 0, {
'name': repo.name,
'required_statuses': 'default',
'fp_remote_target': repo.name,
})],
})
setreviewers(project.repo_ids)
with repo:
[a, b, c, d] = repo.make_commits(
None,
Commit("a", tree={"branch": "a"}),
Commit("b", tree={"branch": "b"}),
Commit("c", tree={"branch": "c"}),
Commit("d", tree={"branch": "d"}),
)
repo.make_ref("heads/a", a)
repo.make_ref("heads/b", b)
repo.make_ref("heads/c", c)
repo.make_ref("heads/d", d)
# endregion
with repo:
[a] = repo.make_commits("a", Commit("X", tree={"x": "1"}), ref="heads/x")
pra = repo.make_pr(target="a", head="x")
pra.post_comment("hansen r+", config['role_reviewer']['token'])
repo.post_status(a, "success")
env.run_crons()
with repo:
repo.post_status('staging.a', 'success')
env.run_crons()
pra_id = to_pr(env, pra)
assert pra_id.state == 'merged'
prb_id = env['runbot_merge.pull_requests'].search([('target.name', '=', 'b')])
assert prb_id.parent_id == pra_id
project.write({
'branch_ids': [
(1, b.id, {'active': False})
for b in env['runbot_merge.branch'].search([('name', 'in', ['b', 'c'])])
]
})
env.run_crons()
# should not have ported prb to the disabled branch c
assert not env['runbot_merge.pull_requests'].search([('target.name', '=', 'c')])
# should have ported prb to the active branch d
prd_id = env['runbot_merge.pull_requests'].search([('target.name', '=', 'd')])
assert prd_id
assert prd_id.parent_id == prb_id
prb = repo.get_pr(prb_id.number)
assert prb.comments == [
seen(env, prb, users),
(users['user'], 'This PR targets b and is part of the forward-port chain. Further PRs will be created up to d.\n\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'),
(users['user'], """\
@{user} @{reviewer} the target branch 'b' has been disabled, you may want to close this PR.
As this was not its limit, it will automatically be forward ported to the next active branch.\
""".format_map(users)),
]
prd = repo.get_pr(prd_id.number)
assert prd.comments == [
seen(env, prd, users),
(users['user'], """\
@{user} @{reviewer} this PR targets d and is the last of the forward-port chain.
To merge the full chain, use
> @hansen r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""".format_map(users))
]
def test_maintain_batch_history(env, config, make_repo, users):
"""Batches which are part of a forward port sequence should not be deleted
even if all their PRs are closed.
Sadly in that case it's a bit difficult to maintain the integrity of the
batch as each PR being closed (until the last one?) will be removed from
the batch.
"""
proj, repo, fork = make_basic(env, config, make_repo, fp_token=True, fp_remote=True)
proj.repo_ids.required_statuses = "default"
with repo, fork:
fork.make_commits("a", Commit("x", tree={"x": "1"}), ref="heads/x")
pr1_a = repo.make_pr(title="X", target="a", head=f"{fork.owner}:x")
pr1_a.post_comment("hansen r+", config['role_reviewer']['token'])
repo.post_status(pr1_a.head, "success")
env.run_crons()
pr1_a_id = to_pr(env, pr1_a)
with repo:
repo.post_status('staging.a', 'success')
env.run_crons()
pr1_b_id = env['runbot_merge.pull_requests'].search([('parent_id', '=', pr1_a_id.id)])
with repo:
repo.post_status(pr1_b_id.head, 'success')
env.run_crons()
pr1_c_id = env['runbot_merge.pull_requests'].search([('parent_id', '=', pr1_b_id.id)])
# region check that all the batches are set up correctly
assert pr1_a_id.batch_id
assert pr1_b_id.batch_id
assert pr1_c_id.batch_id
assert pr1_c_id.batch_id.parent_id == pr1_b_id.batch_id
assert pr1_b_id.batch_id.parent_id == pr1_a_id.batch_id
b_batch = pr1_b_id.batch_id
# endregion
pr1_b = repo.get_pr(pr1_b_id.number)
with repo:
pr1_b.close()
env.run_crons()
assert pr1_b_id.state == 'closed'
# region check that all the batches are *still* set up correctly
assert pr1_a_id.batch_id
assert not pr1_b_id.batch_id, "the PR still gets detached from the batch"
assert b_batch.exists(), "the batch is not deleted tho"
assert pr1_c_id.batch_id
assert pr1_c_id.batch_id.parent_id == b_batch
assert b_batch.parent_id == pr1_a_id.batch_id
# endregion


@ -1,18 +1,16 @@
from __future__ import annotations
import ast
import base64
import contextlib
import logging
import os
import re
import typing
from collections import defaultdict
from collections.abc import Iterator
import requests
from odoo import models, fields, api
from .pull_requests import PullRequests, Branch
from .utils import enum
@ -142,8 +140,8 @@ class Batch(models.Model):
@api.depends(
"merge_date",
"prs.error", "prs.draft", "prs.squash", "prs.merge_method",
"skipchecks", "prs.status", "prs.reviewed_by",
"prs.target"
"skipchecks",
"prs.status", "prs.reviewed_by", "prs.target",
)
def _compute_stageable(self):
for batch in self:
@ -213,30 +211,32 @@ class Batch(models.Model):
)
return
PRs = self.env['runbot_merge.pull_requests']
targets = defaultdict(lambda: PRs)
for p, t in zip(self.prs, all_targets):
if t is None:
if t:
targets[t] |= p
else:
_logger.info("Skip forward porting %s (of %s): no next target", p.display_name, self)
# all the PRs *with a next target* should have the same, we can have PRs
# stopping forward port earlier but skipping... probably not
targets = set(filter(None, all_targets))
if len(targets) != 1:
m = dict(zip(all_targets, self.prs))
m.pop(None, None)
mm = dict(zip(self.prs, all_targets))
for pr in self.prs:
t = mm[pr]
# if a PR is skipped, don't flag it for discrepancy
if not t:
continue
other, linked = next((target, p) for target, p in m.items() if target != t)
self.env.ref('runbot_merge.forwardport.failure.discrepancy')._send(
repository=pr.repository,
pull_request=pr.number,
token_field='fp_github_token',
format_args={'pr': pr, 'linked': linked, 'next': t.name, 'other': other.name},
)
for t, prs in targets.items():
linked, other = next((
(linked, other)
for other, linkeds in targets.items()
if other != t
for linked in linkeds
))
for pr in prs:
self.env.ref('runbot_merge.forwardport.failure.discrepancy')._send(
repository=pr.repository,
pull_request=pr.number,
token_field='fp_github_token',
format_args={'pr': pr, 'linked': linked, 'next': t.name, 'other': other.name},
)
_logger.warning(
"Cancelling forward-port of %s (%s): found different next branches (%s)",
self,
@ -245,7 +245,7 @@ class Batch(models.Model):
)
return
target = targets.pop()
target, prs = next(iter(targets.items()))
# this is run by the cron, no need to check if otherwise scheduled:
# either the scheduled job is this one, or it's an other scheduling
# which will run after this one and will see the port already exists
@ -253,7 +253,7 @@ class Batch(models.Model):
_logger.warning(
"Will not forward-port %s (%s): already ported",
self,
', '.join(self.prs.mapped('display_name'))
', '.join(prs.mapped('display_name'))
)
return
@ -269,7 +269,7 @@ class Batch(models.Model):
)
conflicts = {}
with contextlib.ExitStack() as s:
for pr in self.prs:
for pr in prs:
conflicts[pr], working_copy = pr._create_fp_branch(
target, new_branch, s)
@ -281,9 +281,9 @@ class Batch(models.Model):
# could create a batch here but then we'd have to update `_from_gh` to
# take a batch and then `create` to not automatically resolve batches,
# easier to not do that.
new_batch = self.env['runbot_merge.pull_requests'].browse(())
new_batch = PRs.browse(())
self.env.cr.execute('LOCK runbot_merge_pull_requests IN SHARE MODE')
for pr in self.prs:
for pr in prs:
owner, _ = pr.repository.fp_remote_target.split('/', 1)
source = pr.source_id or pr
root = pr.root_id
@ -305,15 +305,15 @@ class Batch(models.Model):
# delete all the branches this should automatically close the
# PRs if we've created any. Using the API here is probably
# simpler than going through the working copies
for repo in self.prs.mapped('repository'):
for repo in prs.mapped('repository'):
d = gh.delete(f'https://api.github.com/repos/{repo.fp_remote_target}/git/refs/heads/{new_branch}')
if d.ok:
_logger.info("Deleting %s:%s=success", repo.fp_remote_target, new_branch)
else:
_logger.warning("Deleting %s:%s=%s", repo.fp_remote_target, new_branch, d.text)
raise RuntimeError("Forwardport failure: %s (%s)" % (pr.display_name, r.text))
raise RuntimeError(f"Forwardport failure: {pr.display_name} ({r.text})")
new_pr = pr._from_gh(r.json())
new_pr = PRs._from_gh(r.json())
_logger.info("Created forward-port PR %s", new_pr)
new_batch |= new_pr
@ -335,7 +335,7 @@ class Batch(models.Model):
format_args={'source': source, 'pr': pr, 'new': new_pr, 'footer': FOOTER},
)
for pr, new_pr in zip(self.prs, new_batch):
for pr, new_pr in zip(prs, new_batch):
(h, out, err, hh) = conflicts.get(pr) or (None, None, None, None)
if h:
@ -371,6 +371,8 @@ class Batch(models.Model):
f"* {p.display_name}\n"
for p in pr._iter_ancestors()
if p.parent_id
if p.state not in ('closed', 'merged')
if p.target.active
)
template = 'runbot_merge.forwardport.final'
format_args = {
@ -505,3 +507,18 @@ class Batch(models.Model):
tip._schedule_fp_followup()
return True
def unlink(self):
"""
batches can be unlinked if they:
- have run out of PRs
- and don't have a parent batch (which is not being deleted)
- and don't have a child batch (which is not being deleted)
this is to keep track of forward port histories at the batch level
"""
unlinkable = self.filtered(
lambda b: not (b.prs or (b.parent_id - self) or (self.search([('parent_id', '=', b.id)]) - self))
)
return super(Batch, unlinkable).unlink()


@ -288,9 +288,8 @@ class Branch(models.Model):
b.display_name += ' (inactive)'
def write(self, vals):
super().write(vals)
if vals.get('active') is False:
self.active_staging_id.cancel(
if vals.get('active') is False and (actives := self.filtered('active')):
actives.active_staging_id.cancel(
"Target branch deactivated by %r.",
self.env.user.login,
)
@ -299,8 +298,9 @@ class Branch(models.Model):
'repository': pr.repository.id,
'pull_request': pr.number,
'message': tmpl._format(pr=pr),
} for pr in self.prs])
} for pr in actives.prs])
self.env.ref('runbot_merge.branch_cleanup')._trigger()
super().write(vals)
return True
@api.depends('staging_ids.active')
@ -939,7 +939,7 @@ class PullRequests(models.Model):
"""
root = (self.source_id or self)
if self.target == root.limit_id:
return
return None
branches = root.target.project_id.with_context(active_test=False)._forward_port_ordered()
if (branch_filter := self.repository.branch_filter) and branch_filter != '[]':
@ -1255,8 +1255,7 @@ class PullRequests(models.Model):
case True if not self.closed:
vals['reviewed_by'] = False
vals['batch_id'] = False
if len(self.batch_id.prs) == 1:
self.env.cr.precommit.add(self.batch_id.unlink)
self.env.cr.precommit.add(self.batch_id.unlink)
case False if self.closed:
vals['batch_id'] = self._get_batch(
target=vals.get('target') or self.target.id,
@ -1373,29 +1372,18 @@ class PullRequests(models.Model):
# ignore if the PR is already being updated in a separate transaction
# (most likely being merged?)
self.env.cr.execute('''
SELECT id, state, batch_id FROM runbot_merge_pull_requests
WHERE id = %s AND state != 'merged'
SELECT batch_id FROM runbot_merge_pull_requests
WHERE id = %s AND state != 'merged' AND state != 'closed'
FOR UPDATE SKIP LOCKED;
''', [self.id])
r = self.env.cr.fetchone()
if not r:
return False
self.env.cr.execute('''
UPDATE runbot_merge_pull_requests
SET closed=True, reviewed_by = null
WHERE id = %s
''', [self.id])
self.env.cr.commit()
self.modified(['closed', 'reviewed_by', 'batch_id'])
self.unstage("closed by %s", by)
# `write` automatically unsets reviewer & batch when closing but...
self.write({'closed': True, 'reviewed_by': False, 'batch_id': False})
# PRs should leave their batch when *closed*, and batches must die when
# no PR is linked
old_batch = self.batch_id
self.batch_id = False
if not old_batch.prs:
old_batch.unlink()
return True
# ordering is a bit unintuitive because the lowest sequence (and name)
@ -1507,10 +1495,10 @@ class Feedback(models.Model):
"""
_name = _description = 'runbot_merge.pull_requests.feedback'
repository = fields.Many2one('runbot_merge.repository', required=True)
repository = fields.Many2one('runbot_merge.repository', required=True, index=True)
# store the PR number (not id) as we may want to send feedback to PR
# objects on non-handled branches
pull_request = fields.Integer(group_operator=None)
pull_request = fields.Integer(group_operator=None, index=True)
message = fields.Char()
close = fields.Boolean()
token_field = fields.Selection(