mirror of
https://github.com/odoo/runbot.git
synced 2025-03-15 15:35:46 +07:00
[IMP] forwardport: allow updating the fw limit after merging the source
Currently, once a source PR has been merged it's not possible to set or update a limit, which can be inconvenient (e.g. people might have forgotten to set it, or realise after the fact that setting one is not useful, or might realise after the fact that they should *unset* it).

This PR relaxes that constraint (which is not just a relaxation as it requires a bunch of additional work and validation). It should now be possible to:

- update the limit to any target, as long as that target is at or above the current forwardport tip's
  - with the exception of a tip being forward ported, as that can't be cancelled
- resume a forward port stopped by a previously set limit (just increase it to whatever)
- set a limit from any forward-port PR
- set a limit from a source, post-merge

The process should also provide pretty chatty feedback:

- feedback on the source and closest root
- feedback about adjustments if any (e.g. setting the limit to B but there's already a forward port to C, the 'bot should set the limit to C and tell you that it happened and why)

Fixes #506
This commit is contained in:
parent ea2857ec31
commit f44b0c018e
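Illustrative aside (not part of the commit): the core of the new behaviour is that a requested limit is clamped to the current forward-port tip, and the feedback message explains any adjustment. A minimal self-contained sketch of that rule, with hypothetical branch names and a plain list standing in for the mergebot's sequence-ordered branch records:

```
# Illustrative sketch only, not mergebot code: hypothetical branches,
# ordered from oldest to newest.
BRANCH_ORDER = ['a', 'b', 'c', 'd']

def effective_limit(requested: str, current_tip: str) -> tuple:
    """Clamp the requested forward-port limit to the existing tip and build
    the kind of chatty feedback the commit message describes."""
    key = BRANCH_ORDER.index
    real = max(requested, current_tip, key=key)
    msg = f"Forward-porting to {real!r}"
    if real != requested:
        msg += f" (instead of the requested {requested!r} because a forward port to {current_tip!r} already exists)"
    return real, msg + "."

# asking for 'b' when a forward port to 'c' already exists gets bumped to 'c'
assert effective_limit('b', 'c')[0] == 'c'
# asking for 'd' simply extends the limit
assert effective_limit('d', 'c') == ('d', "Forward-porting to 'd'.")
```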
@@ -24,7 +24,20 @@ class Queue:
        raise NotImplementedError

    def _process(self):
        for b in self.search(self._search_domain(), order='create_date, id', limit=self.limit):
        skip = 0
        from_clause, where_clause, params = self._search(self._search_domain(), order='create_date, id', limit=1).get_sql()
        for _ in range(self.limit):
            self.env.cr.execute(f"""
                SELECT id FROM {from_clause}
                WHERE {where_clause or "true"}
                ORDER BY create_date, id
                LIMIT 1 OFFSET %s
                FOR UPDATE SKIP LOCKED
            """, [*params, skip])
            b = self.browse(self.env.cr.fetchone())
            if not b:
                return

            try:
                with sentry_sdk.start_span(description=self._name):
                    b._process_item()
@@ -33,11 +46,12 @@ class Queue:
            except Exception:
                _logger.exception("Error while processing %s, skipping", b)
                self.env.cr.rollback()
                b._on_failure()
                if b._on_failure():
                    skip += 1
            self.env.cr.commit()

    def _on_failure(self):
        pass
        return True

    def _search_domain(self):
        return []
@@ -48,7 +62,7 @@ class ForwardPortTasks(models.Model, Queue):

    limit = 10

    batch_id = fields.Many2one('runbot_merge.batch', required=True)
    batch_id = fields.Many2one('runbot_merge.batch', required=True, index=True)
    source = fields.Selection([
        ('merge', 'Merge'),
        ('fp', 'Forward Port Followup'),
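Illustrative aside (not part of the commit): the reworked _process above claims one queue row at a time with FOR UPDATE SKIP LOCKED, so concurrent workers never block each other, and rows that fail are stepped over via a growing OFFSET rather than retried immediately. A standalone sketch of the same pattern using plain psycopg2, with a hypothetical DSN and job_queue table (the real code goes through the ORM cursor):

```
import psycopg2

# Hypothetical connection string and table, for illustration only.
conn = psycopg2.connect("dbname=example")

def process_one(job_id):
    """Placeholder for the actual work done on a claimed row."""

def drain(limit=10):
    skip = 0  # number of failed rows to leave in place and step over
    for _ in range(limit):
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id FROM job_queue
                ORDER BY create_date, id
                LIMIT 1 OFFSET %s
                FOR UPDATE SKIP LOCKED
                """,
                [skip],
            )
            row = cur.fetchone()
            if not row:
                return  # nothing left that is unclaimed and unskipped
            try:
                process_one(row[0])
                cur.execute("DELETE FROM job_queue WHERE id = %s", [row[0]])
            except Exception:
                conn.rollback()  # release the row lock, keep the row
                skip += 1        # and step over it on the next pass
                continue
        conn.commit()
```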
@@ -24,21 +24,26 @@ import re
import subprocess
import tempfile
import typing
from functools import reduce
from operator import itemgetter
from pathlib import Path

import dateutil.relativedelta
import psycopg2.errors
import requests

from odoo import models, fields, api
from odoo.osv import expression
from odoo.exceptions import UserError
from odoo.tools.misc import topological_sort, groupby
from odoo.tools.misc import topological_sort, groupby, Reverse
from odoo.tools.sql import reverse_order
from odoo.tools.appdirs import user_cache_dir
from odoo.addons.base.models.res_partner import Partner
from odoo.addons.runbot_merge import git, utils
from odoo.addons.runbot_merge.models.pull_requests import RPLUS
from odoo.addons.runbot_merge.models.pull_requests import RPLUS, Branch
from odoo.addons.runbot_merge.models.stagings_create import Message


footer = '\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'

DEFAULT_DELTA = dateutil.relativedelta.relativedelta(days=3)
@@ -48,6 +53,8 @@ _logger = logging.getLogger('odoo.addons.forwardport')
class Project(models.Model):
    _inherit = 'runbot_merge.project'

    id: int
    github_prefix: str
    fp_github_token = fields.Char()
    fp_github_name = fields.Char(store=True, compute="_compute_git_identity")
    fp_github_email = fields.Char(store=True, compute="_compute_git_identity")
@@ -217,11 +224,25 @@ class Project(models.Model):

class Repository(models.Model):
    _inherit = 'runbot_merge.repository'

    id: int
    project_id: Project
    name: str
    branch_filter: str
    fp_remote_target = fields.Char(help="where FP branches get pushed")

class PullRequests(models.Model):
    _inherit = 'runbot_merge.pull_requests'

    id: int
    display_name: str
    number: int
    repository: Repository
    target: Branch
    reviewed_by: Partner
    head: str
    state: str

    statuses = fields.Text(recursive=True)

    limit_id = fields.Many2one('runbot_merge.branch', help="Up to which branch should this PR be forward-ported")
@@ -230,6 +251,7 @@ class PullRequests(models.Model):
        'runbot_merge.pull_requests', index=True,
        help="a PR with a parent is an automatic forward port"
    )
    root_id = fields.Many2one('runbot_merge.pull_requests', compute='_compute_root', recursive=True)
    source_id = fields.Many2one('runbot_merge.pull_requests', index=True, help="the original source of this FP even if parents were detached along the way")
    forwardport_ids = fields.One2many('runbot_merge.pull_requests', 'source_id')
    reminder_backoff_factor = fields.Integer(default=-4, group_operator=None)
@@ -273,6 +295,10 @@ class PullRequests(models.Model):
            )
            pr.ping = s and (s + ' ')

    @api.depends('parent_id.root_id')
    def _compute_root(self):
        for p in self:
            p.root_id = reduce(lambda _, p: p, self._iter_ancestors())

    @api.model_create_single
    def create(self, vals):
@@ -291,7 +317,7 @@ class PullRequests(models.Model):
                ast.literal_eval(repo.branch_filter or '[]')
            )[-1].id
        if vals.get('parent_id') and 'source_id' not in vals:
            vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
            vals['source_id'] = self.browse(vals['parent_id']).root_id.id
        if vals.get('state') == 'merged':
            vals['merge_date'] = fields.Datetime.now()
        return super().create(vals)
@@ -317,12 +343,12 @@ class PullRequests(models.Model):
            # updating children
            if self.search_count([('parent_id', '=', self.id)]):
                self.env['forwardport.updates'].create({
                    'original_root': self._get_root().id,
                    'original_root': self.root_id.id,
                    'new_root': self.id
                })

        if vals.get('parent_id') and 'source_id' not in vals:
            vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
            vals['source_id'] = self.browse(vals['parent_id']).root_id.id
        if vals.get('state') == 'merged':
            vals['merge_date'] = fields.Datetime.now()
        r = super().write(vals)
@@ -444,28 +470,7 @@ class PullRequests(models.Model):
            elif not limit:
                msg = "please provide a branch to forward-port to."
            else:
                limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([
                    ('project_id', '=', self.repository.project_id.id),
                    ('name', '=', limit),
                ])
                if self.source_id:
                    msg = "forward-port limit can only be set on " \
                          f"an origin PR ({self.source_id.display_name} " \
                          "here) before it's merged and forward-ported."
                elif self.state in ['merged', 'closed']:
                    msg = "forward-port limit can only be set before the PR is merged."
                elif not limit_id:
                    msg = "there is no branch %r, it can't be used as a forward port target." % limit
                elif limit_id == self.target:
                    ping = False
                    msg = "Forward-port disabled."
                    self.limit_id = limit_id
                elif not limit_id.active:
                    msg = "branch %r is disabled, it can't be used as a forward port target." % limit_id.name
                else:
                    ping = False
                    msg = "Forward-porting to %r." % limit_id.name
                    self.limit_id = limit_id
                ping, msg = self._maybe_update_limit(limit)

            if msg or close:
                if msg:
@@ -480,6 +485,98 @@ class PullRequests(models.Model):
                    'token_field': 'fp_github_token',
                })

    def _maybe_update_limit(self, limit: str) -> typing.Tuple[bool, str]:
        limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([
            ('project_id', '=', self.repository.project_id.id),
            ('name', '=', limit),
        ])
        if not limit_id:
            return True, f"there is no branch {limit!r}, it can't be used as a forward port target."

        if limit_id != self.target and not limit_id.active:
            return True, f"branch {limit_id.name!r} is disabled, it can't be used as a forward port target."

        # not forward ported yet, just acknowledge the request
        if not self.source_id and self.state != 'merged':
            self.limit_id = limit_id
            if branch_key(limit_id) <= branch_key(self.target):
                return False, "Forward-port disabled."
            else:
                return False, f"Forward-porting to {limit_id.name!r}."

        # if the PR has been forwardported
        prs = (self | self.forwardport_ids | self.source_id | self.source_id.forwardport_ids)
        tip = max(prs, key=pr_key)
        # if the fp tip was closed it's fine
        if tip.state == 'closed':
            return True, f"{tip.display_name} is closed, no forward porting is going on"

        prs.limit_id = limit_id

        real_limit = max(limit_id, tip.target, key=branch_key)

        addendum = ''
        # check if tip was queued for forward porting, try to cancel if we're
        # supposed to stop here
        if real_limit == tip.target and (task := self.env['forwardport.batches'].search([('batch_id', 'in', tip.batch_ids.ids)])):
            try:
                with self.env.cr.savepoint():
                    self.env.cr.execute(
                        "SELECT FROM forwardport_batches "
                        "WHERE id = %s FOR UPDATE NOWAIT",
                        [task.id])
            except psycopg2.errors.LockNotAvailable:
                # row locked = port occurring and probably going to succeed,
                # so next(real_limit) likely a done deal already
                return True, (
                    f"Forward port of {tip.display_name} likely already "
                    f"ongoing, unable to cancel, close next forward port "
                    f"when it completes.")
            else:
                self.env.cr.execute("DELETE FROM forwardport_batches WHERE id = %s", [task.id])

        if real_limit != tip.target:
            # forward porting was previously stopped at tip, and we want it to
            # resume
            if tip.state == 'merged':
                self.env['forwardport.batches'].create({
                    'batch_id': tip.batch_ids.sorted('id')[-1].id,
                    'source': 'fp' if tip.parent_id else 'merge',
                })
                resumed = tip
            else:
                # reactivate batch
                tip.batch_ids.sorted('id')[-1].active = True
                resumed = tip._schedule_fp_followup()
            if resumed:
                addendum += f', resuming forward-port stopped at {tip.display_name}'

        if real_limit != limit_id:
            addendum += f' (instead of the requested {limit_id.name!r} because {tip.display_name} already exists)'

        # get a "stable" root rather than self's to avoid divertences between
        # PRs across a root divide (where one post-root would point to the root,
        # and one pre-root would point to the source, or a previous root)
        root = tip.root_id
        # reference the root being forward ported unless we are the root
        root_ref = '' if root == self else f' {root.display_name}'
        msg = f"Forward-porting{root_ref} to {real_limit.name!r}{addendum}."
        # send a message to the source & root except for self, if they exist
        root_msg = f'Forward-porting to {real_limit.name!r} (from {self.display_name}).'
        self.env['runbot_merge.pull_requests.feedback'].create([
            {
                'repository': p.repository.id,
                'pull_request': p.number,
                'message': root_msg,
                'token_field': 'fp_github_token',
            }
            # send messages to source and root unless root is self (as it
            # already gets the normal message)
            for p in (self.source_id | root) - self
        ])

        return False, msg

    def _notify_ci_failed(self, ci):
        # only care about FP PRs which are not staged / merged yet
        # NB: probably ignore approved PRs as normal message will handle them?
@@ -501,6 +598,7 @@ class PullRequests(models.Model):
    def _schedule_fp_followup(self):
        _logger = logging.getLogger(__name__).getChild('forwardport.next')
        # if the PR has a parent and is CI-validated, enqueue the next PR
        scheduled = self.browse(())
        for pr in self:
            _logger.info('Checking if forward-port %s (%s)', pr.display_name, pr)
            if not pr.parent_id:
@@ -548,6 +646,8 @@ class PullRequests(models.Model):
                'batch_id': batch.id,
                'source': 'fp',
            })
            scheduled |= pr
        return scheduled

    def _find_next_target(self, reference):
        """ Finds the branch between target and limit_id which follows
@@ -600,14 +700,15 @@ class PullRequests(models.Model):
        }
        return sorted(commits, key=lambda c: idx[c['sha']])

    def _iter_ancestors(self):
        while self:
            yield self
            self = self.parent_id

    def _iter_descendants(self):
        pr = self
        while True:
            pr = self.search([('parent_id', '=', pr.id)])
            if pr:
                yield pr
            else:
                break
        while pr := self.search([('parent_id', '=', pr.id)]):
            yield pr

    @api.depends('parent_id.statuses')
    def _compute_statuses(self):
@@ -619,17 +720,6 @@ class PullRequests(models.Model):
        p.update(super()._get_overrides())
        return p

    def _iter_ancestors(self):
        while self:
            yield self
            self = self.parent_id

    def _get_root(self):
        root = self
        while root.parent_id:
            root = root.parent_id
        return root

    def _port_forward(self):
        if not self:
            return
@@ -720,7 +810,7 @@ class PullRequests(models.Model):
        for pr in self:
            owner, _ = pr.repository.fp_remote_target.split('/', 1)
            source = pr.source_id or pr
            root = pr._get_root()
            root = pr.root_id

            message = source.message + '\n\n' + '\n'.join(
                "Forward-Port-Of: %s" % p.display_name
@@ -872,7 +962,7 @@ class PullRequests(models.Model):
        :rtype: (None | (str, str, str, list[commit]), Repo)
        """
        logger = _logger.getChild(str(self.id))
        root = self._get_root()
        root = self.root_id
        logger.info(
            "Forward-porting %s (%s) to %s",
            self.display_name, root.display_name, target_branch.name
@@ -1063,7 +1153,7 @@ stderr:

        # ensures all reviewers in the review path are on the PR in order:
        # original reviewer, then last conflict reviewer, then current PR
        reviewers = (self | self._get_root() | self.source_id)\
        reviewers = (self | self.root_id | self.source_id)\
            .mapped('reviewed_by.formatted_email')

        sobs = msg.headers.getlist('signed-off-by')
@@ -1127,6 +1217,19 @@ stderr:
            }
        )


# ordering is a bit unintuitive because the lowest sequence (and name)
# is the last link of the fp chain, reasoning is a bit more natural the
# other way around (highest object is the last), especially with Python
# not really having lazy sorts in the stdlib
def branch_key(b: Branch, /, _key=itemgetter('sequence', 'name')):
    return Reverse(_key(b))


def pr_key(p: PullRequests, /):
    return branch_key(p.target)


class Stagings(models.Model):
    _inherit = 'runbot_merge.stagings'

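Illustrative aside (not part of the commit): the branch_key/pr_key helpers above rely on the mergebot convention that more recent branches carry lower sequence numbers (see the post_merge fixture below, where sequence is 1000 - i * 10), so wrapping the key in Reverse makes max() pick the newest branch, i.e. the current tip of a forward-port chain. A self-contained sketch with hypothetical sequence values and a minimal stand-in for odoo.tools.misc.Reverse:

```
from operator import itemgetter

class Reverse:
    """Minimal stand-in for odoo.tools.misc.Reverse: wraps a value and
    inverts its ordering, so max() behaves like min() on the wrapped key."""
    def __init__(self, val):
        self.val = val
    def __lt__(self, other):
        return self.val > other.val
    def __eq__(self, other):
        return self.val == other.val

def branch_key(b, _key=itemgetter('sequence', 'name')):
    return Reverse(_key(b))

branches = [
    {'name': 'a', 'sequence': 30},  # oldest
    {'name': 'b', 'sequence': 20},
    {'name': 'c', 'sequence': 10},  # most recent
]
# max() with the reversed key picks the lowest-sequence branch, i.e. the
# latest link of a forward-port chain.
assert max(branches, key=branch_key)['name'] == 'c'
```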
@@ -1,3 +1,4 @@

import pytest

from utils import seen, Commit, make_basic, to_pr
@@ -120,13 +121,11 @@ def test_disable(env, config, make_repo, users):
        seen(env, pr, users),
    }


def test_limit_after_merge(env, config, make_repo, users):
    """ If attempting to set a limit (<up to>) on a PR which is merged
    (already forward-ported or not), or is a forward-port PR, fwbot should
    just feedback that it won't do it
    """
    prod, other = make_basic(env, config, make_repo)
    reviewer = config['role_reviewer']['token']
    branch_b = env['runbot_merge.branch'].search([('name', '=', 'b')])
    branch_c = env['runbot_merge.branch'].search([('name', '=', 'c')])
    bot_name = env['runbot_merge.project'].search([]).fp_github_name
    with prod:
@@ -150,13 +149,13 @@ def test_limit_after_merge(env, config, make_repo, users):
        pr2.post_comment(bot_name + ' up to b', reviewer)
    env.run_crons()

    assert p1.limit_id == p2.limit_id == branch_c, \
        "check that limit was not updated"
    assert p1.limit_id == p2.limit_id == branch_b
    assert pr1.comments == [
        (users['reviewer'], "hansen r+"),
        seen(env, pr1, users),
        (users['reviewer'], bot_name + ' up to b'),
        (users['user'], "@%s forward-port limit can only be set before the PR is merged." % users['reviewer']),
        (users['reviewer'], f'{bot_name} up to b'),
        (users['user'], "Forward-porting to 'b'."),
        (users['user'], f"Forward-porting to 'b' (from {p2.display_name})."),
    ]
    assert pr2.comments == [
        seen(env, pr2, users),
@@ -165,12 +164,8 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre

More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
"""),
        (users['reviewer'], bot_name + ' up to b'),
        (users['user'], "@%s forward-port limit can only be set on an origin PR"
                        " (%s here) before it's merged and forward-ported." % (
            users['reviewer'],
            p1.display_name,
        )),
        (users['reviewer'], f'{bot_name} up to b'),
        (users['user'], f"Forward-porting {p1.display_name} to 'b'."),
    ]

    # update pr2 to detach it from pr1
@@ -186,7 +181,7 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
    assert p2.source_id == p1

    with prod:
        pr2.post_comment(bot_name + ' up to b', reviewer)
        pr2.post_comment(f'{bot_name} up to c', reviewer)
    env.run_crons()

    assert pr2.comments[4:] == [
@@ -195,9 +190,268 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
            users['user'], users['reviewer'],
            p2.repository.project_id.github_prefix
        )),
        (users['reviewer'], bot_name + ' up to b'),
        (users['user'], f"@{users['reviewer']} forward-port limit can only be "
                        f"set on an origin PR ({p1.display_name} here) before "
                        f"it's merged and forward-ported."
        ),
        (users['reviewer'], f'{bot_name} up to c'),
        (users['user'], "Forward-porting to 'c'."),
    ]
    with prod:
        prod.post_status(p2.head, 'success', 'legal/cla')
        prod.post_status(p2.head, 'success', 'ci/runbot')
        pr2.post_comment('hansen r+', reviewer)
    env.run_crons()
    with prod:
        prod.post_status('staging.b', 'success', 'legal/cla')
        prod.post_status('staging.b', 'success', 'ci/runbot')
    env.run_crons()

    _, _, p3 = env['runbot_merge.pull_requests'].search([], order='number')
    assert p3
    pr3 = prod.get_pr(p3.number)
    with prod:
        pr3.post_comment(f"{bot_name} up to c", reviewer)
    env.run_crons()
    assert pr3.comments == [
        seen(env, pr3, users),
        (users['user'], f"""\
@{users['user']} @{users['reviewer']} this PR targets c and is the last of the forward-port chain.

To merge the full chain, use
> @{p1.repository.project_id.fp_github_name} r+

More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
"""),
        (users['reviewer'], f"{bot_name} up to c"),
        (users['user'], f"Forward-porting {p2.display_name} to 'c'."),
    ]
    # 7 of previous check, plus r+
    assert pr2.comments[8:] == [
        (users['user'], f"Forward-porting to 'c' (from {p3.display_name}).")
    ]


@pytest.mark.parametrize("update_from", [
    pytest.param(lambda source: [('id', '=', source)], id='source'),
    pytest.param(lambda source: [('source_id', '=', source), ('target', '=', '2')], id='child'),
    pytest.param(lambda source: [('source_id', '=', source), ('target', '=', '3')], id='root'),
    pytest.param(lambda source: [('source_id', '=', source), ('target', '=', '4')], id='parent'),
    pytest.param(lambda source: [('source_id', '=', source), ('target', '=', '5')], id='current'),
    # pytest.param(id='tip'), # doesn't exist
])
@pytest.mark.parametrize("limit", range(1, 6+1))
def test_post_merge(
        env, post_merge, users, config, branches,
        update_from: callable,
        limit: int,
):
    PRs = env['runbot_merge.pull_requests']
    project, prod, _ = post_merge
    reviewer = config['role_reviewer']['token']

    # fetch source PR
    [source] = PRs.search([('source_id', '=', False)])

    # validate the forward ports for "child", "root", and "parent" so "current"
    # exists and we have one more target
    for branch in map(str, range(2, 4+1)):
        setci(source=source, repo=prod, target=branch)
        env.run_crons()
    # update 3 to make it into a root
    root = PRs.search([('source_id', '=', source.id), ('target.name', '=', '3')])
    root.write({'parent_id': False, 'detach_reason': 'testing'})
    # send detach messages so they're not part of the limit stuff batch
    env.run_crons()

    # cheat: we know PR numbers are assigned sequentially
    prs = list(map(prod.get_pr, range(1, 6)))
    before = {p.number: len(p.comments) for p in prs}

    from_id = PRs.search(update_from(source.id))
    from_ = prod.get_pr(from_id.number)
    with prod:
        from_.post_comment(f'{project.fp_github_name} up to {limit}', reviewer)
    env.run_crons()

    # there should always be a comment on the source and root indicating how
    # far we port
    # the PR we post on should have a comment indicating the correction
    current_id = PRs.search([('number', '=', '5')])
    actual_limit = max(limit, 5)
    for p in prs:
        # case for the PR on which we posted the comment
        if p.number == from_.number:
            root_opt = '' if p.number == root.number else f' {root.display_name}'
            trailer = '' if actual_limit == limit else f" (instead of the requested '{limit}' because {current_id.display_name} already exists)"
            assert p.comments[before[p.number] + 1:] == [
                (users['user'], f"Forward-porting{root_opt} to '{actual_limit}'{trailer}.")
            ]
        # case for reference PRs source and root (which get their own notifications)
        elif p.number in (source.number, root.number):
            assert p.comments[before[p.number]:] == [
                (users['user'], f"Forward-porting to '{actual_limit}' (from {from_id.display_name}).")
            ]

@pytest.mark.parametrize('mode', [
    None,
    # last forward port should fail ci, and only be validated after target bump
    'failbump',
    # last forward port should fail ci, then be validated, then target bump
    'failsucceed',
    # last forward port should be merged before bump
    'mergetip',
    # every forward port should be merged before bump
    'mergeall',
])
def test_resume_fw(env, post_merge, users, config, branches, mode):
    """Singleton version of test_post_merge: completes the forward porting
    including validation then tries to increase the limit, which should resume
    forward porting
    """

    PRs = env['runbot_merge.pull_requests']
    project, prod, _ = post_merge
    reviewer = config['role_reviewer']['token']

    # fetch source PR
    [source] = PRs.search([('source_id', '=', False)])
    with prod:
        prod.get_pr(source.number).post_comment(f'{project.fp_github_name} up to 5', reviewer)
    # validate the forward ports for "child", "root", and "parent" so "current"
    # exists and we have one more target
    for branch in map(str, range(2, 5+1)):
        setci(
            source=source, repo=prod, target=branch,
            status='failure' if branch == '5' and mode in ('failbump', 'failsucceed') else 'success'
        )
        env.run_crons()
    # cheat: we know PR numbers are assigned sequentially
    prs = list(map(prod.get_pr, range(1, 6)))
    before = {p.number: len(p.comments) for p in prs}

    if mode == 'failsucceed':
        setci(source=source, repo=prod, target=5)
        # sees the success, limit is still 5, considers the porting finished
        env.run_crons()

    if mode and mode.startswith('merge'):
        numbers = range(5 if mode == 'mergetip' else 2, 5 + 1)
        with prod:
            for number in numbers:
                prod.get_pr(number).post_comment(f'{project.github_prefix} r+', reviewer)
        env.run_crons()
        with prod:
            for target in numbers:
                pr = PRs.search([('target.name', '=', str(target))])
                print(pr.display_name, pr.state, pr.staging_id)
                prod.post_status(f'staging.{target}', 'success')
        env.run_crons()
        for number in numbers:
            assert PRs.search([('number', '=', number)]).state == 'merged'

    from_ = prod.get_pr(source.number)
    with prod:
        from_.post_comment(f'{project.fp_github_name} up to 6', reviewer)
    env.run_crons()

    if mode == 'failbump':
        setci(source=source, repo=prod, target=5)
        # setci moved the PR from opened to validated, so *now* it can be
        # forward-ported, but that still needs to actually happen
        env.run_crons()

    # since PR5 CI succeeded and we've increased the limit there should be a
    # new PR
    assert PRs.search([('source_id', '=', source.id), ('target.name', '=', 6)])
    pr5_id = PRs.search([('source_id', '=', source.id), ('target.name', '=', 5)])
    if mode == 'failbump':
        # because the initial forward porting was never finished as the PR CI
        # failed until *after* we bumped the limit, so it's not *resuming* per se.
        assert prs[0].comments[before[1]+1:] == [
            (users['user'], f"Forward-porting to '6'.")
        ]
    else:
        assert prs[0].comments[before[1]+1:] == [
            (users['user'], f"Forward-porting to '6', resuming forward-port stopped at {pr5_id.display_name}.")
        ]

def setci(*, source, repo, target, status='success'):
    """Validates (CI success) the descendant of ``source`` targeting ``target``
    in ``repo``.
    """
    pr = source.search([('source_id', '=', source.id), ('target.name', '=', str(target))])
    with repo:
        repo.post_status(pr.head, status)


@pytest.fixture(scope='session')
def branches():
    """Need enough branches to make space for:

    - a source
    - an ancestor (before and separated from the root, but not the source)
    - a root (break in the parent chain
    - a parent (between "current" and root)
    - "current"
    - the tip branch
    """
    return range(1, 6 + 1)

@pytest.fixture
def post_merge(env, config, users, make_repo, branches):
    """Create a setup for the post-merge limits test which is both simpler and
    more complicated than the standard test setup(s): it doesn't need more
    variety in code, but it needs a lot more "depth" in terms of number of
    branches it supports. Branches are fixture-ed to make it easier to share
    between this fixture and the actual test.

    All the branches are set to the same commit because that basically
    shouldn't matter.
    """
    prod = make_repo("post-merge-test")
    with prod:
        [c] = prod.make_commits(None, Commit('base', tree={'f': ''}))
        for i in branches:
            prod.make_ref(f'heads/{i}', c)
    dev = prod.fork()

    proj = env['runbot_merge.project'].create({
        'name': prod.name,
        'github_token': config['github']['token'],
        'github_prefix': 'hansen',
        'fp_github_token': config['github']['token'],
        'fp_github_name': 'herbert',
        'fp_github_email': 'hb@example.com',
        'branch_ids': [
            (0, 0, {'name': str(i), 'sequence': 1000 - (i * 10)})
            for i in branches
        ],
        'repo_ids': [
            (0, 0, {
                'name': prod.name,
                'required_statuses': 'default',
                'fp_remote_target': dev.name,
            })
        ]
    })

    env['res.partner'].search([
        ('github_login', '=', config['role_reviewer']['user'])
    ]).write({
        'review_rights': [(0, 0, {'repository_id': proj.repo_ids.id, 'review': True})]
    })

    mbot = proj.github_prefix
    reviewer = config['role_reviewer']['token']
    # merge the source PR
    source_target = str(branches[0])
    with prod:
        [c] = prod.make_commits(source_target, Commit('my pr', tree={'x': ''}), ref='heads/mypr')
        pr1 = prod.make_pr(target=source_target, head=c, title="a title")

        prod.post_status(c, 'success')
        pr1.post_comment(f'{mbot} r+', reviewer)
    env.run_crons()
    with prod:
        prod.post_status(f'staging.{source_target}', 'success')
    env.run_crons()

    return proj, prod, dev
runbot_merge/changelog/2023-10/free-the-limit.md (new file, 8 lines)
@@ -0,0 +1,8 @@
IMP: allow setting forward-port limits after the source pull request has been merged

Should now be possible to both extend and retract the forward port limit
afterwards, though obviously no shorter than the current tip of the forward
port sequence. One limitation is that forward ports being created can't be
stopped so there might be some windows where trying to set the limit to the
current tip will fail (because it's in the process of being forward-ported to
the next branch).
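Illustrative aside (not part of the commit): the limitation described above comes from the cancellation probe in _maybe_update_limit, which tries to grab the queued row with FOR UPDATE NOWAIT and backs off when a worker already holds it. A standalone sketch of that probe using plain psycopg2 and a hypothetical job_queue table:

```
import psycopg2
import psycopg2.errors

# Hypothetical connection string and table, for illustration only.
conn = psycopg2.connect("dbname=example")

def try_cancel(task_id):
    """Return True if the queued task could be removed, False if a worker
    already holds the row (the port is in flight and can't be stopped)."""
    with conn.cursor() as cur:
        try:
            cur.execute(
                "SELECT FROM job_queue WHERE id = %s FOR UPDATE NOWAIT",
                [task_id],
            )
        except psycopg2.errors.LockNotAvailable:
            conn.rollback()
            return False
        cur.execute("DELETE FROM job_queue WHERE id = %s", [task_id])
    conn.commit()
    return True
```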