[ADD] forwardport: special handling of adding branches to projects
If a new branch is added to a project, there is an issue with *ongoing* forward ports (forward ports which were not merged before the new branch was forked off an existing one): the new branch gets "skipped" and may be missing fixes until those are noticed and backported.

This commit hooks into project updates to check whether the update inserts a branch inside the existing sequence. If so, it looks for the forward-port sequences that span the insertion point and queues up new "intermediate" forward ports to stitch into those sequences.

Note: the cron socket timeout had to be increased to 2 minutes, as 1 minute blew up the big staging cron in the test (after all the forward-port PRs are approved).

Fixes #262
commit 9aac1b4a3e
parent 0d33b87579
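The core of the change (shown in full in the diff below) is detecting that exactly one branch was inserted into the middle of the forward-port sequence. A standalone sketch of that comparison, working on plain lists of branch ids rather than the actual recordsets (names here are illustrative only, not the runbot implementation):

    def find_insertion(before_ids, after_ids):
        """Return (index, inserted_id) if `after_ids` is `before_ids` with exactly
        one new id inserted somewhere (other than appended at the end), else None.
        """
        if len(after_ids) != len(before_ids) + 1:
            return None  # reordering, removal, or more than one insertion
        if after_ids[:-1] == before_ids:
            return None  # appended at the end: normal forward-porting handles this
        for i, id_ in enumerate(after_ids):
            if i >= len(before_ids) or before_ids[i] != id_:
                # candidate insertion point: the remainder must match exactly
                if after_ids[:i] + after_ids[i + 1:] == before_ids:
                    return i, id_
                return None
        return None

    # e.g. branches 1.0, 2.0, master with a new 3.0 inserted before master:
    assert find_insertion([10, 20, 30], [10, 20, 25, 30]) == (2, 25)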
@@ -84,7 +84,7 @@ def _set_socket_timeout():
    """ Avoid unlimited wait on standard sockets during tests, this is mostly
    an issue for non-trivial cron calls
    """
-   socket.setdefaulttimeout(60.0)
+   socket.setdefaulttimeout(120.0)

@pytest.fixture(scope="session")
def config(pytestconfig):
@@ -962,6 +962,9 @@ class Model:
    def __repr__(self):
        return "{}({})".format(self._model, ', '.join(str(id_) for id_ in self._ids))

    def browse(self, ids):
        return Model(self._env, self._model, ids)

+   def exists(self):
+       ids = self._env(self._model, 'exists', self._ids)
+       return Model(self._env, self._model, ids)
@@ -43,12 +43,25 @@ class BatchQueue(models.Model, Queue):
    source = fields.Selection([
        ('merge', 'Merge'),
        ('fp', 'Forward Port Followup'),
+       ('insert', 'New branch port')
    ], required=True)

    def _process_item(self):
        batch = self.batch_id

        newbatch = batch.prs._port_forward()
+       # insert new batch in ancestry sequence unless conflict (= no parent)
+       if self.source == 'insert':
+           for pr in newbatch.prs:
+               if not pr.parent_id:
+                   break
+               newchild = pr.search([
+                   ('parent_id', '=', pr.parent_id.id),
+                   ('id', '!=', pr.id),
+               ])
+               if newchild:
+                   newchild.parent_id = pr.id

        if newbatch:
            _logger.info(
                "Processing %s (from %s): %s (%s) -> %s (%s)",
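The re-parenting above is what stitches a newly inserted port into an existing ancestry chain: if the batch was queued with source 'insert', any PR that already pointed at the new PR's parent is redirected to the new PR instead. A minimal standalone sketch of that idea, with a hypothetical PR class rather than the runbot models:

    class PR:
        def __init__(self, name, parent=None):
            self.name, self.parent = name, parent

    a = PR('A')
    b = PR('B', parent=a)   # existing chain: A -> B
    n = PR('N', parent=a)   # new intermediate port, parented on A
    # redirect B's parent to the inserted PR, yielding A -> N -> B
    b.parent = n
    assert (b.parent, n.parent) == (n, a)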
@@ -29,6 +29,7 @@ import dateutil
import requests

from odoo import _, models, fields, api
from odoo.osv import expression
from odoo.exceptions import UserError
from odoo.tools import topological_sort, groupby
from odoo.tools.appdirs import user_cache_dir
@@ -106,6 +107,82 @@ class Project(models.Model):
        if to_remove:
            self.env['forwardport.tagging'].browse(to_remove).unlink()

+   def write(self, vals):
+       Branches = self.env['runbot_merge.branch']
+       # check on branches both active and inactive so disabling branches doesn't
+       # make it look like the sequence changed.
+       self_ = self.with_context(active_test=False)
+       branches_before = {project: project._forward_port_ordered() for project in self_}
+
+       r = super().write(vals)
+       for p in self_:
+           # check if the branches sequence has been modified
+           bbefore = branches_before[p]
+           bafter = p._forward_port_ordered()
+           if bafter.ids == bbefore.ids:
+               continue
+           # if it's just that a branch was inserted at the end, forwardport
+           # should keep on keeping normally
+           if bafter.ids[:-1] == bbefore.ids:
+               continue
+
+           if bafter <= bbefore:
+               raise UserError("Branches can not be reordered or removed after saving.")
+
+           # Last possibility: branch was inserted but not at end, get all
+           # branches before and all branches after
+           before = new = after = Branches
+           for b in bafter:
+               if b in bbefore:
+                   if new:
+                       after += b
+                   else:
+                       before += b
+               else:
+                   if new:
+                       raise UserError("Inserting multiple branches at the same time is not supported")
+                   new = b
+           # find all FPs whose ancestry spans the insertion
+           leaves = self.env['runbot_merge.pull_requests'].search([
+               ('state', 'not in', ['closed', 'merged']),
+               ('target', 'in', after.ids),
+               ('source_id.target', 'in', before.ids),
+           ])
+           # get all PRs just preceding the insertion point which either are
+           # sources of the above or have the same source
+           candidates = self.env['runbot_merge.pull_requests'].search([
+               ('target', '=', before[-1].id),
+               '|', ('id', 'in', leaves.mapped('source_id').ids),
+                    ('source_id', 'in', leaves.mapped('source_id').ids),
+           ])
+           # enqueue the creation of a new forward-port based on our candidates
+           # but it should only create a single step and needs to stitch back
+           # the parents linked list, so it has a special type
+           for c in candidates:
+               self.env['forwardport.batches'].create({
+                   'batch_id': self.env['runbot_merge.batch'].create({
+                       'target': before[-1].id,
+                       'prs': [(4, c.id, 0)],
+                       'active': False,
+                   }).id,
+                   'source': 'insert',
+               })
+       return r

+   def _forward_port_ordered(self, domain=()):
+       Branches = self.env['runbot_merge.branch']
+       ordering_items = re.split(r',\s*', 'fp_sequence,' + Branches._order)
+       ordering = ','.join(
+           # reverse order (desc -> asc, asc -> desc) as we want the "lower"
+           # branches to be first in the ordering
+           f[:-5] if f.lower().endswith(' desc') else f + ' desc'
+           for f in ordering_items
+       )
+       return Branches.search(expression.AND([
+           [('project_id', '=', self.id)],
+           domain or [],
+       ]), order=ordering)

class Repository(models.Model):
    _inherit = 'runbot_merge.repository'
    fp_remote_target = fields.Char(help="where FP branches get pushed")
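The `_forward_port_ordered` helper above flips every field's sort direction so that the "lowest" branch sorts first. A standalone illustration of that reversal (assuming, purely for the sake of example, that the branch model's `_order` is `'sequence, name desc'` — the real value lives in runbot_merge):

    import re

    order = 'sequence, name desc'  # hypothetical _order value, illustration only
    items = re.split(r',\s*', 'fp_sequence,' + order)
    reversed_order = ','.join(
        f[:-5] if f.lower().endswith(' desc') else f + ' desc'
        for f in items
    )
    print(reversed_order)  # -> 'fp_sequence desc,sequence desc,name'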
@@ -122,19 +199,6 @@ class Branch(models.Model):
        for b in self:
            b.fp_enabled = b.active and b.fp_target

-   # FIXME: this should be per-project...
-   def _forward_port_ordered(self, domain=[]):
-       """ Returns all branches in forward port order (from the lowest to
-       the highest — usually master)
-       """
-       return self.search(domain, order=self._forward_port_ordering())
-
-   def _forward_port_ordering(self):
-       return ','.join(
-           f[:-5] if f.lower().endswith(' desc') else f + ' desc'
-           for f in re.split(r',\s*', 'fp_sequence,' + self._order)
-       )
-
class PullRequests(models.Model):
    _inherit = 'runbot_merge.pull_requests'
@@ -142,11 +206,7 @@

    # QUESTION: should the limit be copied on each child, or should it be inferred from the parent? Also what happens when detaching, is the detached PR configured independently?
    # QUESTION: what happens if the limit_id is deactivated with extant PRs?
-   limit_id = fields.Many2one(
-       'runbot_merge.branch',
-       default=lambda self: self.env['runbot_merge.branch']._forward_port_ordered()[-1],
-       help="Up to which branch should this PR be forward-ported"
-   )
+   limit_id = fields.Many2one('runbot_merge.branch', help="Up to which branch should this PR be forward-ported")

    parent_id = fields.Many2one(
        'runbot_merge.pull_requests', index=True,
@@ -169,6 +229,12 @@
        if existing:
            return existing

+       if 'limit_id' not in vals:
+           branch = self.env['runbot_merge.branch'].browse(vals['target'])
+           repo = self.env['runbot_merge.repository'].browse(vals['repository'])
+           vals['limit_id'] = branch.project_id._forward_port_ordered(
+               ast.literal_eval(repo.branch_filter or '[]')
+           )[-1].id
        if vals.get('parent_id') and 'source_id' not in vals:
            vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
        return super().create(vals)
@@ -376,9 +442,9 @@
            return
        # NOTE: assumes even disabled branches are properly sequenced, would
        # probably be a good idea to have the FP view show all branches
-       branches = list(self.env['runbot_merge.branch']
+       branches = list(self.target.project_id
            .with_context(active_test=False)
-           ._forward_port_ordered(ast.literal_eval(self.repository.branch_filter)))
+           ._forward_port_ordered(ast.literal_eval(self.repository.branch_filter or '[]')))

        # get all branches between max(root.target, ref.target) (excluded) and limit (included)
        from_ = max(branches.index(self.target), branches.index(reference.target))
@@ -405,3 +405,147 @@ class TestNotAllBranches:
            " next branch 'b'." % (a.name, pr_a.number)
        )
    ]

def test_new_intermediate_branch(env, config, make_repo):
    """ In the case of a freeze / release a new intermediate branch appears in
    the sequence. New or ongoing forward ports should pick it up just fine (as
    the "next target" is decided when a PR is ported forward) however this is
    an issue for existing yet-to-be-merged sequences e.g. given the branches
    1.0, 2.0 and master, if a branch 3.0 is forked off from master and inserted
    before it, we need to create a new *intermediate* forward port PR
    """
    def validate(commit):
        prod.post_status(commit, 'success', 'ci/runbot')
        prod.post_status(commit, 'success', 'legal/cla')
    project, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True)
    original_c_tree = prod.read_tree(prod.commit('c'))
    prs = []
    with prod:
        for i in ['0', '1', '2']:
            prod.make_commits('a', Commit(i, tree={i:i}), ref='heads/branch%s' % i)
            pr = prod.make_pr(target='a', head='branch%s' % i)
            prs.append(pr)
            validate(pr.head)
            pr.post_comment('hansen r+', config['role_reviewer']['token'])
        # cancel validation of PR2
        prod.post_status(prs[2].head, 'failure', 'ci/runbot')
        # also add a PR targeting b forward-ported to c, in order to check
        # for an insertion right after the source
        prod.make_commits('b', Commit('x', tree={'x': 'x'}), ref='heads/branchx')
        prx = prod.make_pr(target='b', head='branchx')
        validate(prx.head)
        prx.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()
    with prod:
        validate('staging.a')
        validate('staging.b')
    env.run_crons()

    # should have merged pr1, pr2 and prx and created their forward ports, now
    # validate pr0's FP so the c-targeted FP is created
    PRs = env['runbot_merge.pull_requests']
    pr0_id = PRs.search([
        ('repository.name', '=', prod.name),
        ('number', '=', prs[0].number),
    ])
    pr0_fp_id = PRs.search([
        ('source_id', '=', pr0_id.id),
    ])
    assert pr0_fp_id
    assert pr0_fp_id.target.name == 'b'
    with prod:
        validate(pr0_fp_id.head)
    env.run_crons()
    original0 = PRs.search([('parent_id', '=', pr0_fp_id.id)])
    assert original0, "Could not find FP of PR0 to C"
    assert original0.target.name == 'c'

    # also check prx's fp
    prx_id = PRs.search([('repository.name', '=', prod.name), ('number', '=', prx.number)])
    prx_fp_id = PRs.search([('source_id', '=', prx_id.id)])
    assert prx_fp_id
    assert prx_fp_id.target.name == 'c'

    # NOTE: the branch must be created on git(hub) first, probably
    # create new branch forked from the "current master" (c)
    c = prod.commit('c').id
    with prod:
        prod.make_ref('heads/new', c)
    currents = {branch.name: branch.id for branch in project.branch_ids}
    # insert a branch between "b" and "c"
    project.write({
        'branch_ids': [
            (1, currents['a'], {'fp_sequence': 3}),
            (1, currents['b'], {'fp_sequence': 2}),
            (0, False, {'name': 'new', 'fp_sequence': 1, 'fp_target': True}),
            (1, currents['c'], {'fp_sequence': 0})
        ]
    })
    env.run_crons()
    descendants = PRs.search([('source_id', '=', pr0_id.id)])
    new0 = descendants - pr0_fp_id - original0
    assert len(new0) == 1
    assert new0.parent_id == pr0_fp_id
    assert original0.parent_id == new0

    descx = PRs.search([('source_id', '=', prx_id.id)])
    newx = descx - prx_fp_id
    assert len(newx) == 1
    assert newx.parent_id == prx_id
    assert prx_fp_id.parent_id == newx

    # finish up: merge pr1 and pr2, ensure all the content is present in both
    # "new" (the newly inserted branch) and "c" (the tippity tip)
    with prod: # validate pr2
        prod.post_status(prs[2].head, 'success', 'ci/runbot')
    env.run_crons()
    # merge pr2
    with prod:
        validate('staging.a')
    env.run_crons()
    # ci on pr1/pr2 fp to b
    sources = [
        env['runbot_merge.pull_requests'].search([
            ('repository.name', '=', prod.name),
            ('number', '=', pr.number),
        ]).id
        for pr in prs
    ]
    sources.append(prx_id.id)
    # CI all the forward port PRs (shouldn't hurt to re-ci the forward port of
    # prs[0] to b aka pr0_fp_id)
    for target in ['b', 'new', 'c']:
        fps = PRs.search([('source_id', 'in', sources), ('target.name', '=', target)])
        with prod:
            for fp in fps:
                validate(fp.head)
        env.run_crons()
    # now fps should be the last PR of each sequence, and thus r+-able
    with prod:
        for pr in fps:
            assert pr.target.name == 'c'
            prod.get_pr(pr.number).post_comment(
                '%s r+' % project.fp_github_name,
                config['role_reviewer']['token'])
    assert all(p.state == 'merged' for p in PRs.browse(sources)), \
        "all sources should be merged"
    assert all(p.state == 'ready' for p in PRs.search([('id', 'not in', sources)])),\
        "All PRs except sources should be ready"
    env.run_crons()
    with prod:
        for target in ['b', 'new', 'c']:
            validate('staging.' + target)
    env.run_crons()
    assert all(p.state == 'merged' for p in PRs.search([])), \
        "All PRs should be merged now"

    assert prod.read_tree(prod.commit('c')) == {
        **original_c_tree,
        '0': '0', '1': '1', '2': '2', # updates from PRs
        'x': 'x',
    }, "check that C got all the updates"
    assert prod.read_tree(prod.commit('new')) == {
        **original_c_tree,
        '0': '0', '1': '1', '2': '2', # updates from PRs
        'x': 'x',
    }, "check that new got all the updates (should be in the same state as c really)"
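For readers less familiar with Odoo's ORM, the `branch_ids` write in the test above uses x2many command tuples: `(1, id, vals)` updates an already-linked record, `(0, _, vals)` creates and links a new one, and `(4, id, _)` (as used in the batch creation in the project hunk) links an existing record without modifying it. A minimal sketch of the same pattern, with hypothetical records purely for illustration:

    # hypothetical project/branch records, illustrating the command-tuple format
    project.write({
        'branch_ids': [
            (1, branch_b.id, {'fp_sequence': 2}),            # update an existing branch
            (0, False, {'name': 'new', 'fp_sequence': 1}),   # create and link a new branch
        ],
    })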