[IMP] *: freeze wizard v3, freezer and wizarder
Stop *staging* release PRs: they are normally fairly simple and should not fail their staging outside of unreliable tests (or possibly a few edge cases, e.g. a forgotten version change). However, staging them creates the possibility of a "version hole" on the release branch, which is undesirable. Instead, immediately and unconditionally push the release commits onto the newly created branches; if some things don't work, they can be fixed afterwards (and the process refined, maybe).

Also add the same feature for *bump* PRs, with the difference that bump PRs are not created / requested by default (they have to be opted in individually).

For convenience, add a feature which automatically finds the PRs by inputting their label (not really tested yet).

Closes #603
commit b86092de83
parent 7eeee99735
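For orientation, the direct-push flow the wizard now performs for each repository looks roughly like this. It is a simplified sketch based on the helpers visible in the diff: gh stands for a repository's GitHub session, master_name and branch_name for the frozen-off and newly created branch names, and the *_pr_number variables are illustrative placeholders; error handling and the rate-limit sleeps are omitted.

    # sketch only, not the verbatim implementation
    prev = gh.head(master_name)                                        # resolve the current master head
    gh.set_ref('tmp.' + branch_name, prev)                             # temporary working branch at that head
    rel_head, _ = gh.rebase(release_pr_number, 'tmp.' + branch_name)   # release commits rebased on top of master
    gh.create_ref(branch_name, rel_head)                               # push the new release branch directly, no staging
    bump_head, _ = gh.rebase(bump_pr_number, 'tmp.' + master_name)     # optional bump PR, same dance
    gh.fast_forward(master_name, bump_head)                            # master fast-forwarded to the bump commit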
@@ -20,13 +20,3 @@ class FreezeWizard(models.Model):
        if not self.search_count([]):
            self.env.ref('forwardport.port_forward').active = True
        return r

    def action_freeze(self):
        # have to store wizard content as it's removed during freeze
        project = self.project_id
        branches_before = project.branch_ids
        prs = self.mapped('release_pr_ids.pr_id')
        r = super().action_freeze()
        new_branch = project.branch_ids - branches_before
        prs.write({'limit_id': new_branch.id})
        return r
@@ -801,10 +801,9 @@ def test_freeze(env, config, make_repo, users):

    assert not w_id.errors
    w_id.action_freeze()
    env.run_crons() # stage freeze PRs
    with prod:
        prod.post_status('staging.post-b', 'success', 'ci/runbot')
        prod.post_status('staging.post-b', 'success', 'legal/cla')
    # run crons to process the feedback, run a second time in case of e.g.
    # forward porting
    env.run_crons()
    env.run_crons()

    assert release_id.state == 'merged'
@@ -188,7 +188,7 @@ class GH(object):
            head = '<Response [%s]: %s)>' % (r.status_code, r.json() if _is_json(r) else r.text)

        if head == to:
            _logger.info("Sanity check ref update of %s to %s: ok", branch, to)
            _logger.debug("Sanity check ref update of %s to %s: ok", branch, to)
            return

        _logger.warning("Sanity check ref update of %s, expected %s got %s", branch, to, head)
@@ -225,7 +225,7 @@ class GH(object):

        status0 = r.status_code
        _logger.debug(
            'set_ref(update, %s, %s, %s -> %s (%s)',
            'ref_set(%s, %s, %s -> %s (%s)',
            self._repo, branch, sha, status0,
            'OK' if status0 == 200 else r.text or r.reason
        )
@@ -240,30 +240,35 @@ class GH(object):

        # 422 makes no sense but that's what github returns, leaving 404 just
        # in case
        status1 = None
        if status0 in (404, 422):
            # fallback: create ref
            r = self('post', 'git/refs', json={
                'ref': 'refs/heads/{}'.format(branch),
                'sha': sha,
            }, check=False)
            status1 = r.status_code
            _logger.debug(
                'set_ref(create, %s, %s, %s) -> %s (%s)',
                self._repo, branch, sha, status1,
                'OK' if status1 == 201 else r.text or r.reason
            )
            status1 = self.create_ref(branch, sha)
            if status1 == 201:
                @utils.backoff(exc=AssertionError)
                def _wait_for_update():
                    head = self._check_updated(branch, sha)
                    assert not head, "Sanity check ref update of %s, expected %s got %s" % (
                        branch, sha, head
                    )
                return
        else:
            status1 = None

        raise AssertionError("set_ref failed(%s, %s)" % (status0, status1))

    def create_ref(self, branch, sha):
        r = self('post', 'git/refs', json={
            'ref': 'refs/heads/{}'.format(branch),
            'sha': sha,
        }, check=False)
        status = r.status_code
        _logger.debug(
            'ref_create(%s, %s, %s) -> %s (%s)',
            self._repo, branch, sha, status,
            'OK' if status == 201 else r.text or r.reason
        )
        if status == 201:
            @utils.backoff(exc=AssertionError)
            def _wait_for_update():
                head = self._check_updated(branch, sha)
                assert not head, \
                    f"Sanity check ref update of {branch}, expected {sha} got {head}"
        return status

    def merge(self, sha, dest, message):
        r = self('post', 'merges', json={
            'base': dest,
@@ -1,10 +1,13 @@
import contextlib
import enum
import itertools
import json
import logging
import time

from odoo import models, fields, api
from odoo.exceptions import UserError
from odoo.addons.runbot_merge.exceptions import FastForwardError

_logger = logging.getLogger(__name__)
class FreezeWizard(models.Model):
@@ -12,36 +15,90 @@ class FreezeWizard(models.Model):
    _description = "Wizard for freezing a project('s master)"

    project_id = fields.Many2one('runbot_merge.project', required=True)
    errors = fields.Text(compute='_compute_errors')
    branch_name = fields.Char(required=True, help="Name of the new branches to create")
    release_pr_ids = fields.One2many(
        'runbot_merge.project.freeze.prs', 'wizard_id',
        string="Release pull requests",
        help="Pull requests used as tips for the freeze branches, one per repository"
    )

    required_pr_ids = fields.Many2many(
        'runbot_merge.pull_requests', string="Required Pull Requests",
        domain="[('state', 'not in', ('closed', 'merged'))]",
        help="Pull requests which must have been merged before the freeze is allowed",
    )
    errors = fields.Text(compute='_compute_errors')

    release_label = fields.Char()
    release_pr_ids = fields.One2many(
        'runbot_merge.project.freeze.prs', 'wizard_id',
        string="Release pull requests",
        help="Pull requests used as tips for the freeze branches, "
             "one per repository"
    )

    bump_label = fields.Char()
    bump_pr_ids = fields.One2many(
        'runbot_merge.project.freeze.bumps', 'wizard_id',
        string="Bump pull requests",
        help="Pull requests used as tips of the frozen-off branches, "
             "one per repository"
    )

    _sql_constraints = [
        ('unique_per_project', 'unique (project_id)',
         "There should be only one ongoing freeze per project"),
    ]

    @api.onchange('release_label')
    def _onchange_release_label(self):
        prs = self.env['runbot_merge.pull_requests'].search([
            ('label', '=', self.release_label)
        ])
        for release_pr in self.release_pr_ids:
            release_pr.pd_id = next((
                p.id for p in prs
                if p.repository == release_pr.repository_id
            ), False)

    @api.onchange('release_pr_ids')
    def _onchange_release_prs(self):
        labels = {p.pr_id.label for p in self.release_pr_ids}
        self.release_label = len(labels) == 1 and labels.pop()

    @api.onchange('bump_label')
    def _onchange_bump_label(self):
        prs = self.env['runbot_merge.pull_requests'].search([
            ('label', '=', self.bump_label)
        ])
        for bump_pr in self.bump_pr_ids:
            bump_pr.pd_id = next((
                p.id for p in prs
                if p.repository == bump_pr.repository_id
            ), False)

    @api.onchange('bump_pr_ids')
    def _onchange_bump_prs(self):
        labels = {p.pr_id.label for p in self.bump_pr_ids}
        self.bump_label = len(labels) == 1 and labels.pop()

    @api.depends('release_pr_ids.pr_id.label', 'required_pr_ids.state')
    def _compute_errors(self):
        errors = []
        without = self.release_pr_ids.filtered(lambda p: not p.pr_id)
        if without:
            errors.append("* Every repository must have a release PR, missing release PRs for %s." % ', '.join(
                p.repository_id.name for p in without
                without.mapped('repository_id.name')
            ))

        labels = set(self.mapped('release_pr_ids.pr_id.label'))
        if len(labels) != 1:
            errors.append("* All release PRs must have the same label, found %r." % ', '.join(sorted(labels)))
        non_squash = self.mapped('release_pr_ids.pr_id').filtered(lambda p: not p.squash)
        if non_squash:
            errors.append("* Release PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))

        bump_labels = set(self.mapped('bump_pr_ids.pr_id.label'))
        if len(bump_labels) > 1:
            errors.append("* All bump PRs must have the same label, found %r" % ', '.join(sorted(bump_labels)))
        non_squash = self.mapped('bump_pr_ids.pr_id').filtered(lambda p: not p.squash)
        if non_squash:
            errors.append("* Bump PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))

        unready = sum(p.state not in ('closed', 'merged') for p in self.required_pr_ids)
        if unready:
@@ -73,6 +130,14 @@ class FreezeWizard(models.Model):
        if self.errors:
            return self.action_open()

        conflict_crons = self.env.ref('runbot_merge.merge_cron') | self.env.ref('runbot_merge.staging_cron')
        # we don't want to run concurrently to the crons above, though we
        # don't need to prevent read access to them
        self.env.cr.execute(
            'SELECT * FROM ir_cron WHERE id =ANY(%s) FOR SHARE NOWAIT',
            [conflict_crons.ids]
        )

        project_id = self.project_id
        # need to create the new branch, but at the same time resequence
        # everything so the new branch is the second one, just after the branch
@@ -86,47 +151,136 @@ class FreezeWizard(models.Model):
                'sequence': next(seq),
            })
        ]
        for s, b in zip(seq, rest):
            commands.append((1, b.id, {'sequence': s}))
        commands.extend((1, b.id, {'sequence': s}) for s, b in zip(seq, rest))
        project_id.branch_ids = commands
        master_name = master.name

        # update release PRs to get merged on the newly created branch
        new_branch = project_id.branch_ids - master - rest
        self.release_pr_ids.mapped('pr_id').write({'target': new_branch.id, 'priority': 0})
        gh_sessions = {r: r.github() for r in self.project_id.repo_ids}

        # create new branch on every repository
        errors = []
        repository = None
        # prep new branch (via tmp refs) on every repo
        rel_heads = {}
        # store for master heads as odds are high the bump pr(s) will be on the
        # same repo as one of the release PRs
        prevs = {}
        for rel in self.release_pr_ids:
            repository = rel.repository_id
            gh = repository.github()
            # annoyance: can't directly alias a ref to an other ref, need to
            # resolve the "old" branch explicitely
            prev = gh('GET', f'git/refs/heads/{master.name}')
            if not prev.ok:
                errors.append(f"Unable to resolve branch {master.name} of repository {repository.name} to a commit.")
                break
            new_branch = gh('POST', 'git/refs', json={
                'ref': 'refs/heads/' + self.branch_name,
                'sha': prev.json()['object']['sha'],
            }, check=False)
            if not new_branch.ok:
                err = new_branch.json()['message']
                errors.append(f"Unable to create branch {master.name} of repository {repository.name}: {err}.")
                break
            repo_id = rel.repository_id
            gh = gh_sessions[repo_id]
            try:
                prev = prevs[repo_id] = gh.head(master_name)
            except Exception:
                raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")

            # create the tmp branch to merge the PR into
            tmp_branch = f'tmp.{self.branch_name}'
            try:
                gh.set_ref(tmp_branch, prev)
            except Exception as err:
                raise UserError(f"Unable to create branch {self.branch_name} of repository {repo_id.name}: {err}.")

            rel_heads[repo_id], _ = gh.rebase(rel.pr_id.number, tmp_branch)
            time.sleep(1)

        # if an error occurred during creation, try to clean up then raise error
        if errors:
            for r in self.release_pr_ids:
                if r.repository_id == repository:
        # prep bump
        bump_heads = {}
        for bump in self.bump_pr_ids:
            repo_id = bump.repository_id
            gh = gh_sessions[repo_id]

            try:
                prev = prevs[repo_id] = prevs.get(repo_id) or gh.head(master_name)
            except Exception:
                raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")

            # create the tmp branch to merge the PR into
            tmp_branch = f'tmp.{master_name}'
            try:
                gh.set_ref(tmp_branch, prev)
            except Exception as err:
                raise UserError(f"Unable to create branch {master_name} of repository {repo_id.name}: {err}.")

            bump_heads[repo_id], _ = gh.rebase(bump.pr_id.number, tmp_branch)
            time.sleep(1)

        deployed = {}
        # at this point we've got a bunch of tmp branches with merged release
        # and bump PRs, it's time to update the corresponding targets
        to_delete = [] # release prs go on new branches which we try to delete on failure
        to_revert = [] # bump prs go on new branch which we try to revert on failure
        failure = None
        for rel in self.release_pr_ids:
            repo_id = rel.repository_id
            # helper API currently has no API to ensure we're just creating a
            # new branch (as cheaply as possible) so do it by hand
            status = None
            with contextlib.suppress(Exception):
                status = gh_sessions[repo_id].create_ref(self.branch_name, rel_heads[repo_id])
                deployed[rel.pr_id.id] = rel_heads[repo_id]
                to_delete.append(repo_id)

            if status != 201:
                failure = ('create', repo_id.name, self.branch_name)
                break
        else: # all release deployments succeeded
            for bump in self.bump_pr_ids:
                repo_id = bump.repository_id
                try:
                    gh_sessions[repo_id].fast_forward(master_name, bump_heads[repo_id])
                    deployed[bump.pr_id.id] = bump_heads[repo_id]
                    to_revert.append(repo_id)
                except FastForwardError:
                    failure = ('fast-forward', repo_id.name, master_name)
                    break

                    deletion = r.repository_id.github().delete(f'git/refs/heads/{self.branch_name}')
        if failure:
            addendums = []
            # creating the branch failed, try to delete all previous branches
            failures = []
            for prev_id in to_revert:
                revert = gh_sessions[prev_id]('PATCH', f'git/refs/heads/{master_name}', json={
                    'sha': prevs[prev_id],
                    'force': True
                }, check=False)
                if not revert.ok:
                    failures.append(prev_id.name)
            if failures:
                addendums.append(
                    "Subsequently unable to revert branches created in %s." % \
                    ', '.join(failures)
                )
                failures.clear()

            for prev_id in to_delete:
                deletion = gh_sessions[prev_id]('DELETE', f'git/refs/heads/{self.branch_name}', check=False)
                if not deletion.ok:
                    errors.append(f"Consequently unable to delete branch {self.branch_name} of repository {r.name}.")
                    time.sleep(1)
                    raise UserError('\n'.join(errors))
                    failures.append(prev_id.name)
            if failures:
                addendums.append(
                    "Subsequently unable to delete branches created in %s." % \
                    ", ".join(failures)
                )
                failures.clear()

            if addendums:
                addendum = '\n\n' + '\n'.join(addendums)
            else:
                addendum = ''

            reason, repo, branch = failure
            raise UserError(
                f"Unable to {reason} branch {repo}:{branch}.{addendum}"
            )

        all_prs = self.release_pr_ids.pr_id | self.bump_pr_ids.pr_id
        all_prs.state = 'merged'
        self.env['runbot_merge.pull_requests.feedback'].create([{
            'repository': pr.repository.id,
            'pull_request': pr.number,
            'close': True,
            'message': json.dumps({
                'sha': deployed[pr.id],
                'base': self.branch_name if pr in self.release_pr_ids.pr_id else None
            })
        } for pr in all_prs])

        # delete wizard
        self.sudo().unlink()
@@ -155,6 +309,31 @@ class ReleasePullRequest(models.Model):
        domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
        string="Release Pull Request",
    )
    label = fields.Char(related='pr_id.label')

    def write(self, vals):
        # only the pr should be writeable after initial creation
        assert 'wizard_id' not in vals
        assert 'repository_id' not in vals
        # and if the PR gets set, it should match the requested repository
        if 'pr_id' in vals:
            assert self.env['runbot_merge.pull_requests'].browse(vals['pr_id'])\
                .repository == self.repository_id

        return super().write(vals)

class BumpPullRequest(models.Model):
    _name = 'runbot_merge.project.freeze.bumps'
    _description = "links to pull requests used to \"bump\" the development branches"

    wizard_id = fields.Many2one('runbot_merge.project.freeze', required=True, index=True, ondelete='cascade')
    repository_id = fields.Many2one('runbot_merge.repository', required=True)
    pr_id = fields.Many2one(
        'runbot_merge.pull_requests',
        domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
        string="Release Pull Request",
    )
    label = fields.Char(related='pr_id.label')

    def write(self, vals):
        # only the pr should be writeable after initial creation
@@ -23,12 +23,42 @@
                </group>
            </group>
            <group>
                <group colspan="2">
                <group colspan="2" string="Release">
                    <p>
                        Release (freeze) PRs, provide the first commit
                        of the new branches. Each PR must have a single
                        commit.
                    </p>
                    <p class="alert alert-warning" role="alert">
                        These PRs will be merged directly, not staged.
                    </p>
                    <field name="release_label"/>
                    <field name="release_pr_ids" nolabel="1">
                        <tree editable="bottom" create="false">
                            <field name="repository_id" readonly="1"/>
                            <field name="pr_id" options="{'no_create': True}"
                                   context="{'pr_include_title': 1}"/>
                            <field name="label"/>
                        </tree>
                    </field>
                </group>
            </group>
            <group>
                <group colspan="2" string="Bump">
                    <p>
                        Bump PRs, provide the first commit of the source
                        branches after the release has been cut.
                    </p>
                    <p class="alert alert-warning" role="alert">
                        These PRs will be merged directly, not staged.
                    </p>
                    <field name="bump_label"/>
                    <field name="bump_pr_ids" nolabel="1">
                        <tree editable="bottom" create="false">
                            <field name="repository_id" readonly="1"/>
                            <field name="pr_id" options="{'no_create': True}"
                                   context="{'pr_include_title': 1}"/>
                            <field name="label"/>
                        </tree>
                    </field>
                </group>
@@ -1542,20 +1542,24 @@ class Feedback(models.Model):

            try:
                message = f.message
                if f.close:
                    gh.close(f.pull_request)
                    try:
                        data = json.loads(message or '')
                    except json.JSONDecodeError:
                        pass
                    else:
                with contextlib.suppress(json.JSONDecodeError):
                    data = json.loads(message or '')
                    message = data.get('message')

                    if data.get('base'):
                        gh('PATCH', f'pulls/{f.pull_request}', json={'base': data['base']})

                    if f.close:
                        pr_to_notify = self.env['runbot_merge.pull_requests'].search([
                            ('repository', '=', repo.id),
                            ('number', '=', f.pull_request),
                        ])
                        if pr_to_notify:
                            pr_to_notify._notify_merged(gh, data)
                        message = None

                if f.close:
                    gh.close(f.pull_request)

                if message:
                    gh.comment(f.pull_request, message)
            except Exception:
@@ -2150,9 +2154,8 @@ def parse_refs_smart(read):
        return read(length - 4)

    header = read_line()
    assert header == b'# service=git-upload-pack\n', header
    sep = read_line()
    assert sep is None, sep
    assert header.rstrip() == b'# service=git-upload-pack', header
    assert read_line() is None, "failed to find first flush line"
    # read lines until second delimiter
    for line in iter(read_line, None):
        if line.startswith(ZERO_REF):
@@ -1,7 +1,8 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
access_runbot_merge_project_admin,Admin access to project,model_runbot_merge_project,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_project_freeze,Admin access to freeze wizard,model_runbot_merge_project_freeze,runbot_merge.group_admin,1,1,0,0
access_runbot_merge_project_freeze_prs,Admin access to freeze wizard prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1
access_runbot_merge_project_freeze_prs,Admin access to freeze wizard release prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1
access_runbot_merge_project_freeze_bumps,Admin access to freeze wizard bump prs,model_runbot_merge_project_freeze_bumps,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_admin,Admin access to repo,model_runbot_merge_repository,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_status_admin,Admin access to repo statuses,model_runbot_merge_repository_status,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_branch_admin,Admin access to branches,model_runbot_merge_branch,runbot_merge.group_admin,1,1,1,1
@@ -3620,6 +3620,7 @@ class TestFeedback:
        ]

class TestInfrastructure:
    @pytest.mark.skip(reason="Don't want to implement")
    def test_protection(self, repo):
        """ force-pushing on a protected ref should fail
        """
@@ -7,12 +7,13 @@ are staged concurrently in all repos
"""
import json
import time
import xmlrpc.client

import pytest
import requests
from lxml.etree import XPath, tostring
from lxml.etree import XPath

from utils import seen, re_matches, get_partner, pr_page, to_pr, Commit
from utils import seen, get_partner, pr_page, to_pr, Commit


@pytest.fixture
@@ -1104,6 +1105,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
    * have a project with 3 repos, and two branches (1.0 and master) each
    * have 2 PRs required for the freeze
    * prep 3 freeze PRs
    * prep 1 bump PR
    * trigger the freeze wizard
    * trigger it again (check that the same object is returned, there should
      only be one freeze per project at a time)
@@ -1122,49 +1124,13 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
        (0, 0, {'name': '1.0', 'sequence': 2}),
    ]

    masters = []
    for r in [repo_a, repo_b, repo_c]:
        with r:
            [root, _] = r.make_commits(
                None,
                Commit('base', tree={'version': '', 'f': '0'}),
                Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
                ref='heads/1.0'
            )
            masters.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))

    # have 2 PRs required for the freeze
    with repo_a:
        repo_a.make_commits(masters[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
        pr_required_a = repo_a.make_pr(target='master', head='apr')
    with repo_c:
        repo_c.make_commits(masters[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
        pr_required_c = repo_c.make_pr(target='master', head='cpr')

    # have 3 release PRs, only the first one updates the tree (version file)
    with repo_a:
        repo_a.make_commits(
            masters[0],
            Commit('Release 1.1 (A)', tree={'version': '1.1'}),
            ref='heads/release-1.1'
        )
        pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
    with repo_b:
        repo_b.make_commits(
            masters[1],
            Commit('Release 1.1 (B)', tree=None),
            ref='heads/release-1.1'
        )
        pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
    with repo_c:
        repo_c.make_commits(masters[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
        pr_other = repo_c.make_pr(target='master', head='whocares')
        repo_c.make_commits(
            masters[2],
            Commit('Release 1.1 (C)', tree=None),
            ref='heads/release-1.1'
        )
        pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
    [
        (master_head_a, master_head_b, master_head_c),
        (pr_required_a, _, pr_required_c),
        (pr_rel_a, pr_rel_b, pr_rel_c),
        pr_bump_a,
        pr_other
    ] = setup_mess(repo_a, repo_b, repo_c)
    env.run_crons() # process the PRs

    release_prs = {
@@ -1172,6 +1138,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
        repo_b.name: to_pr(env, pr_rel_b),
        repo_c.name: to_pr(env, pr_rel_c),
    }
    pr_bump_id = to_pr(env, pr_bump_a)
    # trigger the ~~tree~~ freeze wizard
    w = project.action_prepare_freeze()
    w2 = project.action_prepare_freeze()
@@ -1189,6 +1156,11 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
    for r in w_id.release_pr_ids:
        r.pr_id = release_prs[r.repository_id.name].id
    w_id.release_pr_ids[-1].pr_id = to_pr(env, pr_other).id
    # configure bump
    assert not w_id.bump_pr_ids, "there is no bump pr by default"
    w_id.write({'bump_pr_ids': [
        (0, 0, {'repository_id': pr_bump_id.repository.id, 'pr_id': pr_bump_id.id})
    ]})
    r = w_id.action_freeze()
    assert r == w, "the freeze is not ready so the wizard should redirect to itself"
    owner = repo_c.owner
@@ -1233,15 +1205,34 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
    assert r['res_model'] == 'runbot_merge.project'
    assert r['res_id'] == project.id

    env.run_crons() # stage the release prs
    for repo in [repo_a, repo_b, repo_c]:
        with repo:
            repo.post_status('staging.1.1', 'success', 'ci/runbot')
            repo.post_status('staging.1.1', 'success', 'legal/cla')
    env.run_crons() # get the release prs merged
    # stuff that's done directly
    for pr_id in release_prs.values():
        assert pr_id.state == 'merged'
    assert pr_bump_id.state == 'merged'

    # stuff that's behind a cron
    env.run_crons()

    assert pr_rel_a.state == "closed"
    assert pr_rel_a.base['ref'] == '1.1'
    assert pr_rel_b.state == "closed"
    assert pr_rel_b.base['ref'] == '1.1'
    assert pr_rel_c.state == "closed"
    assert pr_rel_c.base['ref'] == '1.1'
    for pr_id in release_prs.values():
        assert pr_id.target.name == '1.1'
        assert pr_id.state == 'merged'

    assert pr_bump_a.state == 'closed'
    assert pr_bump_a.base['ref'] == 'master'
    assert pr_bump_id.target.name == 'master'

    m_a = repo_a.commit('master')
    assert m_a.message.startswith('Bump A')
    assert repo_a.read_tree(m_a) == {
        'f': '1', # from master
        'g': 'x', # from required PR (merged into master before forking)
        'version': '1.2-alpha', # from bump PR
    }

    c_a = repo_a.commit('1.1')
    assert c_a.message.startswith('Release 1.1 (A)')
@@ -1252,19 +1243,71 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
    }
    c_a_parent = repo_a.commit(c_a.parents[0])
    assert c_a_parent.message.startswith('super important file')
    assert c_a_parent.parents[0] == masters[0]
    assert c_a_parent.parents[0] == master_head_a

    c_b = repo_b.commit('1.1')
    assert c_b.message.startswith('Release 1.1 (B)')
    assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''}
    assert c_b.parents[0] == masters[1]
    assert c_b.parents[0] == master_head_b

    c_c = repo_c.commit('1.1')
    assert c_c.message.startswith('Release 1.1 (C)')
    assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''}
    assert repo_c.commit(c_c.parents[0]).parents[0] == masters[2]
    assert repo_c.commit(c_c.parents[0]).parents[0] == master_head_c


def setup_mess(repo_a, repo_b, repo_c):
    master_heads = []
    for r in [repo_a, repo_b, repo_c]:
        with r:
            [root, _] = r.make_commits(
                None,
                Commit('base', tree={'version': '', 'f': '0'}),
                Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
                ref='heads/1.0'
            )
            master_heads.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
    # have 2 PRs required for the freeze
    with repo_a:
        repo_a.make_commits(master_heads[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
        pr_required_a = repo_a.make_pr(target='master', head='apr')
    with repo_c:
        repo_c.make_commits(master_heads[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
        pr_required_c = repo_c.make_pr(target='master', head='cpr')
    # have 3 release PRs, only the first one updates the tree (version file)
    with repo_a:
        repo_a.make_commits(
            master_heads[0],
            Commit('Release 1.1 (A)', tree={'version': '1.1'}),
            ref='heads/release-1.1'
        )
        pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
    with repo_b:
        repo_b.make_commits(
            master_heads[1],
            Commit('Release 1.1 (B)', tree=None),
            ref='heads/release-1.1'
        )
        pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
    with repo_c:
        repo_c.make_commits(master_heads[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
        pr_other = repo_c.make_pr(target='master', head='whocares')
        repo_c.make_commits(
            master_heads[2],
            Commit('Release 1.1 (C)', tree=None),
            ref='heads/release-1.1'
        )
        pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
    # have one bump PR on repo A
    with repo_a:
        repo_a.make_commits(
            master_heads[0],
            Commit("Bump A", tree={'version': '1.2-alpha'}),
            ref='heads/bump-1.1',
        )
        pr_bump_a = repo_a.make_pr(target='master', head='bump-1.1')
    return master_heads, (pr_required_a, None, pr_required_c), (pr_rel_a, pr_rel_b, pr_rel_c), pr_bump_a, pr_other

def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
    """It should be possible to only freeze a subset of a project when e.g. one
    of the repository is managed differently than the rest and has
@@ -1335,3 +1378,61 @@ def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
    except AssertionError:
        ...
    # can't stage because we (wilfully) don't have branches 1.1 in repos B and C

@pytest.mark.skip("env's session is not thread-safe sadface")
def test_race_conditions():
    """need the ability to dup the env in order to send concurrent requests to
    the inner odoo

    - try to run the action_freeze during a cron (merge or staging), should
      error (recover and return nice message?)
    - somehow get ahead of the action and update master's commit between moment
      where it is fetched and moment where the bump pr is fast-forwarded,
      there's actually a bit of time thanks to the rate limiting (fetch of base,
      update of tmp to base, rebase of commits on tmp, wait 1s, for each release
      and bump PR, then the release branches are created, and finally the bump
      prs)
    """
    ...

def test_freeze_conflict(env, project, repo_a, repo_b, repo_c, users, config):
    """If one of the branches we're trying to create already exists, the wizard
    fails.
    """
    project.branch_ids = [
        (1, project.branch_ids.id, {'sequence': 1}),
        (0, 0, {'name': '1.0', 'sequence': 2}),
    ]
    heads, _, (pr_rel_a, pr_rel_b, pr_rel_c), bump, other = \
        setup_mess(repo_a, repo_b, repo_c)
    env.run_crons()

    release_prs = {
        repo_a.name: to_pr(env, pr_rel_a),
        repo_b.name: to_pr(env, pr_rel_b),
        repo_c.name: to_pr(env, pr_rel_c),
    }

    w = project.action_prepare_freeze()
    w_id = env[w['res_model']].browse([w['res_id']])
    for repo, release_pr in release_prs.items():
        w_id.release_pr_ids\
            .filtered(lambda r: r.repository_id.name == repo)\
            .pr_id = release_pr.id

    # create conflicting branch
    with repo_c:
        repo_c.make_ref('heads/1.1', heads[2])

    # actually perform the freeze
    with pytest.raises(xmlrpc.client.Fault) as e:
        w_id.action_freeze()
    assert f"Unable to create branch {repo_c.name}:1.1" in e.value.faultString

    # branches a and b should have been deleted
    with pytest.raises(AssertionError) as e:
        repo_a.get_ref('heads/1.1')
    assert e.value.args[0].startswith("Not Found")
    with pytest.raises(AssertionError) as e:
        repo_b.get_ref('heads/1.1')
    assert e.value.args[0].startswith("Not Found")