[MERGE] from 14.0

Get mergebot updates from since the runbot was upgraded.

Also convert assets to new system for compatibility.
This commit is contained in:
Xavier Morel 2022-08-23 14:43:43 +02:00
commit 389ea03544
46 changed files with 1685 additions and 736 deletions

View File

@ -50,11 +50,14 @@ import functools
import http.client
import itertools
import os
import pathlib
import pprint
import random
import re
import socket
import subprocess
import sys
import tempfile
import time
import uuid
import warnings
@ -268,14 +271,18 @@ class DbDict(dict):
self._adpath = adpath
def __missing__(self, module):
self[module] = db = 'template_%s' % uuid.uuid4()
subprocess.run([
'odoo', '--no-http',
'--addons-path', self._adpath,
'-d', db, '-i', module,
'--max-cron-threads', '0',
'--stop-after-init',
'--log-level', 'warn'
], check=True)
with tempfile.TemporaryDirectory() as d:
subprocess.run([
'odoo', '--no-http',
'--addons-path', self._adpath,
'-d', db, '-i', module + ',auth_oauth',
'--max-cron-threads', '0',
'--stop-after-init',
'--log-level', 'warn'
],
check=True,
env={**os.environ, 'XDG_DATA_HOME': d}
)
return db
@pytest.fixture(scope='session')
@ -334,21 +341,48 @@ def port():
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1]
@pytest.fixture(scope='session')
def dummy_addons_path():
with tempfile.TemporaryDirectory() as dummy_addons_path:
mod = pathlib.Path(dummy_addons_path, 'saas_worker')
mod.mkdir(0o700)
(mod / '__init__.py').write_bytes(b'')
(mod / '__manifest__.py').write_text(pprint.pformat({
'name': 'dummy saas_worker',
'version': '1.0',
}), encoding='utf-8')
(mod / 'util.py').write_text("""\
def from_role(_):
return lambda fn: fn
""", encoding='utf-8')
yield dummy_addons_path
@pytest.fixture
def server(request, db, port, module):
def server(request, db, port, module, dummy_addons_path, tmpdir):
log_handlers = [
'odoo.modules.loading:WARNING',
]
if not request.config.getoption('--log-github'):
log_handlers.append('github_requests:WARNING')
addons_path = ','.join(map(str, [
request.config.getoption('--addons-path'),
dummy_addons_path,
]))
p = subprocess.Popen([
'odoo', '--http-port', str(port),
'--addons-path', request.config.getoption('--addons-path'),
'--addons-path', addons_path,
'-d', db,
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
*itertools.chain.from_iterable(('--log-handler', h) for h in log_handlers),
])
], env={
**os.environ,
# stop putting garbage in the user dirs, and potentially creating conflicts
# TODO: way to override this with macOS?
'XDG_DATA_HOME': str(tmpdir.mkdir('share')),
'XDG_CACHE_HOME': str(tmpdir.mkdir('cache')),
})
try:
wait_for_server(db, port, p, module)
@ -558,17 +592,6 @@ class Repo:
parents=[p['sha'] for p in gh_commit['parents']],
)
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from map(self._commit_from_gh, r.json())
if not r.links.get('next'):
return
def read_tree(self, commit):
""" read tree object from commit
@ -813,6 +836,12 @@ mutation setDraft($pid: ID!) {
}
}
'''
def state_prop(name: str) -> property:
@property
def _prop(self):
return self._pr[name]
return _prop.setter(lambda self, v: self._set_prop(name, v))
class PR:
def __init__(self, repo, number):
self.repo = repo
@ -837,15 +866,9 @@ class PR:
caching['If-Modified-Since']= r.headers['Last-Modified']
return contents
@property
def title(self):
return self._pr['title']
title = title.setter(lambda self, v: self._set_prop('title', v))
@property
def base(self):
return self._pr['base']
base = base.setter(lambda self, v: self._set_prop('base', v))
title = state_prop('title')
body = state_prop('body')
base = state_prop('base')
@property
def draft(self):
@ -875,10 +898,6 @@ class PR:
def state(self):
return self._pr['state']
@property
def body(self):
return self._pr['body']
@property
def comments(self):
r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
@ -1105,17 +1124,8 @@ class Model:
def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values):
return self._env(self._model, 'write', self._ids, values)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def name_get(self):
return self._env(self._model, 'name_get', self._ids)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def check_object_reference(self, *args, **kwargs):
return self.env(self._model, 'check_object_reference', *args, **kwargs)
def sorted(self, field):
rs = sorted(self.read([field]), key=lambda r: r[field])

View File

@ -8,6 +8,7 @@
'data/security.xml',
'data/crons.xml',
'data/views.xml',
'data/queues.xml',
],
'license': 'LGPL-3',
}

View File

@ -0,0 +1 @@
IMP: notifications when reopening a closed forward-port (e.g. indicate that they're detached)

View File

@ -0,0 +1 @@
IMP: use the `diff3` conflict style, should make forward port conflicts clearer and easier to fix

View File

@ -0,0 +1 @@
IMP: flag detached PRs in their dashboard

View File

@ -0,0 +1,51 @@
<odoo>
<record id="action_forward_port" model="ir.actions.act_window">
<field name="name">Forward port batches</field>
<field name="res_model">forwardport.batches</field>
<field name="context">{'active_test': False}</field>
</record>
<record id="tree_forward_port" model="ir.ui.view">
<field name="name">Forward port batches</field>
<field name="model">forwardport.batches</field>
<field name="arch" type="xml">
<tree>
<field name="source"/>
<field name="batch_id"/>
</tree>
</field>
</record>
<record id="form_forward_port" model="ir.ui.view">
<field name="name">Forward port batch</field>
<field name="model">forwardport.batches</field>
<field name="arch" type="xml">
<form>
<group>
<group><field name="source"/></group>
<group><field name="batch_id"/></group>
</group>
</form>
</field>
</record>
<record id="action_followup_updates" model="ir.actions.act_window">
<field name="name">Followup Updates</field>
<field name="res_model">forwardport.updates</field>
</record>
<record id="tree_followup_updates" model="ir.ui.view">
<field name="name">Followup Updates</field>
<field name="model">forwardport.updates</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="original_root"/>
<field name="new_root"/>
</tree>
</field>
</record>
<menuitem name="Forward Port Batches" id="menu_forward_port"
parent="runbot_merge.menu_queues"
action="action_forward_port"/>
<menuitem name="Followup Updates" id="menu_followup"
parent="runbot_merge.menu_queues"
action="action_followup_updates"/>
</odoo>

View File

@ -1,5 +1,5 @@
<odoo>
<template id="dashboard" inherit_id="runbot_merge.dashboard">
<template id="alerts" inherit_id="runbot_merge.alerts">
<xpath expr="//div[@id='alerts']">
<t t-set="fpcron" t-value="env(user=1).ref('forwardport.port_forward')"/>
<div t-if="not fpcron.active" class="alert alert-warning col-12" role="alert">
@ -12,6 +12,7 @@
<t t-set="outstanding" t-value="env['runbot_merge.pull_requests'].search_count([
('source_id', '!=', False),
('state', 'not in', ['merged', 'closed']),
('source_id.merge_date', '&lt;', datetime.datetime.now() - relativedelta(days=3)),
])"/>
<div t-if="outstanding != 0" class="alert col-md-12 alert-warning mb-0">
<a href="/forwardport/outstanding">
@ -108,6 +109,11 @@
<a t-att-href="pr.source_id.url">
<span t-field="pr.source_id.display_name"/>
</a>
<span t-if="not pr.parent_id"
class="badge badge-danger user-select-none"
title="A detached PR behaves like a non-forward-port, it has to be approved via the mergebot, this is usually caused by the forward-port having been in conflict or updated.">
DETACHED
</span>
</dd>
</t>
<t t-if="pr.forwardport_ids">
@ -175,6 +181,9 @@
<field name="inherit_id" ref="runbot_merge.runbot_merge_form_prs"/>
<field name="model">runbot_merge.pull_requests</field>
<field name="arch" type="xml">
<xpath expr="//field[@name='state']" position="after">
<field name="merge_date" attrs="{'invisible': [('state', '!=', 'merged')]}"/>
</xpath>
<xpath expr="//sheet/group[2]" position="after">
<separator string="Forward Port" attrs="{'invisible': [('source_id', '=', False)]}"/>
<group attrs="{'invisible': [('source_id', '!=', False)]}">

View File

@ -15,6 +15,7 @@ MERGE_AGE = relativedelta.relativedelta(weeks=2)
_logger = logging.getLogger(__name__)
class Queue:
__slots__ = ()
limit = 100
def _process_item(self):
@ -79,12 +80,12 @@ class ForwardPortTasks(models.Model, Queue):
batch.active = False
CONFLICT_TEMPLATE = "WARNING: the latest change ({previous.head}) triggered " \
CONFLICT_TEMPLATE = "{ping}WARNING: the latest change ({previous.head}) triggered " \
"a conflict when updating the next forward-port " \
"({next.display_name}), and has been ignored.\n\n" \
"You will need to update this pull request differently, " \
"or fix the issue by hand on {next.display_name}."
CHILD_CONFLICT = "WARNING: the update of {previous.display_name} to " \
CHILD_CONFLICT = "{ping}WARNING: the update of {previous.display_name} to " \
"{previous.head} has caused a conflict in this pull request, " \
"data may have been lost."
class UpdateQueue(models.Model, Queue):
@ -118,14 +119,15 @@ class UpdateQueue(models.Model, Queue):
Feedback.create({
'repository': child.repository.id,
'pull_request': child.number,
'message': "Ancestor PR %s has been updated but this PR"
'message': "%sancestor PR %s has been updated but this PR"
" is %s and can't be updated to match."
"\n\n"
"You may want or need to manually update any"
" followup PR." % (
self.new_root.display_name,
child.state,
)
child.ping(),
self.new_root.display_name,
child.state,
)
})
return
@ -137,6 +139,7 @@ class UpdateQueue(models.Model, Queue):
'repository': previous.repository.id,
'pull_request': previous.number,
'message': CONFLICT_TEMPLATE.format(
ping=previous.ping(),
previous=previous,
next=child
)
@ -144,7 +147,7 @@ class UpdateQueue(models.Model, Queue):
Feedback.create({
'repository': child.repository.id,
'pull_request': child.number,
'message': CHILD_CONFLICT.format(previous=previous, next=child)\
'message': CHILD_CONFLICT.format(ping=child.ping(), previous=previous, next=child)\
+ (f'\n\nstdout:\n```\n{out.strip()}\n```' if out.strip() else '')
+ (f'\n\nstderr:\n```\n{err.strip()}\n```' if err.strip() else '')
})

View File

@ -240,6 +240,7 @@ class PullRequests(models.Model):
# again 2 PRs with same head is weird so...
newhead = vals.get('head')
with_parents = self.filtered('parent_id')
closed_fp = self.filtered(lambda p: p.state == 'closed' and p.source_id)
if newhead and not self.env.context.get('ignore_head_update') and newhead != self.head:
vals.setdefault('parent_id', False)
# if any children, this is an FP PR being updated, enqueue
@ -261,10 +262,24 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': p.repository.id,
'pull_request': p.number,
'message': "This PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % p.repository.project_id.github_prefix,
'message': "%sthis PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % (
p.source_id.ping(),
p.repository.project_id.github_prefix,
),
'token_field': 'fp_github_token',
})
for p in closed_fp.filtered(lambda p: p.state != 'closed'):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': p.repository.id,
'pull_request': p.number,
'message': "%sthis PR was closed then reopened. "
"It should be merged the normal way (via @%s)" % (
p.source_id.ping(),
p.repository.project_id.github_prefix,
),
'token_field': 'fp_github_token',
})
if vals.get('state') == 'merged':
for p in self:
self.env['forwardport.branch_remover'].create({
@ -282,6 +297,7 @@ class PullRequests(models.Model):
r = super()._try_closing(by)
if r:
self.with_context(forwardport_detach_warn=False).parent_id = False
self.search([('parent_id', '=', self.id)]).parent_id = False
return r
def _parse_commands(self, author, comment, login):
@ -298,7 +314,6 @@ class PullRequests(models.Model):
)
return
Feedback = self.env['runbot_merge.pull_requests.feedback']
# TODO: don't use a mutable tokens iterator
tokens = iter(tokens)
while True:
@ -306,6 +321,7 @@ class PullRequests(models.Model):
if token is None:
break
ping = False
close = False
msg = None
if token in ('ci', 'skipci'):
@ -314,7 +330,8 @@ class PullRequests(models.Model):
pr.fw_policy = token
msg = "Not waiting for CI to create followup forward-ports." if token == 'skipci' else "Waiting for CI to create followup forward-ports."
else:
msg = "I don't trust you enough to do that @{}.".format(login)
ping = True
msg = "you can't configure ci."
if token == 'ignore': # replace 'ignore' by 'up to <pr_branch>'
token = 'up'
@ -322,55 +339,56 @@ class PullRequests(models.Model):
if token in ('r+', 'review+'):
if not self.source_id:
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': "I'm sorry, @{}. I can only do this on forward-port PRs and this ain't one.".format(login),
'token_field': 'fp_github_token',
})
continue
merge_bot = self.repository.project_id.github_prefix
# don't update the root ever
for pr in (p for p in self._iter_ancestors() if p.parent_id if p.state in RPLUS):
# only the author is delegated explicitely on the
pr._parse_commands(author, {**comment, 'body': merge_bot + ' r+'}, login)
ping = True
msg = "I can only do this on forward-port PRs and this is not one, see {}.".format(
self.repository.project_id.github_prefix
)
elif not self.parent_id:
ping = True
msg = "I can only do this on unmodified forward-port PRs, ask {}.".format(
self.repository.project_id.github_prefix
)
else:
merge_bot = self.repository.project_id.github_prefix
# don't update the root ever
for pr in (p for p in self._iter_ancestors() if p.parent_id if p.state in RPLUS):
# only the author is delegated explicitely on the
pr._parse_commands(author, {**comment, 'body': merge_bot + ' r+'}, login)
elif token == 'close':
msg = "I'm sorry, @{}. I can't close this PR for you.".format(
login)
if self.source_id._pr_acl(author).is_reviewer:
close = True
msg = None
else:
ping = True
msg = "you can't close PRs."
elif token == 'up' and next(tokens, None) == 'to':
limit = next(tokens, None)
ping = True
if not self._pr_acl(author).is_author:
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': "I'm sorry, @{}. You can't set a forward-port limit.".format(login),
'token_field': 'fp_github_token',
})
continue
if not limit:
msg = "Please provide a branch to forward-port to."
msg = "you can't set a forward-port limit.".format(login)
elif not limit:
msg = "please provide a branch to forward-port to."
else:
limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([
('project_id', '=', self.repository.project_id.id),
('name', '=', limit),
])
if self.source_id:
msg = "Sorry, forward-port limit can only be set on " \
msg = "forward-port limit can only be set on " \
f"an origin PR ({self.source_id.display_name} " \
"here) before it's merged and forward-ported."
elif self.state in ['merged', 'closed']:
msg = "Sorry, forward-port limit can only be set before the PR is merged."
msg = "forward-port limit can only be set before the PR is merged."
elif not limit_id:
msg = "There is no branch %r, it can't be used as a forward port target." % limit
msg = "there is no branch %r, it can't be used as a forward port target." % limit
elif limit_id == self.target:
ping = False
msg = "Forward-port disabled."
self.limit_id = limit_id
elif not limit_id.fp_enabled:
msg = "Branch %r is disabled, it can't be used as a forward port target." % limit_id.name
msg = "branch %r is disabled, it can't be used as a forward port target." % limit_id.name
else:
ping = False
msg = "Forward-porting to %r." % limit_id.name
self.limit_id = limit_id
@ -382,7 +400,7 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': self.repository.id,
'pull_request': self.number,
'message': msg,
'message': f'@{author.github_login} {msg}' if msg and ping else msg,
'close': close,
'token_field': 'fp_github_token',
})
@ -397,8 +415,8 @@ class PullRequests(models.Model):
'repository': self.repository.id,
'pull_request': self.number,
'token_field': 'fp_github_token',
'message': '%s\n\n%s failed on this forward-port PR' % (
self.source_id._pingline(),
'message': '%s%s failed on this forward-port PR' % (
self.source_id.ping(),
ci,
)
})
@ -578,10 +596,10 @@ class PullRequests(models.Model):
'repository': pr.repository.id,
'pull_request': pr.number,
'token_field': 'fp_github_token',
'message': "This pull request can not be forward ported: "
'message': "%sthis pull request can not be forward ported: "
"next branch is %r but linked pull request %s "
"has a next branch %r." % (
t.name, linked.display_name, other.name
pr.ping(), t.name, linked.display_name, other.name
)
})
_logger.warning(
@ -678,8 +696,8 @@ class PullRequests(models.Model):
'delegates': [(6, False, (source.delegates | pr.delegates).ids)]
})
if has_conflicts and pr.parent_id and pr.state not in ('merged', 'closed'):
message = source._pingline() + """
The next pull request (%s) is in conflict. You can merge the chain up to here by saying
message = source.ping() + """\
the next pull request (%s) is in conflict. You can merge the chain up to here by saying
> @%s r+
%s""" % (new_pr.display_name, pr.repository.project_id.fp_github_name, footer)
self.env['runbot_merge.pull_requests.feedback'].create({
@ -710,19 +728,21 @@ The next pull request (%s) is in conflict. You can merge the chain up to here by
'* %s%s\n' % (sha, ' <- on this commit' if sha == h else '')
for sha in hh
)
message = f"""{source._pingline()} cherrypicking of pull request {source.display_name} failed.
message = f"""{source.ping()}cherrypicking of pull request {source.display_name} failed.
{lines}{sout}{serr}
Either perform the forward-port manually (and push to this branch, proceeding as usual) or close this PR (maybe?).
In the former case, you may want to edit this PR message as well.
"""
elif has_conflicts:
message = """%s
While this was properly forward-ported, at least one co-dependent PR (%s) did not succeed. You will need to fix it before this can be merged.
message = """%s\
while this was properly forward-ported, at least one co-dependent PR (%s) did \
not succeed. You will need to fix it before this can be merged.
Both this PR and the others will need to be approved via `@%s r+` as they are all considered "in conflict".
Both this PR and the others will need to be approved via `@%s r+` as they are \
all considered "in conflict".
%s""" % (
source._pingline(),
source.ping(),
', '.join(p.display_name for p in (new_batch - new_pr)),
proj.github_prefix,
footer
@ -733,8 +753,8 @@ Both this PR and the others will need to be approved via `@%s r+` as they are al
for p in pr._iter_ancestors()
if p.parent_id
)
message = source._pingline() + """
This PR targets %s and is the last of the forward-port chain%s
message = source.ping() + """\
this PR targets %s and is the last of the forward-port chain%s
%s
To merge the full chain, say
> @%s r+
@ -772,14 +792,6 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
b.prs[0]._schedule_fp_followup()
return b
def _pingline(self):
assignees = (self.author | self.reviewed_by).mapped('github_login')
return "Ping %s" % ', '.join(
'@' + login
for login in assignees
if login
)
def _create_fp_branch(self, target_branch, fp_branch_name, cleanup):
""" Creates a forward-port for the current PR to ``target_branch`` under
``fp_branch_name``.
@ -794,6 +806,7 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
:rtype: (None | (str, str, str, list[str]), Repo)
"""
source = self._get_local_directory()
root = self._get_root()
# update all the branches & PRs
r = source.with_params('gc.pruneExpire=1.day.ago')\
.with_config(
@ -802,17 +815,16 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
)\
.fetch('-p', 'origin')
_logger.info("Updated %s:\n%s", source._directory, r.stdout.decode())
# FIXME: check that pr.head is pull/{number}'s head instead?
source.cat_file(e=self.head)
source.cat_file(e=root.head)
# create working copy
_logger.info("Create working copy to forward-port %s:%d to %s",
self.repository.name, self.number, target_branch.name)
_logger.info(
"Create working copy to forward-port %s (really %s) to %s",
self.display_name, root.display_name, target_branch.name)
working_copy = source.clone(
cleanup.enter_context(
tempfile.TemporaryDirectory(
prefix='%s:%d-to-%s-' % (
self.repository.name,
self.number,
prefix='%s-to-%s-' % (
root.display_name,
target_branch.name
),
dir=user_cache_dir('forwardport')
@ -831,7 +843,6 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
_logger.info("Create FP branch %s", fp_branch_name)
working_copy.checkout(b=fp_branch_name)
root = self._get_root()
try:
root._cherry_pick(working_copy)
return None, working_copy
@ -876,7 +887,7 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
# switch back to the PR branch
conf.checkout(fp_branch_name)
# cherry-pick the squashed commit to generate the conflict
conf.with_params('merge.renamelimit=0')\
conf.with_params('merge.renamelimit=0', 'merge.conflictstyle=diff3')\
.with_config(check=False)\
.cherry_pick(squashed, no_commit=True)
status = conf.stdout().status(short=True, untracked_files='no').stdout.decode()
@ -1015,7 +1026,7 @@ stderr:
subprocess.run([
'git', 'clone', '--bare',
'https://{}:{}@github.com/{}'.format(
self.repository.project_id.fp_github_name,
self.repository.project_id.fp_github_name or '',
self.repository.project_id.fp_github_token,
self.repository.name,
),
@ -1084,8 +1095,9 @@ stderr:
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': source.repository.id,
'pull_request': source.number,
'message': "This pull request has forward-port PRs awaiting action (not merged or closed): %s" % ', '.join(
pr.display_name for pr in sorted(prs, key=lambda p: p.number)
'message': "%sthis pull request has forward-port PRs awaiting action (not merged or closed):\n%s" % (
source.ping(),
'\n- '.join(pr.display_name for pr in sorted(prs, key=lambda p: p.number))
),
'token_field': 'fp_github_token',
})
@ -1134,7 +1146,7 @@ class Repo:
try:
return self._opener(args, **opts)
except subprocess.CalledProcessError as e:
_logger.error("git call error:\n%s", e.stderr.decode())
_logger.error("git call error:%s", ('\n' + e.stderr.decode()) if e.stderr else e )
raise
def stdout(self, flag=True):

View File

@ -20,13 +20,3 @@ class FreezeWizard(models.Model):
if not self.search_count([]):
self.env.ref('forwardport.port_forward').active = True
return r
def action_freeze(self):
# have to store wizard content as it's removed during freeze
project = self.project_id
branches_before = project.branch_ids
prs = self.mapped('release_pr_ids.pr_id')
r = super().action_freeze()
new_branch = project.branch_ids - branches_before
prs.write({'limit_id': new_branch.id})
return r

View File

@ -1,12 +1,8 @@
# -*- coding: utf-8 -*-
import pathlib
import re
import requests
from shutil import rmtree
import pytest
from odoo.tools.appdirs import user_cache_dir
import requests
@pytest.fixture
def default_crons():
@ -47,20 +43,6 @@ def _check_scopes(config):
assert token_scopes >= required_scopes, \
"%s should have scopes %s, found %s" % (section, token_scopes, required_scopes)
@pytest.fixture(autouse=True)
def _cleanup_cache(config, users):
""" forwardport has a repo cache which it assumes is unique per name
but tests always use the same repo paths / names for different repos
(the repos get re-created), leading to divergent repo histories.
So clear cache after each test, two tests should not share repos.
"""
yield
cache_root = pathlib.Path(user_cache_dir('forwardport'))
rmtree(cache_root / config['github']['owner'], ignore_errors=True)
for login in users.values():
rmtree(cache_root / login, ignore_errors=True)
@pytest.fixture()
def module():
""" When a test function is (going to be) run, selects the containing

View File

@ -60,9 +60,10 @@ def test_conflict(env, config, make_repo, users):
'g': 'a',
'h': re_matches(r'''<<<\x3c<<< HEAD
a
|||||||| parent of [\da-f]{7,}.*
=======
xxx
>>>\x3e>>> [0-9a-f]{7,}.*
>>>\x3e>>> [\da-f]{7,}.*
'''),
}
prb = prod.get_pr(prb_id.number)
@ -73,8 +74,8 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
'''),
(users['user'], """Ping @%s, @%s
The next pull request (%s) is in conflict. You can merge the chain up to here by saying
(users['user'], """@%s @%s the next pull request (%s) is in conflict. \
You can merge the chain up to here by saying
> @%s r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
@ -323,9 +324,10 @@ def test_multiple_commits_different_authorship(env, config, make_repo, users, ro
assert re.match(r'''<<<\x3c<<< HEAD
b
|||||||| parent of [\da-f]{7,}.*
=======
2
>>>\x3e>>> [0-9a-f]{7,}.*
>>>\x3e>>> [\da-f]{7,}.*
''', prod.read_tree(c)['g'])
# I'd like to fix the conflict so everything is clean and proper *but*
@ -343,11 +345,12 @@ b
assert pr2.comments == [
seen(env, pr2, users),
(users['user'], re_matches(r'Ping.*CONFLICT', re.DOTALL)),
(users['user'], re_matches(r'@%s @%s .*CONFLICT' % (users['user'], users['reviewer']), re.DOTALL)),
(users['reviewer'], 'hansen r+'),
(users['user'], f"All commits must have author and committer email, "
(users['user'], f"@{users['user']} @{users['reviewer']} unable to stage: "
"All commits must have author and committer email, "
f"missing email on {pr2_id.head} indicates the "
f"authorship is most likely incorrect."),
"authorship is most likely incorrect."),
]
assert pr2_id.state == 'error'
assert not pr2_id.staging_id, "staging should have been rejected"

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import collections
import time
import pytest
@ -157,12 +158,12 @@ def test_disable(env, config, make_repo, users, enabled):
# responses and we don't care that much
assert set(pr.comments) == {
(users['reviewer'], "hansen r+\n%s up to" % bot_name),
(users['other'], "@%s please provide a branch to forward-port to." % users['reviewer']),
(users['reviewer'], "%s up to b" % bot_name),
(users['other'], "@%s branch 'b' is disabled, it can't be used as a forward port target." % users['reviewer']),
(users['reviewer'], "%s up to foo" % bot_name),
(users['other'], "@%s there is no branch 'foo', it can't be used as a forward port target." % users['reviewer']),
(users['reviewer'], "%s up to c" % bot_name),
(users['other'], "Please provide a branch to forward-port to."),
(users['other'], "Branch 'b' is disabled, it can't be used as a forward port target."),
(users['other'], "There is no branch 'foo', it can't be used as a forward port target."),
(users['other'], "Forward-porting to 'c'."),
seen(env, pr, users),
}
@ -201,14 +202,13 @@ def test_default_disabled(env, config, make_repo, users):
assert pr2.comments == [
seen(env, pr2, users),
(users['user'], """\
Ping @%s, @%s
This PR targets b and is the last of the forward-port chain.
@%(user)s @%(reviewer)s this PR targets b and is the last of the forward-port chain.
To merge the full chain, say
> @%s r+
> @%(user)s r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""" % (users['user'], users['reviewer'], users['user'])),
""" % users)
]
def test_limit_after_merge(env, config, make_repo, users):
@ -247,7 +247,7 @@ def test_limit_after_merge(env, config, make_repo, users):
(users['reviewer'], "hansen r+"),
seen(env, pr1, users),
(users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set before the PR is merged."),
(bot_name, "@%s forward-port limit can only be set before the PR is merged." % users['reviewer']),
]
assert pr2.comments == [
seen(env, pr2, users),
@ -257,9 +257,11 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
"""),
(users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set on an origin PR"
" (%s here) before it's merged and forward-ported." % p1.display_name
),
(bot_name, "@%s forward-port limit can only be set on an origin PR"
" (%s here) before it's merged and forward-ported." % (
users['reviewer'],
p1.display_name,
)),
]
# update pr2 to detach it from pr1
@ -279,10 +281,13 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
env.run_crons()
assert pr2.comments[4:] == [
(bot_name, "This PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @hansen)"),
(bot_name, "@%s @%s this PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % (
users['user'], users['reviewer'],
p2.repository.project_id.github_prefix
)),
(users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set on an origin PR "
(bot_name, f"@{users['reviewer']} forward-port limit can only be set on an origin PR "
f"({p1.display_name} here) before it's merged and forward-ported."
),
]

View File

@ -125,8 +125,11 @@ def test_straightforward_flow(env, config, make_repo, users):
assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-ff'),
seen(env, pr, users),
(users['user'], 'Merge method set to rebase and fast-forward'),
(users['user'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + ', '.join((pr1 | pr2).mapped('display_name'))),
(users['user'], 'Merge method set to rebase and fast-forward.'),
(users['user'], '@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % (
users['other'], users['reviewer'],
'\n- '.join((pr1 | pr2).mapped('display_name'))
)),
]
assert pr0_ == pr0
@ -148,8 +151,7 @@ def test_straightforward_flow(env, config, make_repo, users):
assert pr2_remote.comments == [
seen(env, pr2_remote, users),
(users['user'], """\
Ping @%s, @%s
This PR targets c and is the last of the forward-port chain containing:
@%s @%s this PR targets c and is the last of the forward-port chain containing:
* %s
To merge the full chain, say
@ -314,11 +316,18 @@ def test_empty(env, config, make_repo, users):
env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
awaiting = (
users['other'],
'@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % (
users['user'], users['reviewer'],
fail_id.display_name
)
)
assert pr1.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr1, users),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name),
awaiting,
awaiting,
], "each cron run should trigger a new message on the ancestor"
# check that this stops if we close the PR
with prod:
@ -327,8 +336,8 @@ def test_empty(env, config, make_repo, users):
assert pr1.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr1, users),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name),
awaiting,
awaiting,
]
def test_partially_empty(env, config, make_repo):
@ -562,8 +571,7 @@ def test_delegate_fw(env, config, make_repo, users):
assert pr2.comments == [
seen(env, pr2, users),
(users['user'], '''Ping @{self_reviewer}, @{reviewer}
This PR targets c and is the last of the forward-port chain.
(users['user'], '''@{self_reviewer} @{reviewer} this PR targets c and is the last of the forward-port chain.
To merge the full chain, say
> @{user} r+
@ -790,11 +798,12 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""")
]
def test_closing_after_fp(self, env, config, make_repo):
def test_closing_after_fp(self, env, config, make_repo, users):
""" Closing a PR which has been forward-ported should not touch the
followups
"""
prod, other = make_basic(env, config, make_repo)
project = env['runbot_merge.project'].search([])
with prod:
[p_1] = prod.make_commits(
'a',
@ -814,23 +823,51 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
# should merge the staging then create the FP PR
env.run_crons()
pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number')
pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number')
with prod:
prod.post_status(pr1.head, 'success', 'legal/cla')
prod.post_status(pr1.head, 'success', 'ci/runbot')
prod.post_status(pr1_id.head, 'success', 'legal/cla')
prod.post_status(pr1_id.head, 'success', 'ci/runbot')
# should create the second staging
env.run_crons()
pr0_1, pr1_1, pr2_1 = env['runbot_merge.pull_requests'].search([], order='number')
assert pr0_1 == pr0
assert pr1_1 == pr1
pr0_id2, pr1_id2, pr2_id = env['runbot_merge.pull_requests'].search([], order='number')
assert pr0_id2 == pr0_id
assert pr1_id2 == pr1_id
pr1 = prod.get_pr(pr1_id.number)
with prod:
pr1.close()
assert pr1_id.state == 'closed'
assert not pr1_id.parent_id
assert pr2_id.state == 'opened'
assert not pr2_id.parent_id, \
"the descendant of a closed PR doesn't really make sense, maybe?"
with prod:
prod.get_pr(pr1.number).close()
pr1.open()
assert pr1_id.state == 'validated'
env.run_crons()
assert pr1.comments[-1] == (
users['user'],
"@{} @{} this PR was closed then reopened. "
"It should be merged the normal way (via @{})".format(
users['user'],
users['reviewer'],
project.github_prefix,
)
)
assert pr1_1.state == 'closed'
assert not pr1_1.parent_id
assert pr2_1.state == 'opened'
with prod:
pr1.post_comment(f'{project.fp_github_name} r+', config['role_reviewer']['token'])
env.run_crons()
assert pr1.comments[-1] == (
users['user'],
"@{} I can only do this on unmodified forward-port PRs, ask {}.".format(
users['reviewer'],
project.github_prefix,
),
)
class TestBranchDeletion:
def test_delete_normal(self, env, config, make_repo):

View File

@ -44,7 +44,7 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
''')
ci_warning = (users['user'], 'Ping @%(user)s, @%(reviewer)s\n\nci/runbot failed on this forward-port PR' % users)
ci_warning = (users['user'], '@%(user)s @%(reviewer)s ci/runbot failed on this forward-port PR' % users)
# oh no CI of the first FP PR failed!
# simulate status being sent multiple times (e.g. on multiple repos) with
@ -103,7 +103,11 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
assert pr1_remote.comments == [
seen(env, pr1_remote, users),
fp_intermediate, ci_warning, ci_warning,
(users['user'], "This PR was modified / updated and has become a normal PR. It should be merged the normal way (via @%s)" % pr1_id.repository.project_id.github_prefix),
(users['user'], "@%s @%s this PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % (
users['user'], users['reviewer'],
pr1_id.repository.project_id.github_prefix
)),
], "users should be warned that the PR has become non-FP"
# NOTE: should the followup PR wait for pr1 CI or not?
assert pr2_id.head != pr2_head
@ -210,9 +214,13 @@ def test_update_merged(env, make_repo, config, users):
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
'''),
(users['reviewer'], 'hansen r+'),
(users['user'], """Ancestor PR %s has been updated but this PR is merged and can't be updated to match.
(users['user'], """@%s @%s ancestor PR %s has been updated but this PR is merged and can't be updated to match.
You may want or need to manually update any followup PR.""" % pr1_id.display_name)
You may want or need to manually update any followup PR.""" % (
users['user'],
users['reviewer'],
pr1_id.display_name,
))
]
def test_duplicate_fw(env, make_repo, setreviewers, config, users):
@ -366,9 +374,10 @@ def test_subsequent_conflict(env, make_repo, config, users):
'g': 'a',
'h': re_matches(r'''<<<\x3c<<< HEAD
a
|||||||| parent of [\da-f]{7,}.*
=======
conflict!
>>>\x3e>>> [0-9a-f]{7,}.*
>>>\x3e>>> [\da-f]{7,}.*
'''),
'x': '0',
}
@ -377,7 +386,7 @@ conflict!
# 2. "forward port chain" bit
# 3. updated / modified & got detached
assert pr2.comments[3:] == [
(users['user'], f"WARNING: the latest change ({pr2_id.head}) triggered "
(users['user'], f"@{users['user']} WARNING: the latest change ({pr2_id.head}) triggered "
f"a conflict when updating the next forward-port "
f"({pr3_id.display_name}), and has been ignored.\n\n"
f"You will need to update this pull request "
@ -389,7 +398,7 @@ conflict!
# 2. forward-port chain thing
assert repo.get_pr(pr3_id.number).comments[2:] == [
(users['user'], re_matches(f'''\
WARNING: the update of {pr2_id.display_name} to {pr2_id.head} has caused a \
@{users['user']} WARNING: the update of {pr2_id.display_name} to {pr2_id.head} has caused a \
conflict in this pull request, data may have been lost.
stdout:

View File

@ -399,18 +399,20 @@ class TestNotAllBranches:
assert pr_a.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr_a, users),
(users['user'], "This pull request can not be forward ported: next "
"branch is 'b' but linked pull request %s#%d has a"
" next branch 'c'." % (b.name, pr_b.number)
)
(users['user'], "@%s @%s this pull request can not be forward ported:"
" next branch is 'b' but linked pull request %s "
"has a next branch 'c'." % (
users['user'], users['reviewer'], pr_b_id.display_name,
)),
]
assert pr_b.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr_b, users),
(users['user'], "This pull request can not be forward ported: next "
"branch is 'c' but linked pull request %s#%d has a"
" next branch 'b'." % (a.name, pr_a.number)
)
(users['user'], "@%s @%s this pull request can not be forward ported:"
" next branch is 'c' but linked pull request %s "
"has a next branch 'b'." % (
users['user'], users['reviewer'], pr_a_id.display_name,
)),
]
def test_new_intermediate_branch(env, config, make_repo):
@ -588,9 +590,11 @@ def test_author_can_close_via_fwbot(env, config, make_repo):
pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number')
assert pr0_id.number == pr.number
pr1 = prod.get_pr(pr1_id.number)
# user can't close PR directly
# `other` can't close fw PR directly, because that requires triage (and even
# write depending on account type) access to the repo, which an external
# contributor probably does not have
with prod, pytest.raises(Exception):
pr1.close(other_token) # what the fuck?
pr1.close(other_token)
# use can close via fwbot
with prod:
pr1.post_comment('%s close' % project.fp_github_name, other_token)
@ -755,7 +759,7 @@ def test_approve_draft(env, config, make_repo, users):
assert pr.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr, users),
(users['user'], f"I'm sorry, @{users['reviewer']}. Draft PRs can not be approved."),
(users['user'], f"I'm sorry, @{users['reviewer']}: draft PRs can not be approved."),
]
with prod:
@ -799,10 +803,9 @@ def test_freeze(env, config, make_repo, users):
assert not w_id.errors
w_id.action_freeze()
env.run_crons() # stage freeze PRs
with prod:
prod.post_status('staging.post-b', 'success', 'ci/runbot')
prod.post_status('staging.post-b', 'success', 'legal/cla')
# run crons to process the feedback, run a second time in case of e.g.
# forward porting
env.run_crons()
env.run_crons()
assert release_id.state == 'merged'

View File

@ -49,7 +49,7 @@ class re_matches:
return self._r.match(text)
def __repr__(self):
return '~' + self._r.pattern + '~'
return self._r.pattern + '...'
def seen(env, pr, users):
return users['user'], f'[Pull request status dashboard]({to_pr(env, pr).url}).'
@ -126,10 +126,12 @@ def pr_page(page, pr):
return html.fromstring(page(f'/{pr.repo.name}/pull/{pr.number}'))
def to_pr(env, pr):
return env['runbot_merge.pull_requests'].search([
pr = env['runbot_merge.pull_requests'].search([
('repository.name', '=', pr.repo.name),
('number', '=', pr.number),
])
assert len(pr) == 1, f"Expected to find {pr.repo.name}#{pr.number}, got {pr}."
return pr
def part_of(label, pr_id, *, separator='\n\n'):
""" Adds the "part-of" pseudo-header in the footer.

View File

@ -10,6 +10,8 @@
'views/res_partner.xml',
'views/runbot_merge_project.xml',
'views/mergebot.xml',
'views/queues.xml',
'views/configuration.xml',
'views/templates.xml',
'models/project_freeze/views.xml',
],

View File

@ -0,0 +1 @@
IMP: show current alerts (disabled crons) on the PR pages

View File

@ -0,0 +1,4 @@
IMP: automatically close PRs when their target branch is deactivated
Leave a message on the PRs to explain, such PRs should also be reopen-able if
the users wants to retarget them.

View File

@ -0,0 +1,4 @@
FIX: correctly handle PR empty PR descriptions
Github's webhook for this case are weird, and weren't handled correctly,
updating a PR's description to *or from* empty might be mishandled.

View File

@ -0,0 +1,3 @@
IMP: review pinging (`@`-notification) of users by the mergebot and forwardbot
The bots should more consistently ping users when they need some sort of action to proceed.

View File

@ -0,0 +1 @@
ADD: automated provisioning of accounts from odoo.com

View File

@ -0,0 +1,8 @@
IMP: various UI items
- more clearly differentiate between "pending" and "unknown" statuses on stagings
- fix "outstanding forward ports" count
- add date of staging last modification (= success / failure instant)
- correctly retrieve and include fast-forward and unstaging reasons
- show the warnings banner (e.g. staging disabled) on the PR pages, as not all
users routinely visit the main dashboard

View File

@ -0,0 +1 @@
FIX: properly unstage pull requests when they're retargeted (base branch is changed)

View File

@ -120,41 +120,49 @@ def handle_pr(env, event):
if not source_branch:
return handle_pr(env, dict(event, action='opened'))
pr_obj = find(source_branch)
updates = {}
if source_branch != branch:
updates['target'] = branch.id
updates['squash'] = pr['commits'] == 1
if event['changes'].keys() & {'title', 'body'}:
updates['message'] = "{}\n\n{}".format(pr['title'].strip(), pr['body'].strip())
if branch != pr_obj.target:
updates['target'] = branch.id
updates['squash'] = pr['commits'] == 1
# turns out github doesn't bother sending a change key if the body is
# changing from empty (None), therefore ignore that entirely, just
# generate the message and check if it changed
message = pr['title'].strip()
body = (pr['body'] or '').strip()
if body:
message += f"\n\n{body}"
if message != pr_obj.message:
updates['message'] = message
_logger.info("update: %s#%d = %s (by %s)", repo.name, pr['number'], updates, event['sender']['login'])
if updates:
pr_obj = find(source_branch)
pr_obj.write(updates)
return 'Updated {}'.format(pr_obj.id)
return "Nothing to update ({})".format(event['changes'].keys())
message = None
if not branch:
message = f"This PR targets the un-managed branch {r}:{b}, it can not be merged."
message = f"This PR targets the un-managed branch {r}:{b}, it needs to be retargeted before it can be merged."
_logger.info("Ignoring event %s on PR %s#%d for un-managed branch %s",
event['action'], r, pr['number'], b)
elif not branch.active:
message = f"This PR targets the disabled branch {r}:{b}, it can not be merged."
message = f"This PR targets the disabled branch {r}:{b}, it needs to be retargeted before it can be merged."
if message and event['action'] not in ('synchronize', 'closed'):
feedback(message=message)
if not branch:
return "Not set up to care about {}:{}".format(r, b)
author_name = pr['user']['login']
author = env['res.partner'].search([('github_login', '=', author_name)], limit=1)
if not author:
author = env['res.partner'].create({
'name': author_name,
'github_login': author_name,
})
_logger.info("%s: %s#%s (%s) (%s)", event['action'], repo.name, pr['number'], pr['title'].strip(), author.github_login)
_logger.info("%s: %s#%s (%s) (by %s)", event['action'], repo.name, pr['number'], pr['title'].strip(), event['sender']['login'])
if event['action'] == 'opened':
author_name = pr['user']['login']
author = env['res.partner'].search([('github_login', '=', author_name)], limit=1)
if not author:
env['res.partner'].create({'name': author_name, 'github_login': author_name})
pr_obj = env['runbot_merge.pull_requests']._from_gh(pr)
return "Tracking PR as {}".format(pr_obj.id)
@ -171,11 +179,7 @@ def handle_pr(env, event):
return "It's my understanding that closed/merged PRs don't get sync'd"
if pr_obj.state == 'ready':
pr_obj.unstage(
"PR %s updated by %s",
pr_obj.display_name,
event['sender']['login']
)
pr_obj.unstage("updated by %s", event['sender']['login'])
_logger.info(
"PR %s updated to %s by %s, resetting to 'open' and squash=%s",
@ -222,7 +226,7 @@ def handle_pr(env, event):
if pr_obj.state == 'merged':
feedback(
close=True,
message="@%s ya silly goose you can't reopen a PR that's been merged PR." % event['sender']['login']
message="@%s ya silly goose you can't reopen a merged PR." % event['sender']['login']
)
if pr_obj.state == 'closed':

View File

@ -6,12 +6,9 @@ import pathlib
import markdown
import markupsafe
import werkzeug.exceptions
from lxml import etree
from lxml.builder import ElementMaker
from odoo.http import Controller, route, request
A = ElementMaker(namespace="http://www.w3.org/2005/Atom")
LIMIT = 20
class MergebotDashboard(Controller):
@route('/runbot_merge', auth="public", type="http", website=True)
@ -22,13 +19,17 @@ class MergebotDashboard(Controller):
@route('/runbot_merge/<int:branch_id>', auth='public', type='http', website=True)
def stagings(self, branch_id, until=None):
branch = request.env['runbot_merge.branch'].browse(branch_id).sudo().exists()
if not branch:
raise werkzeug.exceptions.NotFound()
stagings = request.env['runbot_merge.stagings'].with_context(active_test=False).sudo().search([
('target', '=', branch_id),
('target', '=', branch.id),
('staged_at', '<=', until) if until else (True, '=', True),
], order='staged_at desc', limit=LIMIT+1)
return request.render('runbot_merge.branch_stagings', {
'branch': request.env['runbot_merge.branch'].browse(branch_id).sudo(),
'branch': branch,
'stagings': stagings[:LIMIT],
'next': stagings[-1].staged_at if len(stagings) > LIMIT else None,
})

View File

@ -1,18 +1,114 @@
# -*- coding: utf-8 -*-
import logging
from odoo.http import Controller, request, route
from odoo import http
from odoo.http import request
try:
from odoo.addons.saas_worker.util import from_role
except ImportError:
def from_role(_):
return lambda _: None
class MergebotReviewerProvisioning(http.Controller):
_logger = logging.getLogger(__name__)
class MergebotReviewerProvisioning(Controller):
@from_role('accounts')
@route('/runbot_merge/users', type='json', auth='public')
def list_users(self):
env = request.env(su=True)
return [{
'github_login': u.github_login,
'email': u.email,
}
for u in env['res.users'].search([])
if u.github_login
]
@from_role('accounts')
@http.route(['/runbot_merge/get_reviewers'], type='json', auth='public')
@route('/runbot_merge/provision', type='json', auth='public')
def provision_user(self, users):
_logger.info('Provisioning %s users: %s.', len(users), ', '.join(map(
'{email} ({github_login})'.format_map,
users
)))
env = request.env(su=True)
Partners = env['res.partner']
Users = env['res.users']
existing_partners = Partners.search([
'|', ('email', 'in', [u['email'] for u in users]),
('github_login', 'in', [u['github_login'] for u in users])
])
_logger.info("Found %d existing matching partners.", len(existing_partners))
partners = {}
for p in existing_partners:
if p.email:
# email is not unique, though we want it to be (probably)
current = partners.get(p.email)
if current:
_logger.warning(
"Lookup conflict: %r set on two partners %r and %r.",
p.email, current.display_name, p.display_name,
)
else:
partners[p.email] = p
if p.github_login:
# assume there can't be an existing one because github_login is
# unique, and should not be able to collide with emails
partners[p.github_login] = p
internal = env.ref('base.group_user')
odoo_provider = env.ref('auth_oauth.provider_openerp')
to_create = []
created = updated = 0
for new in users:
if 'sub' in new:
new['oauth_provider_id'] = odoo_provider.id
new['oauth_uid'] = new.pop('sub')
# prioritise by github_login as that's the unique-est point of information
current = partners.get(new['github_login']) or partners.get(new['email']) or Partners
# entry doesn't have user -> create user
if not current.user_ids:
# skip users without an email (= login) as that
# fails
if not new['email']:
continue
new['login'] = new['email']
new['groups_id'] = [(4, internal.id)]
# entry has partner -> create user linked to existing partner
# (and update partner implicitly)
if current:
new['partner_id'] = current.id
to_create.append(new)
continue
# otherwise update user (if there is anything to update)
user = current.user_ids
if len(user) != 1:
_logger.warning("Got %d users for partner %s.", len(user), current.display_name)
user = user[:1]
update_vals = {
k: v
for k, v in new.items()
if v not in ('login', 'email')
if v != (user[k] if k != 'oauth_provider_id' else user[k].id)
}
if update_vals:
user.write(update_vals)
updated += 1
if to_create:
# only create 100 users at a time to avoid request timeout
Users.create(to_create[:100])
created = len(to_create[:100])
_logger.info("Provisioning: created %d updated %d.", created, updated)
return [created, updated]
@from_role('accounts')
@route(['/runbot_merge/get_reviewers'], type='json', auth='public')
def fetch_reviewers(self, **kwargs):
reviewers = request.env['res.partner.review'].sudo().search([
'|', ('review', '=', True), ('self_review', '=', True)
@ -20,7 +116,7 @@ class MergebotReviewerProvisioning(http.Controller):
return reviewers
@from_role('accounts')
@http.route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST'])
@route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST'])
def update_reviewers(self, github_logins, **kwargs):
partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)])
partners.write({
@ -32,3 +128,4 @@ class MergebotReviewerProvisioning(http.Controller):
partners.mapped('user_ids').write({
'groups_id': [(6, 0, [request.env.ref('base.group_portal').id])]
})
return True

View File

@ -188,7 +188,7 @@ class GH(object):
head = '<Response [%s]: %s)>' % (r.status_code, r.json() if _is_json(r) else r.text)
if head == to:
_logger.info("Sanity check ref update of %s to %s: ok", branch, to)
_logger.debug("Sanity check ref update of %s to %s: ok", branch, to)
return
_logger.warning("Sanity check ref update of %s, expected %s got %s", branch, to, head)
@ -202,10 +202,19 @@ class GH(object):
def _wait_for_update():
if not self._check_updated(branch, sha):
return
raise exceptions.FastForwardError(self._repo)
except requests.HTTPError:
raise exceptions.FastForwardError(self._repo) \
from Exception("timeout: never saw %s" % sha)
except requests.HTTPError as e:
_logger.debug('fast_forward(%s, %s, %s) -> ERROR', self._repo, branch, sha, exc_info=True)
raise exceptions.FastForwardError(self._repo)
if e.response.status_code == 422:
try:
r = e.response.json()
except Exception:
pass
else:
if isinstance(r, dict) and 'message' in r:
e = Exception(r['message'].lower())
raise exceptions.FastForwardError(self._repo) from e
def set_ref(self, branch, sha):
# force-update ref
@ -216,7 +225,7 @@ class GH(object):
status0 = r.status_code
_logger.debug(
'set_ref(update, %s, %s, %s -> %s (%s)',
'ref_set(%s, %s, %s -> %s (%s)',
self._repo, branch, sha, status0,
'OK' if status0 == 200 else r.text or r.reason
)
@ -231,30 +240,35 @@ class GH(object):
# 422 makes no sense but that's what github returns, leaving 404 just
# in case
status1 = None
if status0 in (404, 422):
# fallback: create ref
r = self('post', 'git/refs', json={
'ref': 'refs/heads/{}'.format(branch),
'sha': sha,
}, check=False)
status1 = r.status_code
_logger.debug(
'set_ref(create, %s, %s, %s) -> %s (%s)',
self._repo, branch, sha, status1,
'OK' if status1 == 201 else r.text or r.reason
)
status1 = self.create_ref(branch, sha)
if status1 == 201:
@utils.backoff(exc=AssertionError)
def _wait_for_update():
head = self._check_updated(branch, sha)
assert not head, "Sanity check ref update of %s, expected %s got %s" % (
branch, sha, head
)
return
else:
status1 = None
raise AssertionError("set_ref failed(%s, %s)" % (status0, status1))
def create_ref(self, branch, sha):
r = self('post', 'git/refs', json={
'ref': 'refs/heads/{}'.format(branch),
'sha': sha,
}, check=False)
status = r.status_code
_logger.debug(
'ref_create(%s, %s, %s) -> %s (%s)',
self._repo, branch, sha, status,
'OK' if status == 201 else r.text or r.reason
)
if status == 201:
@utils.backoff(exc=AssertionError)
def _wait_for_update():
head = self._check_updated(branch, sha)
assert not head, \
f"Sanity check ref update of {branch}, expected {sha} got {head}"
return status
def merge(self, sha, dest, message):
r = self('post', 'merges', json={
'base': dest,

View File

@ -1,10 +1,13 @@
import contextlib
import enum
import itertools
import json
import logging
import time
from odoo import models, fields, api
from odoo.exceptions import UserError
from odoo.addons.runbot_merge.exceptions import FastForwardError
_logger = logging.getLogger(__name__)
class FreezeWizard(models.Model):
@ -12,36 +15,90 @@ class FreezeWizard(models.Model):
_description = "Wizard for freezing a project('s master)"
project_id = fields.Many2one('runbot_merge.project', required=True)
errors = fields.Text(compute='_compute_errors')
branch_name = fields.Char(required=True, help="Name of the new branches to create")
release_pr_ids = fields.One2many(
'runbot_merge.project.freeze.prs', 'wizard_id',
string="Release pull requests",
help="Pull requests used as tips for the freeze branches, one per repository"
)
required_pr_ids = fields.Many2many(
'runbot_merge.pull_requests', string="Required Pull Requests",
domain="[('state', 'not in', ('closed', 'merged'))]",
help="Pull requests which must have been merged before the freeze is allowed",
)
errors = fields.Text(compute='_compute_errors')
release_label = fields.Char()
release_pr_ids = fields.One2many(
'runbot_merge.project.freeze.prs', 'wizard_id',
string="Release pull requests",
help="Pull requests used as tips for the freeze branches, "
"one per repository"
)
bump_label = fields.Char()
bump_pr_ids = fields.One2many(
'runbot_merge.project.freeze.bumps', 'wizard_id',
string="Bump pull requests",
help="Pull requests used as tips of the frozen-off branches, "
"one per repository"
)
_sql_constraints = [
('unique_per_project', 'unique (project_id)',
"There should be only one ongoing freeze per project"),
]
@api.onchange('release_label')
def _onchange_release_label(self):
prs = self.env['runbot_merge.pull_requests'].search([
('label', '=', self.release_label)
])
for release_pr in self.release_pr_ids:
release_pr.pd_id = next((
p.id for p in prs
if p.repository == release_pr.repository_id
), False)
@api.onchange('release_pr_ids')
def _onchange_release_prs(self):
labels = {p.pr_id.label for p in self.release_pr_ids}
self.release_label = len(labels) == 1 and labels.pop()
@api.onchange('bump_label')
def _onchange_bump_label(self):
prs = self.env['runbot_merge.pull_requests'].search([
('label', '=', self.bump_label)
])
for bump_pr in self.bump_pr_ids:
bump_pr.pd_id = next((
p.id for p in prs
if p.repository == bump_pr.repository_id
), False)
@api.onchange('bump_pr_ids')
def _onchange_bump_prs(self):
labels = {p.pr_id.label for p in self.bump_pr_ids}
self.bump_label = len(labels) == 1 and labels.pop()
@api.depends('release_pr_ids.pr_id.label', 'required_pr_ids.state')
def _compute_errors(self):
errors = []
without = self.release_pr_ids.filtered(lambda p: not p.pr_id)
if without:
errors.append("* Every repository must have a release PR, missing release PRs for %s." % ', '.join(
p.repository_id.name for p in without
without.mapped('repository_id.name')
))
labels = set(self.mapped('release_pr_ids.pr_id.label'))
if len(labels) != 1:
errors.append("* All release PRs must have the same label, found %r." % ', '.join(sorted(labels)))
non_squash = self.mapped('release_pr_ids.pr_id').filtered(lambda p: not p.squash)
if non_squash:
errors.append("* Release PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))
bump_labels = set(self.mapped('bump_pr_ids.pr_id.label'))
if len(bump_labels) > 1:
errors.append("* All bump PRs must have the same label, found %r" % ', '.join(sorted(bump_labels)))
non_squash = self.mapped('bump_pr_ids.pr_id').filtered(lambda p: not p.squash)
if non_squash:
errors.append("* Bump PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))
unready = sum(p.state not in ('closed', 'merged') for p in self.required_pr_ids)
if unready:
@ -73,6 +130,14 @@ class FreezeWizard(models.Model):
if self.errors:
return self.action_open()
conflict_crons = self.env.ref('runbot_merge.merge_cron') | self.env.ref('runbot_merge.staging_cron')
# we don't want to run concurrently to the crons above, though we
# don't need to prevent read access to them
self.env.cr.execute(
'SELECT * FROM ir_cron WHERE id =ANY(%s) FOR SHARE NOWAIT',
[conflict_crons.ids]
)
project_id = self.project_id
# need to create the new branch, but at the same time resequence
# everything so the new branch is the second one, just after the branch
@ -86,47 +151,136 @@ class FreezeWizard(models.Model):
'sequence': next(seq),
})
]
for s, b in zip(seq, rest):
commands.append((1, b.id, {'sequence': s}))
commands.extend((1, b.id, {'sequence': s}) for s, b in zip(seq, rest))
project_id.branch_ids = commands
master_name = master.name
# update release PRs to get merged on the newly created branch
new_branch = project_id.branch_ids - master - rest
self.release_pr_ids.mapped('pr_id').write({'target': new_branch.id, 'priority': 0})
gh_sessions = {r: r.github() for r in self.project_id.repo_ids}
# create new branch on every repository
errors = []
repository = None
# prep new branch (via tmp refs) on every repo
rel_heads = {}
# store for master heads as odds are high the bump pr(s) will be on the
# same repo as one of the release PRs
prevs = {}
for rel in self.release_pr_ids:
repository = rel.repository_id
gh = repository.github()
# annoyance: can't directly alias a ref to an other ref, need to
# resolve the "old" branch explicitely
prev = gh('GET', f'git/refs/heads/{master.name}')
if not prev.ok:
errors.append(f"Unable to resolve branch {master.name} of repository {repository.name} to a commit.")
break
new_branch = gh('POST', 'git/refs', json={
'ref': 'refs/heads/' + self.branch_name,
'sha': prev.json()['object']['sha'],
}, check=False)
if not new_branch.ok:
err = new_branch.json()['message']
errors.append(f"Unable to create branch {master.name} of repository {repository.name}: {err}.")
break
repo_id = rel.repository_id
gh = gh_sessions[repo_id]
try:
prev = prevs[repo_id] = gh.head(master_name)
except Exception:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
# create the tmp branch to merge the PR into
tmp_branch = f'tmp.{self.branch_name}'
try:
gh.set_ref(tmp_branch, prev)
except Exception as err:
raise UserError(f"Unable to create branch {self.branch_name} of repository {repo_id.name}: {err}.")
rel_heads[repo_id], _ = gh.rebase(rel.pr_id.number, tmp_branch)
time.sleep(1)
# if an error occurred during creation, try to clean up then raise error
if errors:
for r in self.release_pr_ids:
if r.repository_id == repository:
# prep bump
bump_heads = {}
for bump in self.bump_pr_ids:
repo_id = bump.repository_id
gh = gh_sessions[repo_id]
try:
prev = prevs[repo_id] = prevs.get(repo_id) or gh.head(master_name)
except Exception:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
# create the tmp branch to merge the PR into
tmp_branch = f'tmp.{master_name}'
try:
gh.set_ref(tmp_branch, prev)
except Exception as err:
raise UserError(f"Unable to create branch {master_name} of repository {repo_id.name}: {err}.")
bump_heads[repo_id], _ = gh.rebase(bump.pr_id.number, tmp_branch)
time.sleep(1)
deployed = {}
# at this point we've got a bunch of tmp branches with merged release
# and bump PRs, it's time to update the corresponding targets
to_delete = [] # release prs go on new branches which we try to delete on failure
to_revert = [] # bump prs go on new branch which we try to revert on failure
failure = None
for rel in self.release_pr_ids:
repo_id = rel.repository_id
# helper API currently has no API to ensure we're just creating a
# new branch (as cheaply as possible) so do it by hand
status = None
with contextlib.suppress(Exception):
status = gh_sessions[repo_id].create_ref(self.branch_name, rel_heads[repo_id])
deployed[rel.pr_id.id] = rel_heads[repo_id]
to_delete.append(repo_id)
if status != 201:
failure = ('create', repo_id.name, self.branch_name)
break
else: # all release deployments succeeded
for bump in self.bump_pr_ids:
repo_id = bump.repository_id
try:
gh_sessions[repo_id].fast_forward(master_name, bump_heads[repo_id])
deployed[bump.pr_id.id] = bump_heads[repo_id]
to_revert.append(repo_id)
except FastForwardError:
failure = ('fast-forward', repo_id.name, master_name)
break
deletion = r.repository_id.github().delete(f'git/refs/heads/{self.branch_name}')
if failure:
addendums = []
# creating the branch failed, try to delete all previous branches
failures = []
for prev_id in to_revert:
revert = gh_sessions[prev_id]('PATCH', f'git/refs/heads/{master_name}', json={
'sha': prevs[prev_id],
'force': True
}, check=False)
if not revert.ok:
failures.append(prev_id.name)
if failures:
addendums.append(
"Subsequently unable to revert branches created in %s." % \
', '.join(failures)
)
failures.clear()
for prev_id in to_delete:
deletion = gh_sessions[prev_id]('DELETE', f'git/refs/heads/{self.branch_name}', check=False)
if not deletion.ok:
errors.append(f"Consequently unable to delete branch {self.branch_name} of repository {r.name}.")
time.sleep(1)
raise UserError('\n'.join(errors))
failures.append(prev_id.name)
if failures:
addendums.append(
"Subsequently unable to delete branches created in %s." % \
", ".join(failures)
)
failures.clear()
if addendums:
addendum = '\n\n' + '\n'.join(addendums)
else:
addendum = ''
reason, repo, branch = failure
raise UserError(
f"Unable to {reason} branch {repo}:{branch}.{addendum}"
)
all_prs = self.release_pr_ids.pr_id | self.bump_pr_ids.pr_id
all_prs.state = 'merged'
self.env['runbot_merge.pull_requests.feedback'].create([{
'repository': pr.repository.id,
'pull_request': pr.number,
'close': True,
'message': json.dumps({
'sha': deployed[pr.id],
'base': self.branch_name if pr in self.release_pr_ids.pr_id else None
})
} for pr in all_prs])
# delete wizard
self.sudo().unlink()
@ -155,6 +309,31 @@ class ReleasePullRequest(models.Model):
domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
string="Release Pull Request",
)
label = fields.Char(related='pr_id.label')
def write(self, vals):
# only the pr should be writeable after initial creation
assert 'wizard_id' not in vals
assert 'repository_id' not in vals
# and if the PR gets set, it should match the requested repository
if 'pr_id' in vals:
assert self.env['runbot_merge.pull_requests'].browse(vals['pr_id'])\
.repository == self.repository_id
return super().write(vals)
class BumpPullRequest(models.Model):
_name = 'runbot_merge.project.freeze.bumps'
_description = "links to pull requests used to \"bump\" the development branches"
wizard_id = fields.Many2one('runbot_merge.project.freeze', required=True, index=True, ondelete='cascade')
repository_id = fields.Many2one('runbot_merge.repository', required=True)
pr_id = fields.Many2one(
'runbot_merge.pull_requests',
domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
string="Release Pull Request",
)
label = fields.Char(related='pr_id.label')
def write(self, vals):
# only the pr should be writeable after initial creation

View File

@ -23,12 +23,42 @@
</group>
</group>
<group>
<group colspan="2">
<group colspan="2" string="Release">
<p>
Release (freeze) PRs, provide the first commit
of the new branches. Each PR must have a single
commit.
</p>
<p class="alert alert-warning" role="alert">
These PRs will be merged directly, not staged.
</p>
<field name="release_label"/>
<field name="release_pr_ids" nolabel="1">
<tree editable="bottom" create="false">
<field name="repository_id" readonly="1"/>
<field name="pr_id" options="{'no_create': True}"
context="{'pr_include_title': 1}"/>
<field name="label"/>
</tree>
</field>
</group>
</group>
<group>
<group colspan="2" string="Bump">
<p>
Bump PRs, provide the first commit of the source
branches after the release has been cut.
</p>
<p class="alert alert-warning" role="alert">
These PRs will be merged directly, not staged.
</p>
<field name="bump_label"/>
<field name="bump_pr_ids" nolabel="1">
<tree editable="bottom" create="false">
<field name="repository_id" readonly="1"/>
<field name="pr_id" options="{'no_create': True}"
context="{'pr_include_title': 1}"/>
<field name="label"/>
</tree>
</field>
</group>

View File

@ -3,6 +3,7 @@
import ast
import base64
import collections
import contextlib
import datetime
import io
import itertools
@ -116,7 +117,7 @@ All substitutions are tentatively applied sequentially to the input.
feedback({
'repository': self.id,
'pull_request': number,
'message': "I'm sorry. Branch `{}` is not within my remit.".format(pr['base']['ref']),
'message': "Branch `{}` is not within my remit, imma just ignore it.".format(pr['base']['ref']),
})
return
@ -142,9 +143,11 @@ All substitutions are tentatively applied sequentially to the input.
feedback({
'repository': self.id,
'pull_request': number,
'message': "Sorry, I didn't know about this PR and had to retrieve "
"its information, you may have to re-approve it."
'message': "%sI didn't know about this PR and had to retrieve "
"its information, you may have to re-approve it as "
"I didn't see previous commands." % pr_id.ping()
})
sender = {'login': self.project_id.github_prefix}
# init the PR to the null commit so we can later synchronise it back
# back to the "proper" head while resetting reviews
controllers.handle_pr(self.env, {
@ -154,6 +157,7 @@ All substitutions are tentatively applied sequentially to the input.
'head': {**pr['head'], 'sha': '0'*40},
'state': 'open',
},
'sender': sender,
})
# fetch & set up actual head
for st in gh.statuses(pr['head']['sha']):
@ -175,20 +179,21 @@ All substitutions are tentatively applied sequentially to the input.
'review': item,
'pull_request': pr,
'repository': {'full_name': self.name},
'sender': sender,
})
else:
controllers.handle_comment(self.env, {
'action': 'created',
'issue': issue,
'sender': item['user'],
'comment': item,
'repository': {'full_name': self.name},
'sender': sender,
})
# sync to real head
controllers.handle_pr(self.env, {
'action': 'synchronize',
'pull_request': pr,
'sender': {'login': self.project_id.github_prefix}
'sender': sender,
})
if pr['state'] == 'closed':
# don't go through controller because try_closing does weird things
@ -244,6 +249,23 @@ class Branch(models.Model):
self._table, ['name', 'project_id'])
return res
@api.depends('active')
def _compute_display_name(self):
    """Append an ``(inactive)`` marker to deactivated branches' names.

    Delegates to the standard computation first, then post-processes the
    display name of every archived branch so they are visibly flagged in
    relational widgets and lists.
    """
    super()._compute_display_name()
    # only archived branches get the suffix; active ones keep the default
    for b in self.filtered(lambda b: not b.active):
        b.display_name += ' (inactive)'
def write(self, vals):
    """Write branch values; on deactivation, close all open PRs.

    When a branch is explicitly disabled (``active`` set to ``False`` —
    an absent key or a truthy value does not trigger this), a feedback
    record is queued for every PR targeting the branch, closing it with
    an explanatory ping to its author/reviewer.
    """
    super().write(vals)
    # `is False` distinguishes an explicit deactivation from `active`
    # simply not being part of this write
    if vals.get('active') is False:
        self.env['runbot_merge.pull_requests.feedback'].create([{
            'repository': pr.repository.id,
            'pull_request': pr.number,
            'close': True,
            'message': f'{pr.ping()}the target branch {pr.target.name!r} has been disabled, closing this PR.',
        } for pr in self.prs])
    return True
@api.depends('staging_ids.active')
def _compute_active_staging(self):
for b in self:
@ -338,14 +360,20 @@ class Branch(models.Model):
_logger.exception("Failed to merge %s into staging branch", pr.display_name)
if first or isinstance(e, exceptions.Unmergeable):
if len(e.args) > 1 and e.args[1]:
message = e.args[1]
reason = e.args[1]
else:
message = "Unable to stage PR (%s)" % e.__context__
reason = e.__context__
# if the reason is a json document, assume it's a github
# error and try to extract the error message to give it to
# the user
with contextlib.suppress(Exception):
reason = json.loads(str(reason))['message'].lower()
pr.state = 'error'
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id,
'pull_request': pr.number,
'message': message,
'message': f'{pr.ping()}unable to stage: {reason}',
})
else:
first = False
@ -534,6 +562,17 @@ class PullRequests(models.Model):
repo_name = fields.Char(related='repository.name')
message_title = fields.Char(compute='_compute_message_title')
def ping(self, author=True, reviewer=True):
    """Build a github @-mention prefix for this PR's participants.

    Returns ``'@author @reviewer '`` (trailing space included so it can
    be prepended directly to a message), restricted to the roles enabled
    via the *author* / *reviewer* flags. Empty string when neither
    partner is set.
    """
    empty = self.env['res.partner']
    targets = (self.author if author else empty) \
            | (self.reviewed_by if reviewer else empty)
    mentions = [f'@{partner.github_login}' for partner in targets if partner]
    if not mentions:
        return ''
    # trailing space so callers can write f'{pr.ping()}message...'
    return ' '.join(mentions) + ' '
@api.depends('repository.name', 'number')
def _compute_url(self):
base = werkzeug.urls.url_parse(self.env['ir.config_parameter'].sudo().get_param('web.base.url', 'http://localhost:8069'))
@ -791,30 +830,31 @@ class PullRequests(models.Model):
msgs = []
for command, param in commands:
ok = False
msg = []
msg = None
if command == 'retry':
if is_author:
if self.state == 'error':
ok = True
self.state = 'ready'
else:
msg = "Retry makes no sense when the PR is not in error."
msg = "retry makes no sense when the PR is not in error."
elif command == 'check':
if is_author:
self.env['runbot_merge.fetch_job'].create({
'repository': self.repository.id,
'number': self.number,
})
ok = True
elif command == 'review':
if self.draft:
msg = "Draft PRs can not be approved."
msg = "draft PRs can not be approved."
elif param and is_reviewer:
oldstate = self.state
newstate = RPLUS.get(self.state)
if not author.email:
msg = "I must know your email before you can review PRs. Please contact an administrator."
elif not newstate:
msg = "This PR is already reviewed, reviewing it again is useless."
msg = "this PR is already reviewed, reviewing it again is useless."
else:
self.state = newstate
self.reviewed_by = author
@ -831,7 +871,7 @@ class PullRequests(models.Model):
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': "@{}, you may want to rebuild or fix this PR as it has failed CI.".format(author.github_login),
'message': "@{} you may want to rebuild or fix this PR as it has failed CI.".format(login),
})
elif not param and is_author:
newstate = RMINUS.get(self.state)
@ -845,7 +885,7 @@ class PullRequests(models.Model):
'pull_request': self.number,
'message': "PR priority reset to 1, as pull requests with priority 0 ignore review state.",
})
self.unstage("unreview (r-) by %s", author.github_login)
self.unstage("unreviewed (r-) by %s", login)
ok = True
else:
msg = "r- makes no sense in the current PR state."
@ -874,7 +914,7 @@ class PullRequests(models.Model):
elif command == 'method':
if is_reviewer:
if param == 'squash' and not self.squash:
msg = "Squash can only be used with a single commit at this time."
msg = "squash can only be used with a single commit at this time."
else:
self.merge_method = param
ok = True
@ -882,7 +922,7 @@ class PullRequests(models.Model):
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message':"Merge method set to %s" % explanation
'message':"Merge method set to %s." % explanation
})
elif command == 'override':
overridable = author.override_rights\
@ -904,7 +944,7 @@ class PullRequests(models.Model):
c.create({'sha': self.head, 'statuses': '{}'})
ok = True
else:
msg = f"You are not allowed to override this status."
msg = "you are not allowed to override this status."
else:
# ignore unknown commands
continue
@ -919,21 +959,23 @@ class PullRequests(models.Model):
applied.append(reformat(command, param))
else:
ignored.append(reformat(command, param))
msgs.append(msg or "You can't {}.".format(reformat(command, param)))
msgs.append(msg or "you can't {}.".format(reformat(command, param)))
if msgs:
joiner = ' ' if len(msgs) == 1 else '\n- '
msgs.insert(0, "I'm sorry, @{}:".format(login))
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': joiner.join(msgs),
})
msg = []
if applied:
msg.append('applied ' + ' '.join(applied))
if ignored:
ignoredstr = ' '.join(ignored)
msg.append('ignored ' + ignoredstr)
if msgs:
msgs.insert(0, "I'm sorry, @{}.".format(login))
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': ' '.join(msgs),
})
return '\n'.join(msg)
def _pr_acl(self, user):
@ -1020,7 +1062,7 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': self.repository.id,
'pull_request': self.number,
'message': "%r failed on this reviewed PR." % ci,
'message': "%s%r failed on this reviewed PR." % (self.ping(), ci),
})
def _auto_init(self):
@ -1089,6 +1131,12 @@ class PullRequests(models.Model):
def write(self, vals):
if vals.get('squash'):
vals['merge_method'] = False
prev = None
if 'target' in vals or 'message' in vals:
prev = {
pr.id: {'target': pr.target, 'message': pr.message}
for pr in self
}
w = super().write(vals)
@ -1096,6 +1144,18 @@ class PullRequests(models.Model):
if newhead:
c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)])
self._validate(json.loads(c.statuses or '{}'))
if prev:
for pr in self:
old_target = prev[pr.id]['target']
if pr.target != old_target:
pr.unstage(
"target (base) branch was changed from %r to %r",
old_target.display_name, pr.target.display_name,
)
old_message = prev[pr.id]['message']
if pr.merge_method in ('merge', 'rebase-merge') and pr.message != old_message:
pr.unstage("merge message updated")
return w
def _check_linked_prs_statuses(self, commit=False):
@ -1139,11 +1199,9 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': r.repository.id,
'pull_request': r.number,
'message': "Linked pull request(s) {} not ready. Linked PRs are not staged until all of them are ready.".format(
', '.join(map(
'{0.display_name}'.format,
unready
))
'message': "{}linked pull request(s) {} not ready. Linked PRs are not staged until all of them are ready.".format(
r.ping(),
', '.join(map('{0.display_name}'.format, unready))
)
})
r.link_warned = True
@ -1152,6 +1210,11 @@ class PullRequests(models.Model):
# send feedback for multi-commit PRs without a merge_method (which
# we've not warned yet)
methods = ''.join(
'* `%s` to %s\n' % pair
for pair in type(self).merge_method.selection
if pair[0] != 'squash'
)
for r in self.search([
('state', '=', 'ready'),
('squash', '=', False),
@ -1161,10 +1224,9 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': r.repository.id,
'pull_request': r.number,
'message': "Because this PR has multiple commits, I need to know how to merge it:\n\n" + ''.join(
'* `%s` to %s\n' % pair
for pair in type(self).merge_method.selection
if pair[0] != 'squash'
'message': "%sbecause this PR has multiple commits, I need to know how to merge it:\n\n%s" % (
r.ping(),
methods,
)
})
r.method_warned = True
@ -1348,7 +1410,7 @@ class PullRequests(models.Model):
# else remove this batch from the split
b.split_id = False
self.staging_id.cancel(reason, *args)
self.staging_id.cancel('%s ' + reason, self.display_name, *args)
def _try_closing(self, by):
# ignore if the PR is already being updated in a separate transaction
@ -1368,11 +1430,7 @@ class PullRequests(models.Model):
''', [self.id])
self.env.cr.commit()
self.modified(['state'])
self.unstage(
"PR %s closed by %s",
self.display_name,
by
)
self.unstage("closed by %s", by)
return True
# state changes on reviews
@ -1501,20 +1559,24 @@ class Feedback(models.Model):
try:
message = f.message
if f.close:
gh.close(f.pull_request)
try:
data = json.loads(message or '')
except json.JSONDecodeError:
pass
else:
with contextlib.suppress(json.JSONDecodeError):
data = json.loads(message or '')
message = data.get('message')
if data.get('base'):
gh('PATCH', f'pulls/{f.pull_request}', json={'base': data['base']})
if f.close:
pr_to_notify = self.env['runbot_merge.pull_requests'].search([
('repository', '=', repo.id),
('number', '=', f.pull_request),
])
if pr_to_notify:
pr_to_notify._notify_merged(gh, data)
message = None
if f.close:
gh.close(f.pull_request)
if message:
gh.comment(f.pull_request, message)
except Exception:
@ -1621,6 +1683,17 @@ class Stagings(models.Model):
statuses = fields.Binary(compute='_compute_statuses')
def name_get(self):
    """Display stagings as ``id (target, state[, reason])``.

    The failure/cancellation reason is appended only when set, so
    successful or pending stagings render without a trailing comma.
    """
    names = []
    for staging in self:
        suffix = (', ' + staging.reason) if staging.reason else ''
        names.append((
            staging.id,
            "%d (%s, %s%s)" % (staging.id, staging.target.name, staging.state, suffix),
        ))
    return names
@api.depends('heads')
def _compute_statuses(self):
""" Fetches statuses associated with the various heads, returned as
@ -1725,7 +1798,7 @@ class Stagings(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id,
'pull_request': pr.number,
'message':"Staging failed: %s" % message
'message': "%sstaging failed: %s" % (pr.ping(), message),
})
self.batch_ids.write({'active': False})
@ -1988,8 +2061,9 @@ class Batch(models.Model):
gh = meta[pr.repository]['gh']
_logger.info(
"Staging pr %s for target %s; squash=%s",
pr.display_name, pr.target.name, pr.squash
"Staging pr %s for target %s; method=%s",
pr.display_name, pr.target.name,
pr.merge_method or (pr.squash and 'single') or None
)
target = 'tmp.{}'.format(pr.target.name)
@ -2027,11 +2101,11 @@ class Batch(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id,
'pull_request': pr.number,
'message': "We apparently missed an update to this PR "
'message': "%swe apparently missed an update to this PR "
"and tried to stage it in a state which "
"might not have been approved. PR has been "
"updated to %s, please check and approve or "
"re-approve." % new_head
"re-approve." % (pr.ping(), new_head)
})
return self.env['runbot_merge.batch']
@ -2097,7 +2171,7 @@ def to_status(v):
return v
return {'state': v, 'target_url': None, 'description': None}
refline = re.compile(rb'([0-9a-f]{40}) ([^\0\n]+)(\0.*)?\n$')
refline = re.compile(rb'([\da-f]{40}) ([^\0\n]+)(\0.*)?\n?$')
ZERO_REF = b'0'*40
def parse_refs_smart(read):
""" yields pkt-line data (bytes), or None for flush lines """
@ -2108,9 +2182,8 @@ def parse_refs_smart(read):
return read(length - 4)
header = read_line()
assert header == b'# service=git-upload-pack\n', header
sep = read_line()
assert sep is None, sep
assert header.rstrip() == b'# service=git-upload-pack', header
assert read_line() is None, "failed to find first flush line"
# read lines until second delimiter
for line in iter(read_line, None):
if line.startswith(ZERO_REF):

View File

@ -13,6 +13,7 @@ class CIText(fields.Char):
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(index=True)
github_login = CIText()
delegate_reviewer = fields.Many2many('runbot_merge.pull_requests')
formatted_email = fields.Char(string="commit email", compute='_rfc5322_formatted')

View File

@ -1,7 +1,8 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
access_runbot_merge_project_admin,Admin access to project,model_runbot_merge_project,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_project_freeze,Admin access to freeze wizard,model_runbot_merge_project_freeze,runbot_merge.group_admin,1,1,0,0
access_runbot_merge_project_freeze_prs,Admin access to freeze wizard prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1
access_runbot_merge_project_freeze_prs,Admin access to freeze wizard release prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1
access_runbot_merge_project_freeze_bumps,Admin access to freeze wizard bump prs,model_runbot_merge_project_freeze_bumps,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_admin,Admin access to repo,model_runbot_merge_repository,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_status_admin,Admin access to repo statuses,model_runbot_merge_repository_status,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_branch_admin,Admin access to branches,model_runbot_merge_branch,runbot_merge.group_admin,1,1,1,1

1 id name model_id:id group_id:id perm_read perm_write perm_create perm_unlink
2 access_runbot_merge_project_admin Admin access to project model_runbot_merge_project runbot_merge.group_admin 1 1 1 1
3 access_runbot_merge_project_freeze Admin access to freeze wizard model_runbot_merge_project_freeze runbot_merge.group_admin 1 1 0 0
4 access_runbot_merge_project_freeze_prs Admin access to freeze wizard prs Admin access to freeze wizard release prs model_runbot_merge_project_freeze_prs runbot_merge.group_admin 1 1 0 1
5 access_runbot_merge_project_freeze_bumps Admin access to freeze wizard bump prs model_runbot_merge_project_freeze_bumps runbot_merge.group_admin 1 1 1 1
6 access_runbot_merge_repository_admin Admin access to repo model_runbot_merge_repository runbot_merge.group_admin 1 1 1 1
7 access_runbot_merge_repository_status_admin Admin access to repo statuses model_runbot_merge_repository_status runbot_merge.group_admin 1 1 1 1
8 access_runbot_merge_branch_admin Admin access to branches model_runbot_merge_branch runbot_merge.group_admin 1 1 1 1

View File

@ -40,23 +40,34 @@ h5 { font-size: 1em; }
// mergebot layouting
.stagings {
display: flex;
align-items: stretch;
}
.stagings > li {
flex: 1;
display: flex;
align-items: stretch;
padding: 0.1em;
padding-left: 0.5em;
}
.stagings > li:not(:last-child) {
border-right: 1px solid lightgray;
}
.batch:not(:last-child) {
border-bottom: 1px solid lightgray;
}
.batch a:not(:last-of-type) a:after {
> li {
flex: 1;
// prevent content-based autosizing otherwise that's flex' starting point
width: 0;
padding: 0.1em 0.1em 0.1em 0.5em;
&:not(:last-child) {
border-right: 1px solid lightgray;
}
}
.batch {
// cut off branch names if they can't be line-wrapped and would break the
// layout, works with flex to force all columns to be at the same size
overflow: hidden;
text-overflow: ellipsis;
&:not(:last-child) {
border-bottom: 1px solid lightgray;
}
}
.batch a:not(:last-of-type) a:after {
content: ",";
}
}
.pr-listing > * { display: inline-block; }
.pr-awaiting { opacity: 0.8; }
@ -84,3 +95,7 @@ dl.runbot-merge-fields {
@extend .col-sm-10;
}
}
.staging-statuses {
cursor: wait;
}

View File

@ -6,7 +6,7 @@ import time
from unittest import mock
import pytest
from lxml import html
from lxml import html, etree
import odoo
from utils import _simple_init, seen, re_matches, get_partner, Commit, pr_page, to_pr, part_of
@ -26,21 +26,34 @@ def repo(env, project, make_repo, users, setreviewers):
def test_trivial_flow(env, repo, page, users, config):
# create base branch
with repo:
m = repo.make_commit(None, "initial", None, tree={'a': 'some content'})
repo.make_ref('heads/master', m)
[m] = repo.make_commits(None, Commit("initial", tree={'a': 'some content'}), ref='heads/master')
# create PR with 2 commits
c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'})
c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'})
pr = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c1,)
_, c1 = repo.make_commits(
m,
Commit('replace file contents', tree={'a': 'some other content'}),
Commit('add file', tree={'b': 'a second file'}),
ref='heads/other'
)
pr = repo.make_pr(title="gibberish", body="blahblah", target='master', head='other')
pr_id = to_pr(env, pr)
assert pr_id.state == 'opened'
env.run_crons()
assert pr.comments == [seen(env, pr, users)]
s = pr_page(page, pr).cssselect('.alert-info > ul > li')
pr_dashboard = pr_page(page, pr)
s = pr_dashboard.cssselect('.alert-info > ul > li')
assert [it.get('class') for it in s] == ['fail', 'fail', ''],\
"merge method unset, review missing, no CI"
assert dict(zip(
[e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dt')],
[e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dd')],
)) == {
'label': f"{config['github']['owner']}:other",
'head': c1,
'target': 'master',
}
with repo:
repo.post_status(c1, 'success', 'legal/cla')
@ -63,7 +76,6 @@ def test_trivial_flow(env, repo, page, users, config):
]
assert statuses == [('legal/cla', 'ok'), ('ci/runbot', 'ok')]
with repo:
pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token'])
assert pr_id.state == 'ready'
@ -443,8 +455,8 @@ def test_staging_conflict_first(env, repo, users, config, page):
assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-merge'),
seen(env, pr, users),
(users['user'], 'Merge method set to rebase and merge, using the PR as merge commit message'),
(users['user'], re_matches('^Unable to stage PR')),
(users['user'], 'Merge method set to rebase and merge, using the PR as merge commit message.'),
(users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users),
]
dangerbox = pr_page(page, pr).cssselect('.alert-danger span')
@ -566,15 +578,15 @@ def test_staging_ci_failure_single(env, repo, users, config, page):
assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-merge'),
seen(env, pr, users),
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message"),
(users['user'], 'Staging failed: ci/runbot')
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."),
(users['user'], '@%(user)s @%(reviewer)s staging failed: ci/runbot' % users)
]
dangerbox = pr_page(page, pr).cssselect('.alert-danger span')
assert dangerbox
assert dangerbox[0].text == 'ci/runbot'
def test_ff_failure(env, repo, config):
def test_ff_failure(env, repo, config, page):
""" target updated while the PR is being staged => redo staging """
with repo:
m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
@ -587,10 +599,11 @@ def test_ff_failure(env, repo, config):
repo.post_status(prx.head, 'success', 'ci/runbot')
prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token'])
env.run_crons()
assert env['runbot_merge.pull_requests'].search([
st = env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name),
('number', '=', prx.number)
]).staging_id
assert st
with repo:
m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'})
@ -603,6 +616,14 @@ def test_ff_failure(env, repo, config):
repo.post_status(staging.id, 'success', 'ci/runbot')
env.run_crons()
assert st.reason == 'update is not a fast forward'
# check that it's added as title on the staging
doc = html.fromstring(page('/runbot_merge'))
_new, prev = doc.cssselect('li.staging')
assert 'bg-gray-lighter' in prev.classes, "ff failure is ~ cancelling"
assert prev.get('title') == re_matches('fast forward failed \(update is not a fast forward\)')
assert env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name),
('number', '=', prx.number)
@ -684,7 +705,7 @@ def test_ff_failure_batch(env, repo, users, config):
}
class TestPREdition:
def test_edit(self, env, repo):
def test_edit(self, env, repo, config):
""" Editing PR:
* title (-> message)
@ -705,15 +726,29 @@ class TestPREdition:
c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'})
prx = repo.make_pr(title='title', body='body', target='master', head=c2)
repo.post_status(prx.head, 'success', 'legal/cla')
repo.post_status(prx.head, 'success', 'ci/runbot')
prx.post_comment('hansen rebase-ff r+', config['role_reviewer']['token'])
env.run_crons()
pr = env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name),
('number', '=', prx.number)
('number', '=', prx.number),
])
assert pr.state == 'ready'
st = pr.staging_id
assert st
assert pr.message == 'title\n\nbody'
with repo: prx.title = "title 2"
assert pr.message == 'title 2\n\nbody'
with repo: prx.body = None
assert pr.message == "title 2"
assert pr.staging_id, \
"message edition does not affect staging of rebased PRs"
with repo: prx.base = '1.0'
assert pr.target == branch_1
assert not pr.staging_id, "updated the base of a staged PR should have unstaged it"
assert st.reason == f"{pr.display_name} target (base) branch was changed from 'master' to '1.0'"
with repo: prx.base = '2.0'
assert not pr.exists()
@ -949,9 +984,9 @@ def test_ci_failure_after_review(env, repo, users, config):
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, prx, users),
(users['user'], "'ci/runbot' failed on this reviewed PR.".format_map(users)),
(users['user'], "'legal/cla' failed on this reviewed PR.".format_map(users)),
(users['user'], "'legal/cla' failed on this reviewed PR.".format_map(users)),
(users['user'], "@{user} @{reviewer} 'ci/runbot' failed on this reviewed PR.".format_map(users)),
(users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)),
(users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)),
]
def test_reopen_merged_pr(env, repo, config, users):
@ -995,7 +1030,7 @@ def test_reopen_merged_pr(env, repo, config, users):
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, prx, users),
(users['user'], "@%s ya silly goose you can't reopen a PR that's been merged PR." % users['other'])
(users['user'], "@%s ya silly goose you can't reopen a merged PR." % users['other'])
]
class TestNoRequiredStatus:
@ -1191,7 +1226,7 @@ class TestRetry:
(users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen retry'),
seen(env, prx, users),
(users['user'], "I'm sorry, @{}. Retry makes no sense when the PR is not in error.".format(users['reviewer'])),
(users['user'], "I'm sorry, @{reviewer}: retry makes no sense when the PR is not in error.".format_map(users)),
]
@pytest.mark.parametrize('disabler', ['user', 'other', 'reviewer'])
@ -1387,12 +1422,13 @@ class TestMergeMethod:
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, prx, users),
(users['user'], """Because this PR has multiple commits, I need to know how to merge it:
(users['user'], """@{user} @{reviewer} because this PR has multiple \
commits, I need to know how to merge it:
* `merge` to merge directly, using the PR as merge commit message
* `rebase-merge` to rebase and merge, using the PR as merge commit message
* `rebase-ff` to rebase and fast-forward
"""),
""".format_map(users)),
]
def test_pr_method_no_review(self, repo, env, users, config):
@ -1432,11 +1468,11 @@ class TestMergeMethod:
assert prx.comments == [
(users['reviewer'], 'hansen rebase-merge'),
seen(env, prx, users),
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message"),
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."),
(users['reviewer'], 'hansen merge'),
(users['user'], "Merge method set to merge directly, using the PR as merge commit message"),
(users['user'], "Merge method set to merge directly, using the PR as merge commit message."),
(users['reviewer'], 'hansen rebase-ff'),
(users['user'], "Merge method set to rebase and fast-forward"),
(users['user'], "Merge method set to rebase and fast-forward."),
]
def test_pr_rebase_merge(self, repo, env, users, config):
@ -1498,6 +1534,20 @@ class TestMergeMethod:
frozenset([nm2, nb1])
)
assert staging == merge_head
st = pr_id.staging_id
assert st
with repo: prx.title = 'title 2'
assert not pr_id.staging_id, "updating the message of a merge-staged PR should unstage rien"
assert st.reason == f'{pr_id.display_name} merge message updated'
# since we updated the description, the merge_head value is impacted,
# and it's checked again later on
merge_head = (
merge_head[0].replace('title', 'title 2'),
merge_head[1],
)
env.run_crons()
assert pr_id.staging_id, "PR should immediately be re-stageable"
with repo:
repo.post_status('heads/staging.master', 'success', 'legal/cla')
@ -1990,7 +2040,7 @@ Part-of: {pr_id.display_name}"""
assert pr1.comments == [
seen(env, pr1, users),
(users['reviewer'], 'hansen r+ squash'),
(users['user'], 'Merge method set to squash')
(users['user'], 'Merge method set to squash.')
]
merged_head = repo.commit('master')
assert merged_head.message == f"""first pr
@ -2014,13 +2064,13 @@ Signed-off-by: {get_partner(env, users["reviewer"]).formatted_email}\
assert pr2.comments == [
seen(env, pr2, users),
(users['reviewer'], 'hansen r+ squash'),
(users['user'], f"I'm sorry, @{users['reviewer']}. Squash can only be used with a single commit at this time."),
(users['user'], """Because this PR has multiple commits, I need to know how to merge it:
(users['user'], f"I'm sorry, @{users['reviewer']}: squash can only be used with a single commit at this time."),
(users['user'], """@{user} @{reviewer} because this PR has multiple commits, I need to know how to merge it:
* `merge` to merge directly, using the PR as merge commit message
* `rebase-merge` to rebase and merge, using the PR as merge commit message
* `rebase-ff` to rebase and fast-forward
""")
""".format_map(users))
]
@pytest.mark.xfail(reason="removed support for squash- command")
@ -2683,14 +2733,16 @@ class TestBatching(object):
repo.make_ref('heads/master', m)
pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
p_1 = to_pr(env, pr1)
pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
p_2 = to_pr(env, pr2)
env.run_crons()
p_1 = to_pr(env, pr1)
p_2 = to_pr(env, pr2)
st = env['runbot_merge.stagings'].search([])
# both prs should be part of the staging
assert st.mapped('batch_ids.prs') == p_1 | p_2
# add CI failure
with repo:
repo.post_status('heads/staging.master', 'failure', 'ci/runbot')
@ -2824,7 +2876,7 @@ class TestReviewing(object):
(users['user'], "I'm sorry, @{}. I'm afraid I can't do that.".format(users['other'])),
(users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen r+'),
(users['user'], "I'm sorry, @{}. This PR is already reviewed, reviewing it again is useless.".format(
(users['user'], "I'm sorry, @{}: this PR is already reviewed, reviewing it again is useless.".format(
users['reviewer'])),
]
@ -2853,7 +2905,7 @@ class TestReviewing(object):
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, prx, users),
(users['user'], "I'm sorry, @{}. You can't review+.".format(users['reviewer'])),
(users['user'], "I'm sorry, @{}: you can't review+.".format(users['reviewer'])),
]
def test_self_review_success(self, env, repo, users, config):
@ -3009,7 +3061,7 @@ class TestReviewing(object):
seen(env, pr, users),
(users['reviewer'], 'hansen delegate+'),
(users['user'], 'hansen r+'),
(users['user'], f"I'm sorry, @{users['user']}. I must know your email before you can review PRs. Please contact an administrator."),
(users['user'], f"I'm sorry, @{users['user']}: I must know your email before you can review PRs. Please contact an administrator."),
]
user_partner.fetch_github_email()
assert user_partner.email
@ -3073,9 +3125,9 @@ class TestUnknownPR:
seen(env, prx, users),
(users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen r+'),
(users['user'], "Sorry, I didn't know about this PR and had to "
(users['user'], "I didn't know about this PR and had to "
"retrieve its information, you may have to "
"re-approve it."),
"re-approve it as I didn't see previous commands."),
seen(env, prx, users),
]
@ -3126,9 +3178,9 @@ class TestUnknownPR:
assert pr.comments == [
seen(env, pr, users),
(users['reviewer'], 'hansen r+'),
(users['user'], "Sorry, I didn't know about this PR and had to "
"retrieve its information, you may have to "
"re-approve it."),
(users['user'], "I didn't know about this PR and had to retrieve "
"its information, you may have to re-approve it "
"as I didn't see previous commands."),
seen(env, pr, users),
]
@ -3153,8 +3205,8 @@ class TestUnknownPR:
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
(users['user'], "This PR targets the un-managed branch %s:branch, it can not be merged." % repo.name),
(users['user'], "I'm sorry. Branch `branch` is not within my remit."),
(users['user'], "This PR targets the un-managed branch %s:branch, it needs to be retargeted before it can be merged." % repo.name),
(users['user'], "Branch `branch` is not within my remit, imma just ignore it."),
]
def test_rplus_review_unmanaged(self, env, repo, users, config):
@ -3551,7 +3603,7 @@ class TestFeedback:
assert prx.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, prx, users),
(users['user'], "'ci/runbot' failed on this reviewed PR.")
(users['user'], "@%(user)s @%(reviewer)s 'ci/runbot' failed on this reviewed PR." % users)
]
def test_review_failed(self, repo, env, users, config):
@ -3581,9 +3633,11 @@ class TestFeedback:
assert prx.comments == [
seen(env, prx, users),
(users['reviewer'], 'hansen r+'),
(users['user'], "@%s, you may want to rebuild or fix this PR as it has failed CI." % users['reviewer'])
(users['user'], "@%s you may want to rebuild or fix this PR as it has failed CI." % users['reviewer'])
]
class TestInfrastructure:
@pytest.mark.skip(reason="Don't want to implement")
def test_protection(self, repo):
""" force-pushing on a protected ref should fail
"""

View File

@ -1,6 +1,6 @@
from utils import seen, Commit
from utils import seen, Commit, pr_page
def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, config, users):
def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, config, users, page):
""" PRs to disabled branches are ignored, but what if the PR exists *before*
the branch is disabled?
"""
@ -13,7 +13,8 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
repo_id = env['runbot_merge.repository'].create({
'project_id': project.id,
'name': repo.name,
'status_ids': [(0, 0, {'context': 'status'})]
'status_ids': [(0, 0, {'context': 'status'})],
'group_id': False,
})
setreviewers(*project.repo_ids)
@ -25,42 +26,57 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
[c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'}))
pr = repo.make_pr(title="title", body='body', target='other', head=c)
repo.post_status(c, 'success', 'status')
pr.post_comment('hansen r+', config['role_reviewer']['token'])
env.run_crons()
pr_id = env['runbot_merge.pull_requests'].search([
('repository', '=', repo_id.id),
('number', '=', pr.number),
])
branch_id = pr_id.target
assert pr_id.staging_id
staging_id = branch_id.active_staging_id
assert staging_id == pr_id.staging_id
# disable branch "other"
project.branch_ids.filtered(lambda b: b.name == 'other').active = False
# r+ the PR
with repo:
pr.post_comment('hansen r+', config['role_reviewer']['token'])
branch_id.active = False
env.run_crons()
# nothing should happen, the PR should be unstaged forever, maybe?
assert pr_id.state == 'ready'
assert not branch_id.active_staging_id
assert staging_id.state == 'cancelled', \
"closing the PRs should have canceled the staging"
p = pr_page(page, pr)
target = dict(zip(
(e.text for e in p.cssselect('dl.runbot-merge-fields dt')),
(p.cssselect('dl.runbot-merge-fields dd'))
))['target']
assert target.text_content() == 'other (inactive)'
assert target.get('class') == 'text-muted bg-warning'
# the PR should have been closed implicitly
assert pr_id.state == 'closed'
assert not pr_id.staging_id
with repo:
pr.open()
pr.post_comment('hansen r+', config['role_reviewer']['token'])
assert pr_id.state == 'ready', "pr should be reopenable"
env.run_crons()
assert pr.comments == [
(users['reviewer'], "hansen r+"),
seen(env, pr, users),
(users['user'], "@%(user)s @%(reviewer)s the target branch 'other' has been disabled, closing this PR." % users),
(users['reviewer'], "hansen r+"),
(users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
]
with repo:
[c2] = repo.make_commits(ot, Commit('wheee', tree={'b': '3'}))
repo.update_ref(pr.ref, c2, force=True)
assert pr_id.head == c2, "pr should be aware of its update"
with repo:
pr.close()
assert pr_id.state == 'closed', "pr should be closeable"
with repo:
pr.open()
assert pr_id.state == 'opened', "pr should be reopenable (state reset due to force push"
env.run_crons()
assert pr.comments == [
(users['reviewer'], "hansen r+"),
seen(env, pr, users),
(users['user'], "This PR targets the disabled branch %s:other, it can not be merged." % repo.name),
], "reopening a PR to an inactive branch should send feedback, but not closing it"
with repo:
pr.base = 'other2'
repo.post_status(c2, 'success', 'status')
@ -96,7 +112,7 @@ def test_new_pr_no_branch(env, project, make_repo, setreviewers, users):
('number', '=', pr.number),
]), "the PR should not have been created in the backend"
assert pr.comments == [
(users['user'], "This PR targets the un-managed branch %s:other, it can not be merged." % repo.name),
(users['user'], "This PR targets the un-managed branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
]
def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users):
@ -131,45 +147,6 @@ def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users):
assert pr_id, "the PR should have been created in the backend"
assert pr_id.state == 'opened'
assert pr.comments == [
(users['user'], "This PR targets the disabled branch %s:other, it can not be merged." % repo.name),
(users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
seen(env, pr, users),
]
def test_retarget_from_disabled(env, make_repo, project, setreviewers):
""" Retargeting a PR from a disabled branch should not duplicate the PR
"""
repo = make_repo('repo')
project.write({'branch_ids': [(0, 0, {'name': '1.0'}), (0, 0, {'name': '2.0'})]})
repo_id = env['runbot_merge.repository'].create({
'project_id': project.id,
'name': repo.name,
'required_statuses': 'legal/cla,ci/runbot',
})
setreviewers(repo_id)
with repo:
[c0] = repo.make_commits(None, Commit('0', tree={'a': '0'}), ref='heads/1.0')
[c1] = repo.make_commits(c0, Commit('1', tree={'a': '1'}), ref='heads/2.0')
repo.make_commits(c1, Commit('2', tree={'a': '2'}), ref='heads/master')
# create PR on 1.0
repo.make_commits(c0, Commit('c', tree={'a': '0', 'b': '0'}), ref='heads/changes')
prx = repo.make_pr(head='changes', target='1.0')
branch_1 = project.branch_ids.filtered(lambda b: b.name == '1.0')
# there should only be a single PR in the system at this point
[pr] = env['runbot_merge.pull_requests'].search([])
assert pr.target == branch_1
# branch 1 is EOL, disable it
branch_1.active = False
with repo:
# we forgot we had active PRs for it, and we may want to merge them
# still, retarget them!
prx.base = '2.0'
# check that we still only have one PR in the system
[pr_] = env['runbot_merge.pull_requests'].search([])
# and that it's the same as the old one, just edited with a new target
assert pr_ == pr
assert pr.target == project.branch_ids.filtered(lambda b: b.name == '2.0')

View File

@ -7,12 +7,13 @@ are staged concurrently in all repos
"""
import json
import time
import xmlrpc.client
import pytest
import requests
from lxml.etree import XPath, tostring
from lxml.etree import XPath
from utils import seen, re_matches, get_partner, pr_page, to_pr, Commit
from utils import seen, get_partner, pr_page, to_pr, Commit
@pytest.fixture
@ -429,7 +430,7 @@ def test_merge_fail(env, project, repo_a, repo_b, users, config):
assert pr1b.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr1b, users),
(users['user'], re_matches('^Unable to stage PR')),
(users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users),
]
other = to_pr(env, pr1a)
reviewer = get_partner(env, users["reviewer"]).formatted_email
@ -527,14 +528,22 @@ class TestCompanionsNotReady:
assert p_a.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, p_a, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)),
(users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready." % (
users['user'],
users['reviewer'],
pr_b.display_name,
)),
]
# ensure the message is only sent once per PR
env.run_crons('runbot_merge.check_linked_prs_status')
assert p_a.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, p_a, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)),
(users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready." % (
users['user'],
users['reviewer'],
pr_b.display_name,
)),
]
assert p_b.comments == [seen(env, p_b, users)]
@ -570,7 +579,8 @@ class TestCompanionsNotReady:
assert pr_b.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr_b, users),
(users['user'], "Linked pull request(s) %s#%d, %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
(users['user'], "@%s @%s linked pull request(s) %s#%d, %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number,
repo_c.name, pr_c.number
))
@ -609,7 +619,8 @@ class TestCompanionsNotReady:
assert pr_b.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr_b, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
(users['user'], "@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number
))
]
@ -617,7 +628,8 @@ class TestCompanionsNotReady:
(users['reviewer'], 'hansen r+'),
seen(env, pr_c, users),
(users['user'],
"Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
"@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number
))
]
@ -655,7 +667,10 @@ def test_other_failed(env, project, repo_a, repo_b, users, config):
assert pr_a.comments == [
(users['reviewer'], 'hansen r+'),
seen(env, pr_a, users),
(users['user'], 'Staging failed: ci/runbot on %s (view more at http://example.org/b)' % sth)
(users['user'], '@%s @%s staging failed: ci/runbot on %s (view more at http://example.org/b)' % (
users['user'], users['reviewer'],
sth
))
]
class TestMultiBatches:
@ -670,12 +685,16 @@ class TestMultiBatches:
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [(
a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)),
b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],))
a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
)
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
env.run_crons()
prs = [
(a and to_pr(env, a), b and to_pr(env, b))
for (a, b) in prs
]
st = env['runbot_merge.stagings'].search([])
assert st
@ -699,12 +718,16 @@ class TestMultiBatches:
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [(
a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)),
b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],))
a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
)
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
env.run_crons()
prs = [
(a and to_pr(env, a), b and to_pr(env, b))
for (a, b) in prs
]
st0 = env['runbot_merge.stagings'].search([])
assert len(st0.batch_ids) == 5
@ -916,7 +939,8 @@ class TestSubstitutions:
'label': original,
'sha': format(pr_number, 'x')*40,
}
}
},
'sender': {'login': 'pytest'}
}
)
pr = env['runbot_merge.pull_requests'].search([
@ -1081,6 +1105,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
* have a project with 3 repos, and two branches (1.0 and master) each
* have 2 PRs required for the freeze
* prep 3 freeze PRs
* prep 1 bump PR
* trigger the freeze wizard
* trigger it again (check that the same object is returned, there should
only be one freeze per project at a time)
@ -1099,49 +1124,13 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
(0, 0, {'name': '1.0', 'sequence': 2}),
]
masters = []
for r in [repo_a, repo_b, repo_c]:
with r:
[root, _] = r.make_commits(
None,
Commit('base', tree={'version': '', 'f': '0'}),
Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
ref='heads/1.0'
)
masters.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
# have 2 PRs required for the freeze
with repo_a:
repo_a.make_commits(masters[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
pr_required_a = repo_a.make_pr(target='master', head='apr')
with repo_c:
repo_c.make_commits(masters[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
pr_required_c = repo_c.make_pr(target='master', head='cpr')
# have 3 release PRs, only the first one updates the tree (version file)
with repo_a:
repo_a.make_commits(
masters[0],
Commit('Release 1.1 (A)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
with repo_b:
repo_b.make_commits(
masters[1],
Commit('Release 1.1 (B)', tree=None),
ref='heads/release-1.1'
)
pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
with repo_c:
repo_c.make_commits(masters[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
pr_other = repo_c.make_pr(target='master', head='whocares')
repo_c.make_commits(
masters[2],
Commit('Release 1.1 (C)', tree=None),
ref='heads/release-1.1'
)
pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
[
(master_head_a, master_head_b, master_head_c),
(pr_required_a, _, pr_required_c),
(pr_rel_a, pr_rel_b, pr_rel_c),
pr_bump_a,
pr_other
] = setup_mess(repo_a, repo_b, repo_c)
env.run_crons() # process the PRs
release_prs = {
@ -1149,6 +1138,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
repo_b.name: to_pr(env, pr_rel_b),
repo_c.name: to_pr(env, pr_rel_c),
}
pr_bump_id = to_pr(env, pr_bump_a)
# trigger the ~~tree~~ freeze wizard
w = project.action_prepare_freeze()
w2 = project.action_prepare_freeze()
@ -1166,6 +1156,11 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
for r in w_id.release_pr_ids:
r.pr_id = release_prs[r.repository_id.name].id
w_id.release_pr_ids[-1].pr_id = to_pr(env, pr_other).id
# configure bump
assert not w_id.bump_pr_ids, "there is no bump pr by default"
w_id.write({'bump_pr_ids': [
(0, 0, {'repository_id': pr_bump_id.repository.id, 'pr_id': pr_bump_id.id})
]})
r = w_id.action_freeze()
assert r == w, "the freeze is not ready so the wizard should redirect to itself"
owner = repo_c.owner
@ -1210,15 +1205,34 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
assert r['res_model'] == 'runbot_merge.project'
assert r['res_id'] == project.id
env.run_crons() # stage the release prs
for repo in [repo_a, repo_b, repo_c]:
with repo:
repo.post_status('staging.1.1', 'success', 'ci/runbot')
repo.post_status('staging.1.1', 'success', 'legal/cla')
env.run_crons() # get the release prs merged
# stuff that's done directly
for pr_id in release_prs.values():
assert pr_id.state == 'merged'
assert pr_bump_id.state == 'merged'
# stuff that's behind a cron
env.run_crons()
assert pr_rel_a.state == "closed"
assert pr_rel_a.base['ref'] == '1.1'
assert pr_rel_b.state == "closed"
assert pr_rel_b.base['ref'] == '1.1'
assert pr_rel_c.state == "closed"
assert pr_rel_c.base['ref'] == '1.1'
for pr_id in release_prs.values():
assert pr_id.target.name == '1.1'
assert pr_id.state == 'merged'
assert pr_bump_a.state == 'closed'
assert pr_bump_a.base['ref'] == 'master'
assert pr_bump_id.target.name == 'master'
m_a = repo_a.commit('master')
assert m_a.message.startswith('Bump A')
assert repo_a.read_tree(m_a) == {
'f': '1', # from master
'g': 'x', # from required PR (merged into master before forking)
'version': '1.2-alpha', # from bump PR
}
c_a = repo_a.commit('1.1')
assert c_a.message.startswith('Release 1.1 (A)')
@ -1229,19 +1243,71 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
}
c_a_parent = repo_a.commit(c_a.parents[0])
assert c_a_parent.message.startswith('super important file')
assert c_a_parent.parents[0] == masters[0]
assert c_a_parent.parents[0] == master_head_a
c_b = repo_b.commit('1.1')
assert c_b.message.startswith('Release 1.1 (B)')
assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''}
assert c_b.parents[0] == masters[1]
assert c_b.parents[0] == master_head_b
c_c = repo_c.commit('1.1')
assert c_c.message.startswith('Release 1.1 (C)')
assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''}
assert repo_c.commit(c_c.parents[0]).parents[0] == masters[2]
assert repo_c.commit(c_c.parents[0]).parents[0] == master_head_c
def setup_mess(repo_a, repo_b, repo_c):
master_heads = []
for r in [repo_a, repo_b, repo_c]:
with r:
[root, _] = r.make_commits(
None,
Commit('base', tree={'version': '', 'f': '0'}),
Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
ref='heads/1.0'
)
master_heads.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
# have 2 PRs required for the freeze
with repo_a:
repo_a.make_commits(master_heads[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
pr_required_a = repo_a.make_pr(target='master', head='apr')
with repo_c:
repo_c.make_commits(master_heads[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
pr_required_c = repo_c.make_pr(target='master', head='cpr')
# have 3 release PRs, only the first one updates the tree (version file)
with repo_a:
repo_a.make_commits(
master_heads[0],
Commit('Release 1.1 (A)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
with repo_b:
repo_b.make_commits(
master_heads[1],
Commit('Release 1.1 (B)', tree=None),
ref='heads/release-1.1'
)
pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
with repo_c:
repo_c.make_commits(master_heads[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
pr_other = repo_c.make_pr(target='master', head='whocares')
repo_c.make_commits(
master_heads[2],
Commit('Release 1.1 (C)', tree=None),
ref='heads/release-1.1'
)
pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
# have one bump PR on repo A
with repo_a:
repo_a.make_commits(
master_heads[0],
Commit("Bump A", tree={'version': '1.2-alpha'}),
ref='heads/bump-1.1',
)
pr_bump_a = repo_a.make_pr(target='master', head='bump-1.1')
return master_heads, (pr_required_a, None, pr_required_c), (pr_rel_a, pr_rel_b, pr_rel_c), pr_bump_a, pr_other
def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
"""It should be possible to only freeze a subset of a project when e.g. one
of the repository is managed differently than the rest and has
@ -1312,3 +1378,61 @@ def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
except AssertionError:
...
# can't stage because we (wilfully) don't have branches 1.1 in repos B and C
@pytest.mark.skip("env's session is not thread-safe sadface")
def test_race_conditions():
"""need the ability to dup the env in order to send concurrent requests to
the inner odoo
- try to run the action_freeze during a cron (merge or staging), should
error (recover and return nice message?)
- somehow get ahead of the action and update master's commit between moment
where it is fetched and moment where the bump pr is fast-forwarded,
there's actually a bit of time thanks to the rate limiting (fetch of base,
update of tmp to base, rebase of commits on tmp, wait 1s, for each release
and bump PR, then the release branches are created, and finally the bump
prs)
"""
...
def test_freeze_conflict(env, project, repo_a, repo_b, repo_c, users, config):
"""If one of the branches we're trying to create already exists, the wizard
fails.
"""
project.branch_ids = [
(1, project.branch_ids.id, {'sequence': 1}),
(0, 0, {'name': '1.0', 'sequence': 2}),
]
heads, _, (pr_rel_a, pr_rel_b, pr_rel_c), bump, other = \
setup_mess(repo_a, repo_b, repo_c)
env.run_crons()
release_prs = {
repo_a.name: to_pr(env, pr_rel_a),
repo_b.name: to_pr(env, pr_rel_b),
repo_c.name: to_pr(env, pr_rel_c),
}
w = project.action_prepare_freeze()
w_id = env[w['res_model']].browse([w['res_id']])
for repo, release_pr in release_prs.items():
w_id.release_pr_ids\
.filtered(lambda r: r.repository_id.name == repo)\
.pr_id = release_pr.id
# create conflicting branch
with repo_c:
repo_c.make_ref('heads/1.1', heads[2])
# actually perform the freeze
with pytest.raises(xmlrpc.client.Fault) as e:
w_id.action_freeze()
assert f"Unable to create branch {repo_c.name}:1.1" in e.value.faultString
# branches a and b should have been deleted
with pytest.raises(AssertionError) as e:
repo_a.get_ref('heads/1.1')
assert e.value.args[0].startswith("Not Found")
with pytest.raises(AssertionError) as e:
repo_b.get_ref('heads/1.1')
assert e.value.args[0].startswith("Not Found")

View File

@ -1,11 +1,10 @@
import requests
from utils import Commit, to_pr
def test_partner_merge(env):
p_src = env['res.partner'].create({
'name': 'kfhsf',
'github_login': 'tyu'
}) | env['res.partner'].create({
'name': "xxx",
'github_login': 'xxx'
})
@ -97,3 +96,36 @@ def test_message_desync(env, project, make_repo, users, setreviewers, config):
assert st.message.startswith('title\n\nbody'),\
"the stored PR message should have been ignored when staging"
assert st.parents == [m, c], "check the staging's ancestry is the right one"
def test_unreviewer(env, project, port):
repo = env['runbot_merge.repository'].create({
'project_id': project.id,
'name': 'a_test_repo',
'status_ids': [(0, 0, {'context': 'status'})]
})
p = env['res.partner'].create({
'name': 'George Pearce',
'github_login': 'emubitch',
'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})]
})
r = requests.post(f'http://localhost:{port}/runbot_merge/get_reviewers', json={
'jsonrpc': '2.0',
'id': None,
'method': 'call',
'params': {},
})
r.raise_for_status()
assert 'error' not in r.json()
assert r.json()['result'] == ['emubitch']
r = requests.post(f'http://localhost:{port}/runbot_merge/remove_reviewers', json={
'jsonrpc': '2.0',
'id': None,
'method': 'call',
'params': {'github_logins': ['emubitch']},
})
r.raise_for_status()
assert 'error' not in r.json()
assert p.review_rights == env['res.partner.review']

View File

@ -0,0 +1,102 @@
import pytest
import requests
GEORGE = {
'name': "George Pearce",
'email': 'george@example.org',
'github_login': 'emubitch',
'sub': '19321102'
}
def test_basic_provisioning(env, port):
r = provision_user(port, [GEORGE])
assert r == [1, 0]
g = env['res.users'].search([('login', '=', GEORGE['email'])])
assert g.partner_id.name == GEORGE['name']
assert g.partner_id.github_login == GEORGE['github_login']
assert g.oauth_uid == GEORGE['sub']
(model, g_id) = env['ir.model.data']\
.check_object_reference('base', 'group_user')
assert model == 'res.groups'
assert g.groups_id.id == g_id, "check that users were provisioned as internal (not portal)"
# repeated provisioning should be a no-op
r = provision_user(port, [GEORGE])
assert r == [0, 0]
# the email (real login) should be the determinant, any other field is
# updatable
r = provision_user(port, [{**GEORGE, 'name': "x"}])
assert r == [0, 1]
r = provision_user(port, [dict(GEORGE, name="x", github_login="y", sub="42")])
assert r == [0, 1]
# can't fail anymore because github_login now used to look up the existing
# user
# with pytest.raises(Exception):
# provision_user(port, [{
# 'name': "other@example.org",
# 'email': "x",
# 'github_login': "y",
# 'sub': "42"
# }])
r = provision_user(port, [dict(GEORGE, active=False)])
assert r == [0, 1]
assert not env['res.users'].search([('login', '=', GEORGE['email'])])
assert env['res.partner'].search([('email', '=', GEORGE['email'])])
def test_upgrade_partner(env, port):
# If a partner exists for a github login (and / or email?) it can be
# upgraded by creating a user for it
p = env['res.partner'].create({
'name': GEORGE['name'],
'email': GEORGE['email'],
})
r = provision_user(port, [GEORGE])
assert r == [1, 0]
assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
'id': p.user_ids.id,
'github_login': GEORGE['github_login'],
'oauth_uid': GEORGE['sub'],
'email': GEORGE['email'],
}]
p.user_ids.unlink()
p.unlink()
p = env['res.partner'].create({
'name': GEORGE['name'],
'github_login': GEORGE['github_login'],
})
r = provision_user(port, [GEORGE])
assert r == [1, 0]
assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
'id': p.user_ids.id,
'github_login': GEORGE['github_login'],
'oauth_uid': GEORGE['sub'],
'email': GEORGE['email'],
}]
p.user_ids.unlink()
p.unlink()
def test_no_email(env, port):
""" Provisioning system should ignore email-less entries
"""
r = provision_user(port, [{**GEORGE, 'email': None}])
assert r == [0, 0]
def provision_user(port, users):
r = requests.post(f'http://localhost:{port}/runbot_merge/provision', json={
'jsonrpc': '2.0',
'id': None,
'method': 'call',
'params': {'users': users},
})
r.raise_for_status()
json = r.json()
assert 'error' not in json
return json['result']

View File

@ -89,7 +89,7 @@ def test_basic(env, project, make_repo, users, setreviewers, config):
(users['reviewer'], 'hansen r+'),
seen(env, pr, users),
(users['reviewer'], 'hansen override=l/int'),
(users['user'], "I'm sorry, @{}. You are not allowed to override this status.".format(users['reviewer'])),
(users['user'], "I'm sorry, @{}: you are not allowed to override this status.".format(users['reviewer'])),
(users['other'], "hansen override=l/int"),
]
assert pr_id.statuses == '{}'

View File

@ -0,0 +1,43 @@
<odoo>
<record id="action_overrides" model="ir.actions.act_window">
<field name="name">CI / statuses overrides</field>
<field name="res_model">res.partner.override</field>
</record>
<record id="tree_overrides" model="ir.ui.view">
<field name="name">Overrides List</field>
<field name="model">res.partner.override</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="context"/>
<field name="repository_id"/>
<field name="partner_ids" widget="many2many_tags"/>
</tree>
</field>
</record>
<record id="action_review" model="ir.actions.act_window">
<field name="name">Review Rights</field>
<field name="res_model">res.partner.review</field>
<field name="context">{'search_default_group_by_repository': True}</field>
</record>
<record id="tree_review" model="ir.ui.view">
<field name="name">Review Rights</field>
<field name="model">res.partner.review</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="repository_id"/>
<field name="partner_id"/>
<field name="review"/>
<field name="self_review"/>
</tree>
</field>
</record>
<menuitem name="Configuration" id="menu_configuration" parent="runbot_merge_menu"/>
<menuitem name="CI Overrides" id="menu_configuration_overrides"
parent="menu_configuration"
action="action_overrides"/>
<menuitem name="Review Rights" id="menu_configuration_review"
parent="menu_configuration"
action="action_review"/>
</odoo>

View File

@ -77,6 +77,8 @@
<field name="number"/>
<field name="target"/>
<field name="state"/>
<field name="author"/>
<field name="reviewed_by"/>
</tree>
</field>
</record>
@ -97,10 +99,10 @@
<field name="target"/>
<field name="state"/>
<field name="author"/>
<field name="priority"/>
</group>
<group>
<field name="label"/>
<field name="priority"/>
<field name="squash"/>
</group>
</group>
@ -108,6 +110,8 @@
<group colspan="4">
<field name="head"/>
<field name="statuses"/>
</group>
<group colspan="4">
<field name="overrides"/>
</group>
</group>
@ -196,7 +200,8 @@
<group string="Batches">
<field name="batch_ids" colspan="4" nolabel="1">
<tree>
<field name="prs"/>
<field name="prs" widget="many2many_tags"
options="{'no_quick_create': True}"/>
</tree>
</field>
</group>
@ -205,66 +210,33 @@
</field>
</record>
<record id="runbot_merge_action_fetches" model="ir.actions.act_window">
<field name="name">PRs to fetch</field>
<field name="res_model">runbot_merge.fetch_job</field>
<field name="view_mode">tree</field>
<field name="context">{'default_active': True}</field>
<record id="runbot_merge_action_commits" model="ir.actions.act_window">
<field name="name">Commit Statuses</field>
<field name="res_model">runbot_merge.commit</field>
<field name="view_mode">tree,form</field>
</record>
<record id="runbot_merge_search_fetches" model="ir.ui.view">
<field name="name">Fetches Search</field>
<field name="model">runbot_merge.fetch_job</field>
<field name="arch" type="xml">
<search>
<filter string="Active" name="active"
domain="[('active', '=', True)]"/>
<field name="repository"/>
<field name="number"/>
</search>
</field>
</record>
<record id="runbot_merge_tree_fetches" model="ir.ui.view">
<field name="name">Fetches Tree</field>
<field name="model">runbot_merge.fetch_job</field>
<record id="runbot_merge_commits_tree" model="ir.ui.view">
<field name="name">commits list</field>
<field name="model">runbot_merge.commit</field>
<field name="arch" type="xml">
<tree>
<field name="repository"/>
<field name="number"/>
</tree>
</field>
</record>
<record id="runbot_merge_action_overrides" model="ir.actions.act_window">
<field name="name">CI / statuses overrides</field>
<field name="res_model">res.partner.override</field>
</record>
<record id="runot_merge_tree_overrides" model="ir.ui.view">
<field name="name">Overrides List</field>
<field name="model">res.partner.override</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="context"/>
<field name="repository_id"/>
<field name="partner_ids" widget="many2many_tags"/>
<field name="sha"/>
<field name="statuses"/>
</tree>
</field>
</record>
<menuitem name="Mergebot" id="runbot_merge_menu"/>
<menuitem name="Projects" id="runbot_merge_menu_project"
parent="runbot_merge_menu"
action="runbot_merge_action_projects"/>
<menuitem name="Pull Requests" id="runbot_merge_menu_prs"
parent="runbot_merge_menu"
action="runbot_merge_action_prs"/>
<menuitem name="Stagings" id="runbot_merge_menu_stagings"
parent="runbot_merge_menu"
action="runbot_merge_action_stagings"/>
<menuitem name="Fetches" id="runbot_merge_menu_fetches"
parent="runbot_merge_menu"
action="runbot_merge_action_fetches"/>
<menuitem name="Configuration" id="runbot_merge_menu_configuration" parent="runbot_merge_menu"/>
<menuitem name="CI Overrides" id="runbot_merge_menu_configuration_overrides"
parent="runbot_merge_menu"
action="runbot_merge_action_overrides"/>
<menuitem name="Projects" id="runbot_merge_menu_project"
parent="runbot_merge_menu"
action="runbot_merge_action_projects"/>
<menuitem name="Pull Requests" id="runbot_merge_menu_prs"
parent="runbot_merge_menu"
action="runbot_merge_action_prs"/>
<menuitem name="Stagings" id="runbot_merge_menu_stagings"
parent="runbot_merge_menu"
action="runbot_merge_action_stagings"/>
<menuitem name="Commits" id="runbot_merge_menu_commits"
parent="runbot_merge_menu"
action="runbot_merge_action_commits"/>
</odoo>

View File

@ -0,0 +1,97 @@
<odoo>
    <!--
    Queues mergebot menu: contains various list views inspecting the cron tasks
    (mostly)
    -->
    <!-- Splits queue: runbot_merge.split records shown by id and target
         branch (presumably stagings split apart after a failure, to bisect
         the culprit; confirm against the split model). -->
    <record id="action_splits" model="ir.actions.act_window">
        <field name="name">Splits</field>
        <field name="res_model">runbot_merge.split</field>
    </record>
    <record id="tree_splits" model="ir.ui.view">
        <field name="name">Splits</field>
        <field name="model">runbot_merge.split</field>
        <field name="arch" type="xml">
            <tree>
                <field name="id"/>
                <field name="target"/>
            </tree>
        </field>
    </record>
    <!-- Feedback queue: pending messages addressed to a pull request
         (repository + PR number + message body, with an optional `close`
         flag). -->
    <record id="action_feedback" model="ir.actions.act_window">
        <field name="name">Feedback</field>
        <field name="res_model">runbot_merge.pull_requests.feedback</field>
    </record>
    <record id="tree_feedback" model="ir.ui.view">
        <field name="name">Feedback</field>
        <field name="model">runbot_merge.pull_requests.feedback</field>
        <field name="arch" type="xml">
            <tree>
                <field name="repository"/>
                <field name="pull_request"/>
                <field name="message"/>
                <field name="close"/>
            </tree>
        </field>
    </record>
    <!-- Tagging queue: pending tag additions/removals per pull request;
         rows are editable inline. -->
    <record id="action_tagging" model="ir.actions.act_window">
        <field name="name">Tagging</field>
        <field name="res_model">runbot_merge.pull_requests.tagging</field>
    </record>
    <record id="tree_tagging" model="ir.ui.view">
        <field name="name">Tagging</field>
        <field name="model">runbot_merge.pull_requests.tagging</field>
        <field name="arch" type="xml">
            <tree editable="bottom">
                <field name="repository"/>
                <field name="pull_request"/>
                <field name="tags_add"/>
                <field name="tags_remove"/>
            </tree>
        </field>
    </record>
    <!-- Fetch queue: PRs queued for (re)fetching. default_active in the
         context presumably makes records created from this view active by
         default (filters would use a search_default_* key); the search
         view adds an explicit Active filter. -->
    <record id="action_fetches" model="ir.actions.act_window">
        <field name="name">PRs to fetch</field>
        <field name="res_model">runbot_merge.fetch_job</field>
        <field name="view_mode">tree</field>
        <field name="context">{'default_active': True}</field>
    </record>
    <record id="search_fetches" model="ir.ui.view">
        <field name="name">Fetches Search</field>
        <field name="model">runbot_merge.fetch_job</field>
        <field name="arch" type="xml">
            <search>
                <filter string="Active" name="active"
                        domain="[('active', '=', True)]"/>
                <field name="repository"/>
                <field name="number"/>
            </search>
        </field>
    </record>
    <record id="tree_fetches" model="ir.ui.view">
        <field name="name">Fetches Tree</field>
        <field name="model">runbot_merge.fetch_job</field>
        <field name="arch" type="xml">
            <tree>
                <field name="repository"/>
                <field name="number"/>
            </tree>
        </field>
    </record>
    <!-- "Queues" submenu entries; the parent runbot_merge_menu is defined
         in the main mergebot views file. -->
    <menuitem name="Queues" id="menu_queues" parent="runbot_merge_menu"/>
    <menuitem name="Splits" id="menu_queues_splits"
              parent="menu_queues"
              action="action_splits"/>
    <menuitem name="Feedback" id="menu_queues_feedback"
              parent="menu_queues"
              action="action_feedback"/>
    <menuitem name="Tagging" id="menu_queues_tagging"
              parent="menu_queues"
              action="action_tagging"/>
    <menuitem name="Fetches" id="menu_fetches"
              parent="menu_queues"
              action="action_fetches"/>
</odoo>

View File

@ -70,7 +70,14 @@
</group>
<group>
<group colspan="4" string="Delegate On">
<field name="delegate_reviewer" nolabel="1"/>
<field name="delegate_reviewer" nolabel="1">
<tree>
<field name="repository"/>
<field name="number"/>
<field name="target"/>
<field name="state"/>
</tree>
</field>
</group>
</group>
</page>

View File

@ -10,22 +10,73 @@
</xpath>
</template>
    <!--
        Renders a link to `pr` (expected in the rendering context).
        The tooltip shows the blocked reason when set, otherwise the first
        line of the PR message; that title is only computed when the
        repository's group is a subset of the current user's groups, so PR
        titles of restricted repositories are not exposed to other visitors.
        An optional `target` context value sets the link target.
    -->
    <template id="link-pr" name="create a link to `pr`">
        <t t-set="title">
            <t t-if="pr.repository.group_id &lt;= env.user.groups_id">
                <t t-esc="pr.message.split('\n')[0]"/>
            </t>
        </t>
        <a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
           t-att-title="pr.blocked or title.strip()"
           t-att-target="target or None"
        ><t t-esc="pr.display_name"/></a>
    </template>
    <!--
        Dropdown listing the CI statuses of `staging` (expected in the
        rendering context along with `repo_statuses`). Only rendered when
        the staging has heads. The caller's body (t-raw="0") becomes the
        dropdown button label; admins additionally get a link to the
        staging's backend form. Each required (repo, context) pair is
        colored by its last reported state: green on success, red on
        error/failure, blue for any other reported state, and light grey
        when nothing has been reported yet (st defaults to (None, None)).
    -->
    <template id="staging-statuses" name="dropdown statuses list of stagings">
        <div class="dropdown" t-if="staging.heads">
            <button class="btn btn-link dropdown-toggle"
                    type="button"
                    data-toggle="dropdown"
                    aria-haspopup="true"
                    aria-expanded="true"
                    t-attf-title="Staged at {{staging.staged_at}}Z"
            >
                <t t-raw="0"/>
                <span class="caret"></span>
            </button>
            <ul class="dropdown-menu staging-statuses">
                <li groups="runbot_merge.group_admin">
                    <a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings"
                       target="new">
                        Open Staging
                    </a>
                </li>
                <t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
                <t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
                    <t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or (None, None)"/>
                    <li t-att-class="
                        'bg-success' if st[0] == 'success'
                        else 'bg-danger' if st[0] in ('error', 'failure')
                        else 'bg-info' if st[0]
                        else 'bg-light'"
                    ><a t-att-href="st[1]" target="new">
                        <t t-esc="req.repo_id.name"/>: <t t-esc="req.context"/>
                    </a></li>
                </t>
            </ul>
        </div>
    </template>
    <!--
        Shared banner row: changelog link plus a warning whenever the
        staging cron or the merge cron is deactivated. The crons are looked
        up via env(user=1), presumably so the active-check also works for
        public/unprivileged visitors (NOTE(review): confirm intent).
    -->
    <template id="alerts">
        <div id="alerts" class="row text-center">
            <div class="alert alert-light col-md-12 h6 mb-0">
                <a href="/runbot_merge/changelog">Changelog</a>
            </div>
            <t t-set="stagingcron" t-value="env(user=1).ref('runbot_merge.staging_cron')"/>
            <div t-if="not stagingcron.active" class="alert alert-warning col-12 mb-0" role="alert">
                Staging is disabled, "ready" pull requests will not be staged.
            </div>
            <t t-set="mergecron" t-value="env(user=1).ref('runbot_merge.merge_cron')"/>
            <div t-if="not mergecron.active" class="alert alert-warning col-12 mb-0" role="alert">
                Merging is disabled, stagings will not be integrated.
            </div>
        </div>
    </template>
<template id="dashboard" name="mergebot dashboard">
<t t-call="website.layout">
<div id="wrap"><div class="container-fluid">
<div id="alerts" class="row text-center">
<div class="alert alert-light col-md-12 h6 mb-0">
<a href="/runbot_merge/changelog">Changelog</a>
</div>
<t t-set="stagingcron" t-value="env(user=1).ref('runbot_merge.staging_cron')"/>
<div t-if="not stagingcron.active" class="alert alert-warning col-12 mb-0" role="alert">
Staging is disabled, "ready" pull requests will not be staged.
</div>
<t t-set="mergecron" t-value="env(user=1).ref('runbot_merge.merge_cron')"/>
<div t-if="not mergecron.active" class="alert alert-warning col-12 mb-0" role="alert">
Merging is disabled, stagings will not be disabled.
</div>
</div>
<t t-call="runbot_merge.alerts"/>
<section t-foreach="projects.with_context(active_test=False)" t-as="project" class="row">
<h1 class="col-md-12"><t t-esc="project.name"/></h1>
<div class="col-md-12">
@ -63,10 +114,7 @@
<li t-foreach="splits" t-as="split">
<ul class="pr-listing list-inline list-unstyled mb0">
<li t-foreach="split.mapped('batch_ids.prs')" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
<t t-call="runbot_merge.link-pr"/>
</li>
</ul>
</li>
@ -76,10 +124,7 @@
<h5>Awaiting</h5>
<ul class="list-inline">
<li t-foreach="ready" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
<t t-call="runbot_merge.link-pr"/>
</li>
</ul>
</div>
@ -87,10 +132,7 @@
<h5>Blocked</h5>
<ul class="list-inline">
<li t-foreach="blocked" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.blocked">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
<t t-call="runbot_merge.link-pr"/>
</li>
</ul>
</div>
@ -105,10 +147,7 @@
<h5>Failed</h5>
<ul class="list-inline">
<li t-foreach="failed" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
<t t-call="runbot_merge.link-pr"/>
</li>
</ul>
</div>
@ -135,49 +174,24 @@
<t t-if="staging_index >= 4">visible-lg-block</t>
</t>
<t t-set="title">
<t t-if="staging.state == 'canceled'">Cancelled: <t t-esc="staging.reason"/></t>
<t t-if="staging.state == 'ff_failed'">Fast Forward Failed</t>
<t t-if="staging.state not in ('canceled', 'ff_failed')"><t t-esc="staging.reason"/></t>
<t t-if="staging.state == 'ff_failed'">fast forward failed (<t t-esc="staging.reason"/>)</t>
<t t-if="staging.state == 'pending'">last status</t>
</t>
<li t-attf-class="staging {{stateclass.strip()}} {{decorationclass.strip()}}" t-att-title="title.strip() or None">
<!-- separate concatenation to avoid having line-break in title as some browsers trigger it -->
<!-- write-date may have microsecond precision, strip that information -->
<!-- use write-date under assumption that a staging is last modified when it ends -->
<t t-set="title"><t t-esc="title.strip() or staging.reason"/> at <t t-esc="staging.write_date.replace(microsecond=0)"/>Z</t>
<li t-attf-class="staging {{stateclass.strip()}} {{decorationclass.strip()}}" t-att-title="title">
<ul class="list-unstyled">
<li t-foreach="staging.batch_ids" t-as="batch" class="batch">
<t t-esc="batch.prs[:1].label"/>
<t t-foreach="batch.prs" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.message.split('\n')[0]"><t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/></a>
<t t-call="runbot_merge.link-pr"/>
</t>
</li>
</ul>
<t t-if="staging.heads">
<div class="dropdown">
<button class="btn btn-link dropdown-toggle" type="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true"
t-attf-title="{{staging.staged_at}}Z">
Staged <span t-field="staging.staged_at" t-options="{'widget': 'relative'}"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li groups="runbot_merge.group_admin">
<a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings" target="new">
Open Staging
</a>
</li>
<t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
<t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
<t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or ('pending', None)"/>
<li t-att-class="
'bg-success' if st[0] == 'success'
else 'bg-danger' if st[0] in ('error', 'failure')
else 'bg-info'"
>
<a t-att-href="st[1]" target="new">
<t t-esc="req.repo_id.name"/>: <t t-esc="req.context"/>
</a>
</li>
</t>
</ul>
</div>
<t t-call="runbot_merge.staging-statuses">
Staged <span t-field="staging.staged_at" t-options="{'widget': 'relative'}"/>
</t>
</li>
</t>
@ -224,68 +238,32 @@
<span t-field="staging.staged_at"
t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
</t>
<div class="dropdown" t-if="staging.heads">
<button class="btn btn-link dropdown-toggle"
type="button"
data-toggle="dropdown"
aria-haspopup="true"
aria-expanded="true">
<span t-field="staging.staged_at"
t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li groups="runbot_merge.group_admin">
<a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings"
target="new">
Open Staging
</a>
</li>
<t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
<t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
<t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or ('pending', None)"/>
<li t-att-class="
'bg-success' if st[0] == 'success'
else 'bg-danger' if st[0] in ('error', 'failure')
else 'bg-info'"
>
<a t-att-href="st[1]"
target="new">
<t t-esc="req.repo_id.name"/>:
<t t-esc="req.context"/>
</a>
</li>
</t>
</ul>
</div>
<t t-call="runbot_merge.staging-statuses">
<span t-field="staging.staged_at"
t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
</t>
</th>
<td>
<ul class="list-inline list-unstyled mb0">
<t t-foreach="staging.batch_ids"
t-as="batch">
<t t-set="first_pr"
t-value="batch.prs[-1]"/>
<li class="dropdown">
t-value="batch.prs[-1:]"/>
<li class="dropdown" t-if="first_pr">
<button class="btn btn-link dropdown-toggle"
type="button"
data-toggle="dropdown"
aria-haspopup="true"
aria-expanded="true"
t-att-title="first_pr.message.split('\n')[0]"
>
<t t-esc="first_pr.label"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li t-foreach="batch.prs"
t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.message.split('\n')[0]"
target="new">
<t t-esc="pr.repository.name"/>
#
<t t-esc="pr.number"/>
</a>
<li t-foreach="batch.prs" t-as="pr">
<t t-call="runbot_merge.link-pr">
<t t-set="target">new</t>
</t>
</li>
</ul>
</li>
@ -411,12 +389,13 @@
<template id="view_pull_request">
<t t-call="website.layout">
<div id="wrap"><div class="container-fluid">
<t t-call="runbot_merge.alerts"/>
<h1>
<a t-att-href="pr.github_url" t-field="pr.display_name">
</a>
<a t-attf-href="/web#view_type=form&amp;model=runbot_merge.pull_requests&amp;id={{pr.id}}"
class="btn btn-sm btn-secondary align-top float-right"
groups="base.group_user">View in backend</a>
<a t-att-href="pr.github_url" t-field="pr.display_name">
</a>
<a t-attf-href="/web#view_type=form&amp;model=runbot_merge.pull_requests&amp;id={{pr.id}}"
class="btn btn-sm btn-secondary align-top float-right"
groups="base.group_user">View in backend</a>
</h1>
<h6>Created by <span t-field="pr.author.display_name"/></h6>
<t t-set="tmpl">
@ -425,11 +404,14 @@
<t t-else="">open</t>
</t>
<t t-call="runbot_merge.view_pull_request_info_{{tmpl.strip()}}"/>
<t t-set="target_cls" t-value="None if pr.target.active else 'text-muted bg-warning'"/>
<dl class="runbot-merge-fields">
<dt>label</dt>
<dd><span t-field="pr.label"/></dd>
<dt>head</dt>
<dd><a t-attf-href="{{pr.github_url}}/commits/{{pr.head}}"><span t-field="pr.head"/></a></dd>
<dt t-att-class="target_cls">target</dt>
<dd t-att-class="target_cls"><span t-field="pr.target"/></dd>
</dl>
<p t-field="pr.message"/>
</div></div>