[MERGE] from 13.0

Get mergebot updates from since the runbot was upgraded.

NOTE: updates forwardport.models.forwardport.Queue with slots for
compatibility with commit
odoo/odoo@ea3e39506a "use slots for
BaseModel", otherwise we get

    TypeError: __bases__ assignment: 'ForwardPortTasks' object layout differs from 'BaseModel'
This commit is contained in:
Xavier Morel 2022-08-23 14:33:37 +02:00
commit 65c2ffc997
47 changed files with 1686 additions and 737 deletions

View File

@ -50,11 +50,14 @@ import functools
import http.client import http.client
import itertools import itertools
import os import os
import pathlib
import pprint
import random import random
import re import re
import socket import socket
import subprocess import subprocess
import sys import sys
import tempfile
import time import time
import uuid import uuid
import warnings import warnings
@ -268,14 +271,18 @@ class DbDict(dict):
self._adpath = adpath self._adpath = adpath
def __missing__(self, module): def __missing__(self, module):
self[module] = db = 'template_%s' % uuid.uuid4() self[module] = db = 'template_%s' % uuid.uuid4()
subprocess.run([ with tempfile.TemporaryDirectory() as d:
'odoo', '--no-http', subprocess.run([
'--addons-path', self._adpath, 'odoo', '--no-http',
'-d', db, '-i', module, '--addons-path', self._adpath,
'--max-cron-threads', '0', '-d', db, '-i', module + ',auth_oauth',
'--stop-after-init', '--max-cron-threads', '0',
'--log-level', 'warn' '--stop-after-init',
], check=True) '--log-level', 'warn'
],
check=True,
env={**os.environ, 'XDG_DATA_HOME': d}
)
return db return db
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
@ -334,21 +341,48 @@ def port():
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1] return s.getsockname()[1]
@pytest.fixture(scope='session')
def dummy_addons_path():
with tempfile.TemporaryDirectory() as dummy_addons_path:
mod = pathlib.Path(dummy_addons_path, 'saas_worker')
mod.mkdir(0o700)
(mod / '__init__.py').write_bytes(b'')
(mod / '__manifest__.py').write_text(pprint.pformat({
'name': 'dummy saas_worker',
'version': '1.0',
}), encoding='utf-8')
(mod / 'util.py').write_text("""\
def from_role(_):
return lambda fn: fn
""", encoding='utf-8')
yield dummy_addons_path
@pytest.fixture @pytest.fixture
def server(request, db, port, module): def server(request, db, port, module, dummy_addons_path, tmpdir):
log_handlers = [ log_handlers = [
'odoo.modules.loading:WARNING', 'odoo.modules.loading:WARNING',
] ]
if not request.config.getoption('--log-github'): if not request.config.getoption('--log-github'):
log_handlers.append('github_requests:WARNING') log_handlers.append('github_requests:WARNING')
addons_path = ','.join(map(str, [
request.config.getoption('--addons-path'),
dummy_addons_path,
]))
p = subprocess.Popen([ p = subprocess.Popen([
'odoo', '--http-port', str(port), 'odoo', '--http-port', str(port),
'--addons-path', request.config.getoption('--addons-path'), '--addons-path', addons_path,
'-d', db, '-d', db,
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand) '--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
*itertools.chain.from_iterable(('--log-handler', h) for h in log_handlers), *itertools.chain.from_iterable(('--log-handler', h) for h in log_handlers),
]) ], env={
**os.environ,
# stop putting garbage in the user dirs, and potentially creating conflicts
# TODO: way to override this with macOS?
'XDG_DATA_HOME': str(tmpdir.mkdir('share')),
'XDG_CACHE_HOME': str(tmpdir.mkdir('cache')),
})
try: try:
wait_for_server(db, port, p, module) wait_for_server(db, port, p, module)
@ -558,17 +592,6 @@ class Repo:
parents=[p['sha'] for p in gh_commit['parents']], parents=[p['sha'] for p in gh_commit['parents']],
) )
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from map(self._commit_from_gh, r.json())
if not r.links.get('next'):
return
def read_tree(self, commit): def read_tree(self, commit):
""" read tree object from commit """ read tree object from commit
@ -813,6 +836,12 @@ mutation setDraft($pid: ID!) {
} }
} }
''' '''
def state_prop(name: str) -> property:
@property
def _prop(self):
return self._pr[name]
return _prop.setter(lambda self, v: self._set_prop(name, v))
class PR: class PR:
def __init__(self, repo, number): def __init__(self, repo, number):
self.repo = repo self.repo = repo
@ -837,15 +866,9 @@ class PR:
caching['If-Modified-Since']= r.headers['Last-Modified'] caching['If-Modified-Since']= r.headers['Last-Modified']
return contents return contents
@property title = state_prop('title')
def title(self): body = state_prop('body')
return self._pr['title'] base = state_prop('base')
title = title.setter(lambda self, v: self._set_prop('title', v))
@property
def base(self):
return self._pr['base']
base = base.setter(lambda self, v: self._set_prop('base', v))
@property @property
def draft(self): def draft(self):
@ -875,10 +898,6 @@ class PR:
def state(self): def state(self):
return self._pr['state'] return self._pr['state']
@property
def body(self):
return self._pr['body']
@property @property
def comments(self): def comments(self):
r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number)) r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
@ -1105,17 +1124,8 @@ class Model:
def create(self, values): def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)]) return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values): def check_object_reference(self, *args, **kwargs):
return self._env(self._model, 'write', self._ids, values) return self.env(self._model, 'check_object_reference', *args, **kwargs)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def name_get(self):
return self._env(self._model, 'name_get', self._ids)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def sorted(self, field): def sorted(self, field):
rs = sorted(self.read([field]), key=lambda r: r[field]) rs = sorted(self.read([field]), key=lambda r: r[field])

View File

@ -8,6 +8,7 @@
'data/security.xml', 'data/security.xml',
'data/crons.xml', 'data/crons.xml',
'data/views.xml', 'data/views.xml',
'data/queues.xml',
], ],
'license': 'LGPL-3', 'license': 'LGPL-3',
} }

View File

@ -0,0 +1 @@
IMP: notifications when reopening a closed forward-port (e.g. indicate that they're detached)

View File

@ -0,0 +1 @@
IMP: use the `diff3` conflict style, should make forward port conflicts clearer and easier to fix

View File

@ -0,0 +1 @@
IMP: flag detached PRs in their dashboard

View File

@ -0,0 +1,51 @@
<odoo>
<record id="action_forward_port" model="ir.actions.act_window">
<field name="name">Forward port batches</field>
<field name="res_model">forwardport.batches</field>
<field name="context">{'active_test': False}</field>
</record>
<record id="tree_forward_port" model="ir.ui.view">
<field name="name">Forward port batches</field>
<field name="model">forwardport.batches</field>
<field name="arch" type="xml">
<tree>
<field name="source"/>
<field name="batch_id"/>
</tree>
</field>
</record>
<record id="form_forward_port" model="ir.ui.view">
<field name="name">Forward port batch</field>
<field name="model">forwardport.batches</field>
<field name="arch" type="xml">
<form>
<group>
<group><field name="source"/></group>
<group><field name="batch_id"/></group>
</group>
</form>
</field>
</record>
<record id="action_followup_updates" model="ir.actions.act_window">
<field name="name">Followup Updates</field>
<field name="res_model">forwardport.updates</field>
</record>
<record id="tree_followup_updates" model="ir.ui.view">
<field name="name">Followup Updates</field>
<field name="model">forwardport.updates</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="original_root"/>
<field name="new_root"/>
</tree>
</field>
</record>
<menuitem name="Forward Port Batches" id="menu_forward_port"
parent="runbot_merge.menu_queues"
action="action_forward_port"/>
<menuitem name="Followup Updates" id="menu_followup"
parent="runbot_merge.menu_queues"
action="action_followup_updates"/>
</odoo>

View File

@ -1,5 +1,5 @@
<odoo> <odoo>
<template id="dashboard" inherit_id="runbot_merge.dashboard"> <template id="alerts" inherit_id="runbot_merge.alerts">
<xpath expr="//div[@id='alerts']"> <xpath expr="//div[@id='alerts']">
<t t-set="fpcron" t-value="env(user=1).ref('forwardport.port_forward')"/> <t t-set="fpcron" t-value="env(user=1).ref('forwardport.port_forward')"/>
<div t-if="not fpcron.active" class="alert alert-warning col-12" role="alert"> <div t-if="not fpcron.active" class="alert alert-warning col-12" role="alert">
@ -12,6 +12,7 @@
<t t-set="outstanding" t-value="env['runbot_merge.pull_requests'].search_count([ <t t-set="outstanding" t-value="env['runbot_merge.pull_requests'].search_count([
('source_id', '!=', False), ('source_id', '!=', False),
('state', 'not in', ['merged', 'closed']), ('state', 'not in', ['merged', 'closed']),
('source_id.merge_date', '&lt;', datetime.datetime.now() - relativedelta(days=3)),
])"/> ])"/>
<div t-if="outstanding != 0" class="alert col-md-12 alert-warning mb-0"> <div t-if="outstanding != 0" class="alert col-md-12 alert-warning mb-0">
<a href="/forwardport/outstanding"> <a href="/forwardport/outstanding">
@ -108,6 +109,11 @@
<a t-att-href="pr.source_id.url"> <a t-att-href="pr.source_id.url">
<span t-field="pr.source_id.display_name"/> <span t-field="pr.source_id.display_name"/>
</a> </a>
<span t-if="not pr.parent_id"
class="badge badge-danger user-select-none"
title="A detached PR behaves like a non-forward-port, it has to be approved via the mergebot, this is usually caused by the forward-port having been in conflict or updated.">
DETACHED
</span>
</dd> </dd>
</t> </t>
<t t-if="pr.forwardport_ids"> <t t-if="pr.forwardport_ids">
@ -175,6 +181,9 @@
<field name="inherit_id" ref="runbot_merge.runbot_merge_form_prs"/> <field name="inherit_id" ref="runbot_merge.runbot_merge_form_prs"/>
<field name="model">runbot_merge.pull_requests</field> <field name="model">runbot_merge.pull_requests</field>
<field name="arch" type="xml"> <field name="arch" type="xml">
<xpath expr="//field[@name='state']" position="after">
<field name="merge_date" attrs="{'invisible': [('state', '!=', 'merged')]}"/>
</xpath>
<xpath expr="//sheet/group[2]" position="after"> <xpath expr="//sheet/group[2]" position="after">
<separator string="Forward Port" attrs="{'invisible': [('source_id', '=', False)]}"/> <separator string="Forward Port" attrs="{'invisible': [('source_id', '=', False)]}"/>
<group attrs="{'invisible': [('source_id', '!=', False)]}"> <group attrs="{'invisible': [('source_id', '!=', False)]}">

View File

@ -15,6 +15,7 @@ MERGE_AGE = relativedelta.relativedelta(weeks=2)
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
class Queue: class Queue:
__slots__ = ()
limit = 100 limit = 100
def _process_item(self): def _process_item(self):
@ -79,12 +80,12 @@ class ForwardPortTasks(models.Model, Queue):
batch.active = False batch.active = False
CONFLICT_TEMPLATE = "WARNING: the latest change ({previous.head}) triggered " \ CONFLICT_TEMPLATE = "{ping}WARNING: the latest change ({previous.head}) triggered " \
"a conflict when updating the next forward-port " \ "a conflict when updating the next forward-port " \
"({next.display_name}), and has been ignored.\n\n" \ "({next.display_name}), and has been ignored.\n\n" \
"You will need to update this pull request differently, " \ "You will need to update this pull request differently, " \
"or fix the issue by hand on {next.display_name}." "or fix the issue by hand on {next.display_name}."
CHILD_CONFLICT = "WARNING: the update of {previous.display_name} to " \ CHILD_CONFLICT = "{ping}WARNING: the update of {previous.display_name} to " \
"{previous.head} has caused a conflict in this pull request, " \ "{previous.head} has caused a conflict in this pull request, " \
"data may have been lost." "data may have been lost."
class UpdateQueue(models.Model, Queue): class UpdateQueue(models.Model, Queue):
@ -118,14 +119,15 @@ class UpdateQueue(models.Model, Queue):
Feedback.create({ Feedback.create({
'repository': child.repository.id, 'repository': child.repository.id,
'pull_request': child.number, 'pull_request': child.number,
'message': "Ancestor PR %s has been updated but this PR" 'message': "%sancestor PR %s has been updated but this PR"
" is %s and can't be updated to match." " is %s and can't be updated to match."
"\n\n" "\n\n"
"You may want or need to manually update any" "You may want or need to manually update any"
" followup PR." % ( " followup PR." % (
self.new_root.display_name, child.ping(),
child.state, self.new_root.display_name,
) child.state,
)
}) })
return return
@ -137,6 +139,7 @@ class UpdateQueue(models.Model, Queue):
'repository': previous.repository.id, 'repository': previous.repository.id,
'pull_request': previous.number, 'pull_request': previous.number,
'message': CONFLICT_TEMPLATE.format( 'message': CONFLICT_TEMPLATE.format(
ping=previous.ping(),
previous=previous, previous=previous,
next=child next=child
) )
@ -144,7 +147,7 @@ class UpdateQueue(models.Model, Queue):
Feedback.create({ Feedback.create({
'repository': child.repository.id, 'repository': child.repository.id,
'pull_request': child.number, 'pull_request': child.number,
'message': CHILD_CONFLICT.format(previous=previous, next=child)\ 'message': CHILD_CONFLICT.format(ping=child.ping(), previous=previous, next=child)\
+ (f'\n\nstdout:\n```\n{out.strip()}\n```' if out.strip() else '') + (f'\n\nstdout:\n```\n{out.strip()}\n```' if out.strip() else '')
+ (f'\n\nstderr:\n```\n{err.strip()}\n```' if err.strip() else '') + (f'\n\nstderr:\n```\n{err.strip()}\n```' if err.strip() else '')
}) })

View File

@ -240,6 +240,7 @@ class PullRequests(models.Model):
# again 2 PRs with same head is weird so... # again 2 PRs with same head is weird so...
newhead = vals.get('head') newhead = vals.get('head')
with_parents = self.filtered('parent_id') with_parents = self.filtered('parent_id')
closed_fp = self.filtered(lambda p: p.state == 'closed' and p.source_id)
if newhead and not self.env.context.get('ignore_head_update') and newhead != self.head: if newhead and not self.env.context.get('ignore_head_update') and newhead != self.head:
vals.setdefault('parent_id', False) vals.setdefault('parent_id', False)
# if any children, this is an FP PR being updated, enqueue # if any children, this is an FP PR being updated, enqueue
@ -261,10 +262,24 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': p.repository.id, 'repository': p.repository.id,
'pull_request': p.number, 'pull_request': p.number,
'message': "This PR was modified / updated and has become a normal PR. " 'message': "%sthis PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % p.repository.project_id.github_prefix, "It should be merged the normal way (via @%s)" % (
p.source_id.ping(),
p.repository.project_id.github_prefix,
),
'token_field': 'fp_github_token', 'token_field': 'fp_github_token',
}) })
for p in closed_fp.filtered(lambda p: p.state != 'closed'):
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': p.repository.id,
'pull_request': p.number,
'message': "%sthis PR was closed then reopened. "
"It should be merged the normal way (via @%s)" % (
p.source_id.ping(),
p.repository.project_id.github_prefix,
),
'token_field': 'fp_github_token',
})
if vals.get('state') == 'merged': if vals.get('state') == 'merged':
for p in self: for p in self:
self.env['forwardport.branch_remover'].create({ self.env['forwardport.branch_remover'].create({
@ -282,6 +297,7 @@ class PullRequests(models.Model):
r = super()._try_closing(by) r = super()._try_closing(by)
if r: if r:
self.with_context(forwardport_detach_warn=False).parent_id = False self.with_context(forwardport_detach_warn=False).parent_id = False
self.search([('parent_id', '=', self.id)]).parent_id = False
return r return r
def _parse_commands(self, author, comment, login): def _parse_commands(self, author, comment, login):
@ -298,7 +314,6 @@ class PullRequests(models.Model):
) )
return return
Feedback = self.env['runbot_merge.pull_requests.feedback']
# TODO: don't use a mutable tokens iterator # TODO: don't use a mutable tokens iterator
tokens = iter(tokens) tokens = iter(tokens)
while True: while True:
@ -306,6 +321,7 @@ class PullRequests(models.Model):
if token is None: if token is None:
break break
ping = False
close = False close = False
msg = None msg = None
if token in ('ci', 'skipci'): if token in ('ci', 'skipci'):
@ -314,7 +330,8 @@ class PullRequests(models.Model):
pr.fw_policy = token pr.fw_policy = token
msg = "Not waiting for CI to create followup forward-ports." if token == 'skipci' else "Waiting for CI to create followup forward-ports." msg = "Not waiting for CI to create followup forward-ports." if token == 'skipci' else "Waiting for CI to create followup forward-ports."
else: else:
msg = "I don't trust you enough to do that @{}.".format(login) ping = True
msg = "you can't configure ci."
if token == 'ignore': # replace 'ignore' by 'up to <pr_branch>' if token == 'ignore': # replace 'ignore' by 'up to <pr_branch>'
token = 'up' token = 'up'
@ -322,55 +339,56 @@ class PullRequests(models.Model):
if token in ('r+', 'review+'): if token in ('r+', 'review+'):
if not self.source_id: if not self.source_id:
Feedback.create({ ping = True
'repository': self.repository.id, msg = "I can only do this on forward-port PRs and this is not one, see {}.".format(
'pull_request': self.number, self.repository.project_id.github_prefix
'message': "I'm sorry, @{}. I can only do this on forward-port PRs and this ain't one.".format(login), )
'token_field': 'fp_github_token', elif not self.parent_id:
}) ping = True
continue msg = "I can only do this on unmodified forward-port PRs, ask {}.".format(
merge_bot = self.repository.project_id.github_prefix self.repository.project_id.github_prefix
# don't update the root ever )
for pr in (p for p in self._iter_ancestors() if p.parent_id if p.state in RPLUS): else:
# only the author is delegated explicitely on the merge_bot = self.repository.project_id.github_prefix
pr._parse_commands(author, {**comment, 'body': merge_bot + ' r+'}, login) # don't update the root ever
for pr in (p for p in self._iter_ancestors() if p.parent_id if p.state in RPLUS):
# only the author is delegated explicitely on the
pr._parse_commands(author, {**comment, 'body': merge_bot + ' r+'}, login)
elif token == 'close': elif token == 'close':
msg = "I'm sorry, @{}. I can't close this PR for you.".format(
login)
if self.source_id._pr_acl(author).is_reviewer: if self.source_id._pr_acl(author).is_reviewer:
close = True close = True
msg = None else:
ping = True
msg = "you can't close PRs."
elif token == 'up' and next(tokens, None) == 'to': elif token == 'up' and next(tokens, None) == 'to':
limit = next(tokens, None) limit = next(tokens, None)
ping = True
if not self._pr_acl(author).is_author: if not self._pr_acl(author).is_author:
Feedback.create({ msg = "you can't set a forward-port limit.".format(login)
'repository': self.repository.id, elif not limit:
'pull_request': self.number, msg = "please provide a branch to forward-port to."
'message': "I'm sorry, @{}. You can't set a forward-port limit.".format(login),
'token_field': 'fp_github_token',
})
continue
if not limit:
msg = "Please provide a branch to forward-port to."
else: else:
limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([ limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([
('project_id', '=', self.repository.project_id.id), ('project_id', '=', self.repository.project_id.id),
('name', '=', limit), ('name', '=', limit),
]) ])
if self.source_id: if self.source_id:
msg = "Sorry, forward-port limit can only be set on " \ msg = "forward-port limit can only be set on " \
f"an origin PR ({self.source_id.display_name} " \ f"an origin PR ({self.source_id.display_name} " \
"here) before it's merged and forward-ported." "here) before it's merged and forward-ported."
elif self.state in ['merged', 'closed']: elif self.state in ['merged', 'closed']:
msg = "Sorry, forward-port limit can only be set before the PR is merged." msg = "forward-port limit can only be set before the PR is merged."
elif not limit_id: elif not limit_id:
msg = "There is no branch %r, it can't be used as a forward port target." % limit msg = "there is no branch %r, it can't be used as a forward port target." % limit
elif limit_id == self.target: elif limit_id == self.target:
ping = False
msg = "Forward-port disabled." msg = "Forward-port disabled."
self.limit_id = limit_id self.limit_id = limit_id
elif not limit_id.fp_enabled: elif not limit_id.fp_enabled:
msg = "Branch %r is disabled, it can't be used as a forward port target." % limit_id.name msg = "branch %r is disabled, it can't be used as a forward port target." % limit_id.name
else: else:
ping = False
msg = "Forward-porting to %r." % limit_id.name msg = "Forward-porting to %r." % limit_id.name
self.limit_id = limit_id self.limit_id = limit_id
@ -382,7 +400,7 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': self.repository.id, 'repository': self.repository.id,
'pull_request': self.number, 'pull_request': self.number,
'message': msg, 'message': f'@{author.github_login} {msg}' if msg and ping else msg,
'close': close, 'close': close,
'token_field': 'fp_github_token', 'token_field': 'fp_github_token',
}) })
@ -397,8 +415,8 @@ class PullRequests(models.Model):
'repository': self.repository.id, 'repository': self.repository.id,
'pull_request': self.number, 'pull_request': self.number,
'token_field': 'fp_github_token', 'token_field': 'fp_github_token',
'message': '%s\n\n%s failed on this forward-port PR' % ( 'message': '%s%s failed on this forward-port PR' % (
self.source_id._pingline(), self.source_id.ping(),
ci, ci,
) )
}) })
@ -578,10 +596,10 @@ class PullRequests(models.Model):
'repository': pr.repository.id, 'repository': pr.repository.id,
'pull_request': pr.number, 'pull_request': pr.number,
'token_field': 'fp_github_token', 'token_field': 'fp_github_token',
'message': "This pull request can not be forward ported: " 'message': "%sthis pull request can not be forward ported: "
"next branch is %r but linked pull request %s " "next branch is %r but linked pull request %s "
"has a next branch %r." % ( "has a next branch %r." % (
t.name, linked.display_name, other.name pr.ping(), t.name, linked.display_name, other.name
) )
}) })
_logger.warning( _logger.warning(
@ -678,8 +696,8 @@ class PullRequests(models.Model):
'delegates': [(6, False, (source.delegates | pr.delegates).ids)] 'delegates': [(6, False, (source.delegates | pr.delegates).ids)]
}) })
if has_conflicts and pr.parent_id and pr.state not in ('merged', 'closed'): if has_conflicts and pr.parent_id and pr.state not in ('merged', 'closed'):
message = source._pingline() + """ message = source.ping() + """\
The next pull request (%s) is in conflict. You can merge the chain up to here by saying the next pull request (%s) is in conflict. You can merge the chain up to here by saying
> @%s r+ > @%s r+
%s""" % (new_pr.display_name, pr.repository.project_id.fp_github_name, footer) %s""" % (new_pr.display_name, pr.repository.project_id.fp_github_name, footer)
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
@ -710,19 +728,21 @@ The next pull request (%s) is in conflict. You can merge the chain up to here by
'* %s%s\n' % (sha, ' <- on this commit' if sha == h else '') '* %s%s\n' % (sha, ' <- on this commit' if sha == h else '')
for sha in hh for sha in hh
) )
message = f"""{source._pingline()} cherrypicking of pull request {source.display_name} failed. message = f"""{source.ping()}cherrypicking of pull request {source.display_name} failed.
{lines}{sout}{serr} {lines}{sout}{serr}
Either perform the forward-port manually (and push to this branch, proceeding as usual) or close this PR (maybe?). Either perform the forward-port manually (and push to this branch, proceeding as usual) or close this PR (maybe?).
In the former case, you may want to edit this PR message as well. In the former case, you may want to edit this PR message as well.
""" """
elif has_conflicts: elif has_conflicts:
message = """%s message = """%s\
While this was properly forward-ported, at least one co-dependent PR (%s) did not succeed. You will need to fix it before this can be merged. while this was properly forward-ported, at least one co-dependent PR (%s) did \
not succeed. You will need to fix it before this can be merged.
Both this PR and the others will need to be approved via `@%s r+` as they are all considered "in conflict". Both this PR and the others will need to be approved via `@%s r+` as they are \
all considered "in conflict".
%s""" % ( %s""" % (
source._pingline(), source.ping(),
', '.join(p.display_name for p in (new_batch - new_pr)), ', '.join(p.display_name for p in (new_batch - new_pr)),
proj.github_prefix, proj.github_prefix,
footer footer
@ -733,8 +753,8 @@ Both this PR and the others will need to be approved via `@%s r+` as they are al
for p in pr._iter_ancestors() for p in pr._iter_ancestors()
if p.parent_id if p.parent_id
) )
message = source._pingline() + """ message = source.ping() + """\
This PR targets %s and is the last of the forward-port chain%s this PR targets %s and is the last of the forward-port chain%s
%s %s
To merge the full chain, say To merge the full chain, say
> @%s r+ > @%s r+
@ -772,14 +792,6 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
b.prs[0]._schedule_fp_followup() b.prs[0]._schedule_fp_followup()
return b return b
def _pingline(self):
assignees = (self.author | self.reviewed_by).mapped('github_login')
return "Ping %s" % ', '.join(
'@' + login
for login in assignees
if login
)
def _create_fp_branch(self, target_branch, fp_branch_name, cleanup): def _create_fp_branch(self, target_branch, fp_branch_name, cleanup):
""" Creates a forward-port for the current PR to ``target_branch`` under """ Creates a forward-port for the current PR to ``target_branch`` under
``fp_branch_name``. ``fp_branch_name``.
@ -794,6 +806,7 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
:rtype: (None | (str, str, str, list[str]), Repo) :rtype: (None | (str, str, str, list[str]), Repo)
""" """
source = self._get_local_directory() source = self._get_local_directory()
root = self._get_root()
# update all the branches & PRs # update all the branches & PRs
r = source.with_params('gc.pruneExpire=1.day.ago')\ r = source.with_params('gc.pruneExpire=1.day.ago')\
.with_config( .with_config(
@ -802,17 +815,16 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
)\ )\
.fetch('-p', 'origin') .fetch('-p', 'origin')
_logger.info("Updated %s:\n%s", source._directory, r.stdout.decode()) _logger.info("Updated %s:\n%s", source._directory, r.stdout.decode())
# FIXME: check that pr.head is pull/{number}'s head instead? source.cat_file(e=root.head)
source.cat_file(e=self.head)
# create working copy # create working copy
_logger.info("Create working copy to forward-port %s:%d to %s", _logger.info(
self.repository.name, self.number, target_branch.name) "Create working copy to forward-port %s (really %s) to %s",
self.display_name, root.display_name, target_branch.name)
working_copy = source.clone( working_copy = source.clone(
cleanup.enter_context( cleanup.enter_context(
tempfile.TemporaryDirectory( tempfile.TemporaryDirectory(
prefix='%s:%d-to-%s-' % ( prefix='%s-to-%s-' % (
self.repository.name, root.display_name,
self.number,
target_branch.name target_branch.name
), ),
dir=user_cache_dir('forwardport') dir=user_cache_dir('forwardport')
@ -831,7 +843,6 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
_logger.info("Create FP branch %s", fp_branch_name) _logger.info("Create FP branch %s", fp_branch_name)
working_copy.checkout(b=fp_branch_name) working_copy.checkout(b=fp_branch_name)
root = self._get_root()
try: try:
root._cherry_pick(working_copy) root._cherry_pick(working_copy)
return None, working_copy return None, working_copy
@ -876,7 +887,7 @@ This PR targets %s and is part of the forward-port chain. Further PRs will be cr
# switch back to the PR branch # switch back to the PR branch
conf.checkout(fp_branch_name) conf.checkout(fp_branch_name)
# cherry-pick the squashed commit to generate the conflict # cherry-pick the squashed commit to generate the conflict
conf.with_params('merge.renamelimit=0')\ conf.with_params('merge.renamelimit=0', 'merge.conflictstyle=diff3')\
.with_config(check=False)\ .with_config(check=False)\
.cherry_pick(squashed, no_commit=True) .cherry_pick(squashed, no_commit=True)
status = conf.stdout().status(short=True, untracked_files='no').stdout.decode() status = conf.stdout().status(short=True, untracked_files='no').stdout.decode()
@ -1015,7 +1026,7 @@ stderr:
subprocess.run([ subprocess.run([
'git', 'clone', '--bare', 'git', 'clone', '--bare',
'https://{}:{}@github.com/{}'.format( 'https://{}:{}@github.com/{}'.format(
self.repository.project_id.fp_github_name, self.repository.project_id.fp_github_name or '',
self.repository.project_id.fp_github_token, self.repository.project_id.fp_github_token,
self.repository.name, self.repository.name,
), ),
@ -1084,8 +1095,9 @@ stderr:
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': source.repository.id, 'repository': source.repository.id,
'pull_request': source.number, 'pull_request': source.number,
'message': "This pull request has forward-port PRs awaiting action (not merged or closed): %s" % ', '.join( 'message': "%sthis pull request has forward-port PRs awaiting action (not merged or closed):\n%s" % (
pr.display_name for pr in sorted(prs, key=lambda p: p.number) source.ping(),
'\n- '.join(pr.display_name for pr in sorted(prs, key=lambda p: p.number))
), ),
'token_field': 'fp_github_token', 'token_field': 'fp_github_token',
}) })
@ -1134,7 +1146,7 @@ class Repo:
try: try:
return self._opener(args, **opts) return self._opener(args, **opts)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
_logger.error("git call error:\n%s", e.stderr.decode()) _logger.error("git call error:%s", ('\n' + e.stderr.decode()) if e.stderr else e )
raise raise
def stdout(self, flag=True): def stdout(self, flag=True):

View File

@ -20,13 +20,3 @@ class FreezeWizard(models.Model):
if not self.search_count([]): if not self.search_count([]):
self.env.ref('forwardport.port_forward').active = True self.env.ref('forwardport.port_forward').active = True
return r return r
def action_freeze(self):
# have to store wizard content as it's removed during freeze
project = self.project_id
branches_before = project.branch_ids
prs = self.mapped('release_pr_ids.pr_id')
r = super().action_freeze()
new_branch = project.branch_ids - branches_before
prs.write({'limit_id': new_branch.id})
return r

View File

@ -1,12 +1,8 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import pathlib
import re import re
import requests
from shutil import rmtree
import pytest import pytest
import requests
from odoo.tools.appdirs import user_cache_dir
@pytest.fixture @pytest.fixture
def default_crons(): def default_crons():
@ -47,20 +43,6 @@ def _check_scopes(config):
assert token_scopes >= required_scopes, \ assert token_scopes >= required_scopes, \
"%s should have scopes %s, found %s" % (section, token_scopes, required_scopes) "%s should have scopes %s, found %s" % (section, token_scopes, required_scopes)
@pytest.fixture(autouse=True)
def _cleanup_cache(config, users):
""" forwardport has a repo cache which it assumes is unique per name
but tests always use the same repo paths / names for different repos
(the repos get re-created), leading to divergent repo histories.
So clear cache after each test, two tests should not share repos.
"""
yield
cache_root = pathlib.Path(user_cache_dir('forwardport'))
rmtree(cache_root / config['github']['owner'], ignore_errors=True)
for login in users.values():
rmtree(cache_root / login, ignore_errors=True)
@pytest.fixture() @pytest.fixture()
def module(): def module():
""" When a test function is (going to be) run, selects the containing """ When a test function is (going to be) run, selects the containing

View File

@ -60,9 +60,10 @@ def test_conflict(env, config, make_repo, users):
'g': 'a', 'g': 'a',
'h': re_matches(r'''<<<\x3c<<< HEAD 'h': re_matches(r'''<<<\x3c<<< HEAD
a a
|||||||| parent of [\da-f]{7,}.*
======= =======
xxx xxx
>>>\x3e>>> [0-9a-f]{7,}.* >>>\x3e>>> [\da-f]{7,}.*
'''), '''),
} }
prb = prod.get_pr(prb_id.number) prb = prod.get_pr(prb_id.number)
@ -73,8 +74,8 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
'''), '''),
(users['user'], """Ping @%s, @%s (users['user'], """@%s @%s the next pull request (%s) is in conflict. \
The next pull request (%s) is in conflict. You can merge the chain up to here by saying You can merge the chain up to here by saying
> @%s r+ > @%s r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
@ -323,9 +324,10 @@ def test_multiple_commits_different_authorship(env, config, make_repo, users, ro
assert re.match(r'''<<<\x3c<<< HEAD assert re.match(r'''<<<\x3c<<< HEAD
b b
|||||||| parent of [\da-f]{7,}.*
======= =======
2 2
>>>\x3e>>> [0-9a-f]{7,}.* >>>\x3e>>> [\da-f]{7,}.*
''', prod.read_tree(c)['g']) ''', prod.read_tree(c)['g'])
# I'd like to fix the conflict so everything is clean and proper *but* # I'd like to fix the conflict so everything is clean and proper *but*
@ -343,11 +345,12 @@ b
assert pr2.comments == [ assert pr2.comments == [
seen(env, pr2, users), seen(env, pr2, users),
(users['user'], re_matches(r'Ping.*CONFLICT', re.DOTALL)), (users['user'], re_matches(r'@%s @%s .*CONFLICT' % (users['user'], users['reviewer']), re.DOTALL)),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], f"All commits must have author and committer email, " (users['user'], f"@{users['user']} @{users['reviewer']} unable to stage: "
"All commits must have author and committer email, "
f"missing email on {pr2_id.head} indicates the " f"missing email on {pr2_id.head} indicates the "
f"authorship is most likely incorrect."), "authorship is most likely incorrect."),
] ]
assert pr2_id.state == 'error' assert pr2_id.state == 'error'
assert not pr2_id.staging_id, "staging should have been rejected" assert not pr2_id.staging_id, "staging should have been rejected"

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import collections import collections
import time
import pytest import pytest
@ -157,12 +158,12 @@ def test_disable(env, config, make_repo, users, enabled):
# responses and we don't care that much # responses and we don't care that much
assert set(pr.comments) == { assert set(pr.comments) == {
(users['reviewer'], "hansen r+\n%s up to" % bot_name), (users['reviewer'], "hansen r+\n%s up to" % bot_name),
(users['other'], "@%s please provide a branch to forward-port to." % users['reviewer']),
(users['reviewer'], "%s up to b" % bot_name), (users['reviewer'], "%s up to b" % bot_name),
(users['other'], "@%s branch 'b' is disabled, it can't be used as a forward port target." % users['reviewer']),
(users['reviewer'], "%s up to foo" % bot_name), (users['reviewer'], "%s up to foo" % bot_name),
(users['other'], "@%s there is no branch 'foo', it can't be used as a forward port target." % users['reviewer']),
(users['reviewer'], "%s up to c" % bot_name), (users['reviewer'], "%s up to c" % bot_name),
(users['other'], "Please provide a branch to forward-port to."),
(users['other'], "Branch 'b' is disabled, it can't be used as a forward port target."),
(users['other'], "There is no branch 'foo', it can't be used as a forward port target."),
(users['other'], "Forward-porting to 'c'."), (users['other'], "Forward-porting to 'c'."),
seen(env, pr, users), seen(env, pr, users),
} }
@ -201,14 +202,13 @@ def test_default_disabled(env, config, make_repo, users):
assert pr2.comments == [ assert pr2.comments == [
seen(env, pr2, users), seen(env, pr2, users),
(users['user'], """\ (users['user'], """\
Ping @%s, @%s @%(user)s @%(reviewer)s this PR targets b and is the last of the forward-port chain.
This PR targets b and is the last of the forward-port chain.
To merge the full chain, say To merge the full chain, say
> @%s r+ > @%(user)s r+
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""" % (users['user'], users['reviewer'], users['user'])), """ % users)
] ]
def test_limit_after_merge(env, config, make_repo, users): def test_limit_after_merge(env, config, make_repo, users):
@ -247,7 +247,7 @@ def test_limit_after_merge(env, config, make_repo, users):
(users['reviewer'], "hansen r+"), (users['reviewer'], "hansen r+"),
seen(env, pr1, users), seen(env, pr1, users),
(users['reviewer'], bot_name + ' up to b'), (users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set before the PR is merged."), (bot_name, "@%s forward-port limit can only be set before the PR is merged." % users['reviewer']),
] ]
assert pr2.comments == [ assert pr2.comments == [
seen(env, pr2, users), seen(env, pr2, users),
@ -257,9 +257,11 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
"""), """),
(users['reviewer'], bot_name + ' up to b'), (users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set on an origin PR" (bot_name, "@%s forward-port limit can only be set on an origin PR"
" (%s here) before it's merged and forward-ported." % p1.display_name " (%s here) before it's merged and forward-ported." % (
), users['reviewer'],
p1.display_name,
)),
] ]
# update pr2 to detach it from pr1 # update pr2 to detach it from pr1
@ -279,10 +281,13 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
env.run_crons() env.run_crons()
assert pr2.comments[4:] == [ assert pr2.comments[4:] == [
(bot_name, "This PR was modified / updated and has become a normal PR. " (bot_name, "@%s @%s this PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @hansen)"), "It should be merged the normal way (via @%s)" % (
users['user'], users['reviewer'],
p2.repository.project_id.github_prefix
)),
(users['reviewer'], bot_name + ' up to b'), (users['reviewer'], bot_name + ' up to b'),
(bot_name, "Sorry, forward-port limit can only be set on an origin PR " (bot_name, f"@{users['reviewer']} forward-port limit can only be set on an origin PR "
f"({p1.display_name} here) before it's merged and forward-ported." f"({p1.display_name} here) before it's merged and forward-ported."
), ),
] ]

View File

@ -125,8 +125,11 @@ def test_straightforward_flow(env, config, make_repo, users):
assert pr.comments == [ assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-ff'), (users['reviewer'], 'hansen r+ rebase-ff'),
seen(env, pr, users), seen(env, pr, users),
(users['user'], 'Merge method set to rebase and fast-forward'), (users['user'], 'Merge method set to rebase and fast-forward.'),
(users['user'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + ', '.join((pr1 | pr2).mapped('display_name'))), (users['user'], '@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % (
users['other'], users['reviewer'],
'\n- '.join((pr1 | pr2).mapped('display_name'))
)),
] ]
assert pr0_ == pr0 assert pr0_ == pr0
@ -148,8 +151,7 @@ def test_straightforward_flow(env, config, make_repo, users):
assert pr2_remote.comments == [ assert pr2_remote.comments == [
seen(env, pr2_remote, users), seen(env, pr2_remote, users),
(users['user'], """\ (users['user'], """\
Ping @%s, @%s @%s @%s this PR targets c and is the last of the forward-port chain containing:
This PR targets c and is the last of the forward-port chain containing:
* %s * %s
To merge the full chain, say To merge the full chain, say
@ -314,11 +316,18 @@ def test_empty(env, config, make_repo, users):
env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK}) env.run_crons('forwardport.reminder', 'runbot_merge.feedback_cron', context={'forwardport_updated_before': FAKE_PREV_WEEK})
awaiting = (
users['other'],
'@%s @%s this pull request has forward-port PRs awaiting action (not merged or closed):\n%s' % (
users['user'], users['reviewer'],
fail_id.display_name
)
)
assert pr1.comments == [ assert pr1.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr1, users), seen(env, pr1, users),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name), awaiting,
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name), awaiting,
], "each cron run should trigger a new message on the ancestor" ], "each cron run should trigger a new message on the ancestor"
# check that this stops if we close the PR # check that this stops if we close the PR
with prod: with prod:
@ -327,8 +336,8 @@ def test_empty(env, config, make_repo, users):
assert pr1.comments == [ assert pr1.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr1, users), seen(env, pr1, users),
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name), awaiting,
(users['other'], 'This pull request has forward-port PRs awaiting action (not merged or closed): ' + fail_id.display_name), awaiting,
] ]
def test_partially_empty(env, config, make_repo): def test_partially_empty(env, config, make_repo):
@ -562,8 +571,7 @@ def test_delegate_fw(env, config, make_repo, users):
assert pr2.comments == [ assert pr2.comments == [
seen(env, pr2, users), seen(env, pr2, users),
(users['user'], '''Ping @{self_reviewer}, @{reviewer} (users['user'], '''@{self_reviewer} @{reviewer} this PR targets c and is the last of the forward-port chain.
This PR targets c and is the last of the forward-port chain.
To merge the full chain, say To merge the full chain, say
> @{user} r+ > @{user} r+
@ -790,11 +798,12 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
""") """)
] ]
def test_closing_after_fp(self, env, config, make_repo): def test_closing_after_fp(self, env, config, make_repo, users):
""" Closing a PR which has been forward-ported should not touch the """ Closing a PR which has been forward-ported should not touch the
followups followups
""" """
prod, other = make_basic(env, config, make_repo) prod, other = make_basic(env, config, make_repo)
project = env['runbot_merge.project'].search([])
with prod: with prod:
[p_1] = prod.make_commits( [p_1] = prod.make_commits(
'a', 'a',
@ -814,23 +823,51 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
# should merge the staging then create the FP PR # should merge the staging then create the FP PR
env.run_crons() env.run_crons()
pr0, pr1 = env['runbot_merge.pull_requests'].search([], order='number') pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number')
with prod: with prod:
prod.post_status(pr1.head, 'success', 'legal/cla') prod.post_status(pr1_id.head, 'success', 'legal/cla')
prod.post_status(pr1.head, 'success', 'ci/runbot') prod.post_status(pr1_id.head, 'success', 'ci/runbot')
# should create the second staging # should create the second staging
env.run_crons() env.run_crons()
pr0_1, pr1_1, pr2_1 = env['runbot_merge.pull_requests'].search([], order='number') pr0_id2, pr1_id2, pr2_id = env['runbot_merge.pull_requests'].search([], order='number')
assert pr0_1 == pr0 assert pr0_id2 == pr0_id
assert pr1_1 == pr1 assert pr1_id2 == pr1_id
pr1 = prod.get_pr(pr1_id.number)
with prod:
pr1.close()
assert pr1_id.state == 'closed'
assert not pr1_id.parent_id
assert pr2_id.state == 'opened'
assert not pr2_id.parent_id, \
"the descendant of a closed PR doesn't really make sense, maybe?"
with prod: with prod:
prod.get_pr(pr1.number).close() pr1.open()
assert pr1_id.state == 'validated'
env.run_crons()
assert pr1.comments[-1] == (
users['user'],
"@{} @{} this PR was closed then reopened. "
"It should be merged the normal way (via @{})".format(
users['user'],
users['reviewer'],
project.github_prefix,
)
)
assert pr1_1.state == 'closed' with prod:
assert not pr1_1.parent_id pr1.post_comment(f'{project.fp_github_name} r+', config['role_reviewer']['token'])
assert pr2_1.state == 'opened' env.run_crons()
assert pr1.comments[-1] == (
users['user'],
"@{} I can only do this on unmodified forward-port PRs, ask {}.".format(
users['reviewer'],
project.github_prefix,
),
)
class TestBranchDeletion: class TestBranchDeletion:
def test_delete_normal(self, env, config, make_repo): def test_delete_normal(self, env, config, make_repo):

View File

@ -44,7 +44,7 @@ This PR targets b and is part of the forward-port chain. Further PRs will be cre
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
''') ''')
ci_warning = (users['user'], 'Ping @%(user)s, @%(reviewer)s\n\nci/runbot failed on this forward-port PR' % users) ci_warning = (users['user'], '@%(user)s @%(reviewer)s ci/runbot failed on this forward-port PR' % users)
# oh no CI of the first FP PR failed! # oh no CI of the first FP PR failed!
# simulate status being sent multiple times (e.g. on multiple repos) with # simulate status being sent multiple times (e.g. on multiple repos) with
@ -103,7 +103,11 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
assert pr1_remote.comments == [ assert pr1_remote.comments == [
seen(env, pr1_remote, users), seen(env, pr1_remote, users),
fp_intermediate, ci_warning, ci_warning, fp_intermediate, ci_warning, ci_warning,
(users['user'], "This PR was modified / updated and has become a normal PR. It should be merged the normal way (via @%s)" % pr1_id.repository.project_id.github_prefix), (users['user'], "@%s @%s this PR was modified / updated and has become a normal PR. "
"It should be merged the normal way (via @%s)" % (
users['user'], users['reviewer'],
pr1_id.repository.project_id.github_prefix
)),
], "users should be warned that the PR has become non-FP" ], "users should be warned that the PR has become non-FP"
# NOTE: should the followup PR wait for pr1 CI or not? # NOTE: should the followup PR wait for pr1 CI or not?
assert pr2_id.head != pr2_head assert pr2_id.head != pr2_head
@ -210,9 +214,13 @@ def test_update_merged(env, make_repo, config, users):
More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
'''), '''),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], """Ancestor PR %s has been updated but this PR is merged and can't be updated to match. (users['user'], """@%s @%s ancestor PR %s has been updated but this PR is merged and can't be updated to match.
You may want or need to manually update any followup PR.""" % pr1_id.display_name) You may want or need to manually update any followup PR.""" % (
users['user'],
users['reviewer'],
pr1_id.display_name,
))
] ]
def test_duplicate_fw(env, make_repo, setreviewers, config, users): def test_duplicate_fw(env, make_repo, setreviewers, config, users):
@ -366,9 +374,10 @@ def test_subsequent_conflict(env, make_repo, config, users):
'g': 'a', 'g': 'a',
'h': re_matches(r'''<<<\x3c<<< HEAD 'h': re_matches(r'''<<<\x3c<<< HEAD
a a
|||||||| parent of [\da-f]{7,}.*
======= =======
conflict! conflict!
>>>\x3e>>> [0-9a-f]{7,}.* >>>\x3e>>> [\da-f]{7,}.*
'''), '''),
'x': '0', 'x': '0',
} }
@ -377,7 +386,7 @@ conflict!
# 2. "forward port chain" bit # 2. "forward port chain" bit
# 3. updated / modified & got detached # 3. updated / modified & got detached
assert pr2.comments[3:] == [ assert pr2.comments[3:] == [
(users['user'], f"WARNING: the latest change ({pr2_id.head}) triggered " (users['user'], f"@{users['user']} WARNING: the latest change ({pr2_id.head}) triggered "
f"a conflict when updating the next forward-port " f"a conflict when updating the next forward-port "
f"({pr3_id.display_name}), and has been ignored.\n\n" f"({pr3_id.display_name}), and has been ignored.\n\n"
f"You will need to update this pull request " f"You will need to update this pull request "
@ -389,7 +398,7 @@ conflict!
# 2. forward-port chain thing # 2. forward-port chain thing
assert repo.get_pr(pr3_id.number).comments[2:] == [ assert repo.get_pr(pr3_id.number).comments[2:] == [
(users['user'], re_matches(f'''\ (users['user'], re_matches(f'''\
WARNING: the update of {pr2_id.display_name} to {pr2_id.head} has caused a \ @{users['user']} WARNING: the update of {pr2_id.display_name} to {pr2_id.head} has caused a \
conflict in this pull request, data may have been lost. conflict in this pull request, data may have been lost.
stdout: stdout:

View File

@ -399,18 +399,20 @@ class TestNotAllBranches:
assert pr_a.comments == [ assert pr_a.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_a, users), seen(env, pr_a, users),
(users['user'], "This pull request can not be forward ported: next " (users['user'], "@%s @%s this pull request can not be forward ported:"
"branch is 'b' but linked pull request %s#%d has a" " next branch is 'b' but linked pull request %s "
" next branch 'c'." % (b.name, pr_b.number) "has a next branch 'c'." % (
) users['user'], users['reviewer'], pr_b_id.display_name,
)),
] ]
assert pr_b.comments == [ assert pr_b.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_b, users), seen(env, pr_b, users),
(users['user'], "This pull request can not be forward ported: next " (users['user'], "@%s @%s this pull request can not be forward ported:"
"branch is 'c' but linked pull request %s#%d has a" " next branch is 'c' but linked pull request %s "
" next branch 'b'." % (a.name, pr_a.number) "has a next branch 'b'." % (
) users['user'], users['reviewer'], pr_a_id.display_name,
)),
] ]
def test_new_intermediate_branch(env, config, make_repo): def test_new_intermediate_branch(env, config, make_repo):
@ -588,9 +590,11 @@ def test_author_can_close_via_fwbot(env, config, make_repo):
pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number') pr0_id, pr1_id = env['runbot_merge.pull_requests'].search([], order='number')
assert pr0_id.number == pr.number assert pr0_id.number == pr.number
pr1 = prod.get_pr(pr1_id.number) pr1 = prod.get_pr(pr1_id.number)
# user can't close PR directly # `other` can't close fw PR directly, because that requires triage (and even
# write depending on account type) access to the repo, which an external
# contributor probably does not have
with prod, pytest.raises(Exception): with prod, pytest.raises(Exception):
pr1.close(other_token) # what the fuck? pr1.close(other_token)
# use can close via fwbot # use can close via fwbot
with prod: with prod:
pr1.post_comment('%s close' % project.fp_github_name, other_token) pr1.post_comment('%s close' % project.fp_github_name, other_token)
@ -755,7 +759,7 @@ def test_approve_draft(env, config, make_repo, users):
assert pr.comments == [ assert pr.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr, users), seen(env, pr, users),
(users['user'], f"I'm sorry, @{users['reviewer']}. Draft PRs can not be approved."), (users['user'], f"I'm sorry, @{users['reviewer']}: draft PRs can not be approved."),
] ]
with prod: with prod:
@ -799,10 +803,9 @@ def test_freeze(env, config, make_repo, users):
assert not w_id.errors assert not w_id.errors
w_id.action_freeze() w_id.action_freeze()
env.run_crons() # stage freeze PRs # run crons to process the feedback, run a second time in case of e.g.
with prod: # forward porting
prod.post_status('staging.post-b', 'success', 'ci/runbot') env.run_crons()
prod.post_status('staging.post-b', 'success', 'legal/cla')
env.run_crons() env.run_crons()
assert release_id.state == 'merged' assert release_id.state == 'merged'

View File

@ -49,7 +49,7 @@ class re_matches:
return self._r.match(text) return self._r.match(text)
def __repr__(self): def __repr__(self):
return '~' + self._r.pattern + '~' return self._r.pattern + '...'
def seen(env, pr, users): def seen(env, pr, users):
return users['user'], f'[Pull request status dashboard]({to_pr(env, pr).url}).' return users['user'], f'[Pull request status dashboard]({to_pr(env, pr).url}).'
@ -126,10 +126,12 @@ def pr_page(page, pr):
return html.fromstring(page(f'/{pr.repo.name}/pull/{pr.number}')) return html.fromstring(page(f'/{pr.repo.name}/pull/{pr.number}'))
def to_pr(env, pr): def to_pr(env, pr):
return env['runbot_merge.pull_requests'].search([ pr = env['runbot_merge.pull_requests'].search([
('repository.name', '=', pr.repo.name), ('repository.name', '=', pr.repo.name),
('number', '=', pr.number), ('number', '=', pr.number),
]) ])
assert len(pr) == 1, f"Expected to find {pr.repo.name}#{pr.number}, got {pr}."
return pr
def part_of(label, pr_id, *, separator='\n\n'): def part_of(label, pr_id, *, separator='\n\n'):
""" Adds the "part-of" pseudo-header in the footer. """ Adds the "part-of" pseudo-header in the footer.

View File

@ -271,7 +271,7 @@ class Runbot(models.AbstractModel):
for pr_number, t in pull_info_failures.items(): for pr_number, t in pull_info_failures.items():
if t + 15*60 < time.time(): if t + 15*60 < time.time():
_logger.warning('Removing %s from pull_info_failures', pr_number) _logger.warning('Removing %s from pull_info_failures', pr_number)
del self.pull_info_failures[pr_number] del pull_info_failures[pr_number]
return manager.get('sleep', default_sleep) return manager.get('sleep', default_sleep)

View File

@ -10,6 +10,8 @@
'views/res_partner.xml', 'views/res_partner.xml',
'views/runbot_merge_project.xml', 'views/runbot_merge_project.xml',
'views/mergebot.xml', 'views/mergebot.xml',
'views/queues.xml',
'views/configuration.xml',
'views/templates.xml', 'views/templates.xml',
'models/project_freeze/views.xml', 'models/project_freeze/views.xml',
], ],

View File

@ -0,0 +1 @@
IMP: show current alerts (disabled crons) on the PR pages

View File

@ -0,0 +1,4 @@
IMP: automatically close PRs when their target branch is deactivated
Leave a message on the PRs to explain, such PRs should also be reopen-able if
the users wants to retarget them.

View File

@ -0,0 +1,4 @@
FIX: correctly handle PR empty PR descriptions
Github's webhook for this case are weird, and weren't handled correctly,
updating a PR's description to *or from* empty might be mishandled.

View File

@ -0,0 +1,3 @@
IMP: review pinging (`@`-notification) of users by the mergebot and forwardbot
The bots should more consistently ping users when they need some sort of action to proceed.

View File

@ -0,0 +1 @@
ADD: automated provisioning of accounts from odoo.com

View File

@ -0,0 +1,8 @@
IMP: various UI items
- more clearly differentiate between "pending" and "unknown" statuses on stagings
- fix "outstanding forward ports" count
- add date of staging last modification (= success / failure instant)
- correctly retrieve and include fast-forward and unstaging reasons
- show the warnings banner (e.g. staging disabled) on the PR pages, as not all
users routinely visit the main dashboard

View File

@ -0,0 +1 @@
FIX: properly unstage pull requests when they're retargeted (base branch is changed)

View File

@ -120,41 +120,49 @@ def handle_pr(env, event):
if not source_branch: if not source_branch:
return handle_pr(env, dict(event, action='opened')) return handle_pr(env, dict(event, action='opened'))
pr_obj = find(source_branch)
updates = {} updates = {}
if source_branch != branch: if source_branch != branch:
updates['target'] = branch.id if branch != pr_obj.target:
updates['squash'] = pr['commits'] == 1 updates['target'] = branch.id
if event['changes'].keys() & {'title', 'body'}: updates['squash'] = pr['commits'] == 1
updates['message'] = "{}\n\n{}".format(pr['title'].strip(), pr['body'].strip())
# turns out github doesn't bother sending a change key if the body is
# changing from empty (None), therefore ignore that entirely, just
# generate the message and check if it changed
message = pr['title'].strip()
body = (pr['body'] or '').strip()
if body:
message += f"\n\n{body}"
if message != pr_obj.message:
updates['message'] = message
_logger.info("update: %s#%d = %s (by %s)", repo.name, pr['number'], updates, event['sender']['login'])
if updates: if updates:
pr_obj = find(source_branch)
pr_obj.write(updates) pr_obj.write(updates)
return 'Updated {}'.format(pr_obj.id) return 'Updated {}'.format(pr_obj.id)
return "Nothing to update ({})".format(event['changes'].keys()) return "Nothing to update ({})".format(event['changes'].keys())
message = None message = None
if not branch: if not branch:
message = f"This PR targets the un-managed branch {r}:{b}, it can not be merged." message = f"This PR targets the un-managed branch {r}:{b}, it needs to be retargeted before it can be merged."
_logger.info("Ignoring event %s on PR %s#%d for un-managed branch %s", _logger.info("Ignoring event %s on PR %s#%d for un-managed branch %s",
event['action'], r, pr['number'], b) event['action'], r, pr['number'], b)
elif not branch.active: elif not branch.active:
message = f"This PR targets the disabled branch {r}:{b}, it can not be merged." message = f"This PR targets the disabled branch {r}:{b}, it needs to be retargeted before it can be merged."
if message and event['action'] not in ('synchronize', 'closed'): if message and event['action'] not in ('synchronize', 'closed'):
feedback(message=message) feedback(message=message)
if not branch: if not branch:
return "Not set up to care about {}:{}".format(r, b) return "Not set up to care about {}:{}".format(r, b)
author_name = pr['user']['login']
author = env['res.partner'].search([('github_login', '=', author_name)], limit=1)
if not author:
author = env['res.partner'].create({
'name': author_name,
'github_login': author_name,
})
_logger.info("%s: %s#%s (%s) (%s)", event['action'], repo.name, pr['number'], pr['title'].strip(), author.github_login) _logger.info("%s: %s#%s (%s) (by %s)", event['action'], repo.name, pr['number'], pr['title'].strip(), event['sender']['login'])
if event['action'] == 'opened': if event['action'] == 'opened':
author_name = pr['user']['login']
author = env['res.partner'].search([('github_login', '=', author_name)], limit=1)
if not author:
env['res.partner'].create({'name': author_name, 'github_login': author_name})
pr_obj = env['runbot_merge.pull_requests']._from_gh(pr) pr_obj = env['runbot_merge.pull_requests']._from_gh(pr)
return "Tracking PR as {}".format(pr_obj.id) return "Tracking PR as {}".format(pr_obj.id)
@ -171,11 +179,7 @@ def handle_pr(env, event):
return "It's my understanding that closed/merged PRs don't get sync'd" return "It's my understanding that closed/merged PRs don't get sync'd"
if pr_obj.state == 'ready': if pr_obj.state == 'ready':
pr_obj.unstage( pr_obj.unstage("updated by %s", event['sender']['login'])
"PR %s updated by %s",
pr_obj.display_name,
event['sender']['login']
)
_logger.info( _logger.info(
"PR %s updated to %s by %s, resetting to 'open' and squash=%s", "PR %s updated to %s by %s, resetting to 'open' and squash=%s",
@ -222,7 +226,7 @@ def handle_pr(env, event):
if pr_obj.state == 'merged': if pr_obj.state == 'merged':
feedback( feedback(
close=True, close=True,
message="@%s ya silly goose you can't reopen a PR that's been merged PR." % event['sender']['login'] message="@%s ya silly goose you can't reopen a merged PR." % event['sender']['login']
) )
if pr_obj.state == 'closed': if pr_obj.state == 'closed':

View File

@ -6,12 +6,9 @@ import pathlib
import markdown import markdown
import markupsafe import markupsafe
import werkzeug.exceptions import werkzeug.exceptions
from lxml import etree
from lxml.builder import ElementMaker
from odoo.http import Controller, route, request from odoo.http import Controller, route, request
A = ElementMaker(namespace="http://www.w3.org/2005/Atom")
LIMIT = 20 LIMIT = 20
class MergebotDashboard(Controller): class MergebotDashboard(Controller):
@route('/runbot_merge', auth="public", type="http", website=True) @route('/runbot_merge', auth="public", type="http", website=True)
@ -22,13 +19,17 @@ class MergebotDashboard(Controller):
@route('/runbot_merge/<int:branch_id>', auth='public', type='http', website=True) @route('/runbot_merge/<int:branch_id>', auth='public', type='http', website=True)
def stagings(self, branch_id, until=None): def stagings(self, branch_id, until=None):
branch = request.env['runbot_merge.branch'].browse(branch_id).sudo().exists()
if not branch:
raise werkzeug.exceptions.NotFound()
stagings = request.env['runbot_merge.stagings'].with_context(active_test=False).sudo().search([ stagings = request.env['runbot_merge.stagings'].with_context(active_test=False).sudo().search([
('target', '=', branch_id), ('target', '=', branch.id),
('staged_at', '<=', until) if until else (True, '=', True), ('staged_at', '<=', until) if until else (True, '=', True),
], order='staged_at desc', limit=LIMIT+1) ], order='staged_at desc', limit=LIMIT+1)
return request.render('runbot_merge.branch_stagings', { return request.render('runbot_merge.branch_stagings', {
'branch': request.env['runbot_merge.branch'].browse(branch_id).sudo(), 'branch': branch,
'stagings': stagings[:LIMIT], 'stagings': stagings[:LIMIT],
'next': stagings[-1].staged_at if len(stagings) > LIMIT else None, 'next': stagings[-1].staged_at if len(stagings) > LIMIT else None,
}) })

View File

@ -1,18 +1,114 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import logging
from odoo.http import Controller, request, route
from odoo import http
from odoo.http import request
try: try:
from odoo.addons.saas_worker.util import from_role from odoo.addons.saas_worker.util import from_role
except ImportError: except ImportError:
def from_role(_): def from_role(_):
return lambda _: None return lambda _: None
_logger = logging.getLogger(__name__)
class MergebotReviewerProvisioning(http.Controller): class MergebotReviewerProvisioning(Controller):
@from_role('accounts')
@route('/runbot_merge/users', type='json', auth='public')
def list_users(self):
env = request.env(su=True)
return [{
'github_login': u.github_login,
'email': u.email,
}
for u in env['res.users'].search([])
if u.github_login
]
@from_role('accounts') @from_role('accounts')
@http.route(['/runbot_merge/get_reviewers'], type='json', auth='public') @route('/runbot_merge/provision', type='json', auth='public')
def provision_user(self, users):
_logger.info('Provisioning %s users: %s.', len(users), ', '.join(map(
'{email} ({github_login})'.format_map,
users
)))
env = request.env(su=True)
Partners = env['res.partner']
Users = env['res.users']
existing_partners = Partners.search([
'|', ('email', 'in', [u['email'] for u in users]),
('github_login', 'in', [u['github_login'] for u in users])
])
_logger.info("Found %d existing matching partners.", len(existing_partners))
partners = {}
for p in existing_partners:
if p.email:
# email is not unique, though we want it to be (probably)
current = partners.get(p.email)
if current:
_logger.warning(
"Lookup conflict: %r set on two partners %r and %r.",
p.email, current.display_name, p.display_name,
)
else:
partners[p.email] = p
if p.github_login:
# assume there can't be an existing one because github_login is
# unique, and should not be able to collide with emails
partners[p.github_login] = p
internal = env.ref('base.group_user')
odoo_provider = env.ref('auth_oauth.provider_openerp')
to_create = []
created = updated = 0
for new in users:
if 'sub' in new:
new['oauth_provider_id'] = odoo_provider.id
new['oauth_uid'] = new.pop('sub')
# prioritise by github_login as that's the unique-est point of information
current = partners.get(new['github_login']) or partners.get(new['email']) or Partners
# entry doesn't have user -> create user
if not current.user_ids:
# skip users without an email (= login) as that
# fails
if not new['email']:
continue
new['login'] = new['email']
new['groups_id'] = [(4, internal.id)]
# entry has partner -> create user linked to existing partner
# (and update partner implicitly)
if current:
new['partner_id'] = current.id
to_create.append(new)
continue
# otherwise update user (if there is anything to update)
user = current.user_ids
if len(user) != 1:
_logger.warning("Got %d users for partner %s.", len(user), current.display_name)
user = user[:1]
update_vals = {
k: v
for k, v in new.items()
if v not in ('login', 'email')
if v != (user[k] if k != 'oauth_provider_id' else user[k].id)
}
if update_vals:
user.write(update_vals)
updated += 1
if to_create:
# only create 100 users at a time to avoid request timeout
Users.create(to_create[:100])
created = len(to_create[:100])
_logger.info("Provisioning: created %d updated %d.", created, updated)
return [created, updated]
@from_role('accounts')
@route(['/runbot_merge/get_reviewers'], type='json', auth='public')
def fetch_reviewers(self, **kwargs): def fetch_reviewers(self, **kwargs):
reviewers = request.env['res.partner.review'].sudo().search([ reviewers = request.env['res.partner.review'].sudo().search([
'|', ('review', '=', True), ('self_review', '=', True) '|', ('review', '=', True), ('self_review', '=', True)
@ -20,7 +116,7 @@ class MergebotReviewerProvisioning(http.Controller):
return reviewers return reviewers
@from_role('accounts') @from_role('accounts')
@http.route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST']) @route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST'])
def update_reviewers(self, github_logins, **kwargs): def update_reviewers(self, github_logins, **kwargs):
partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)]) partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)])
partners.write({ partners.write({
@ -32,3 +128,4 @@ class MergebotReviewerProvisioning(http.Controller):
partners.mapped('user_ids').write({ partners.mapped('user_ids').write({
'groups_id': [(6, 0, [request.env.ref('base.group_portal').id])] 'groups_id': [(6, 0, [request.env.ref('base.group_portal').id])]
}) })
return True

View File

@ -188,7 +188,7 @@ class GH(object):
head = '<Response [%s]: %s)>' % (r.status_code, r.json() if _is_json(r) else r.text) head = '<Response [%s]: %s)>' % (r.status_code, r.json() if _is_json(r) else r.text)
if head == to: if head == to:
_logger.info("Sanity check ref update of %s to %s: ok", branch, to) _logger.debug("Sanity check ref update of %s to %s: ok", branch, to)
return return
_logger.warning("Sanity check ref update of %s, expected %s got %s", branch, to, head) _logger.warning("Sanity check ref update of %s, expected %s got %s", branch, to, head)
@ -202,10 +202,19 @@ class GH(object):
def _wait_for_update(): def _wait_for_update():
if not self._check_updated(branch, sha): if not self._check_updated(branch, sha):
return return
raise exceptions.FastForwardError(self._repo) raise exceptions.FastForwardError(self._repo) \
except requests.HTTPError: from Exception("timeout: never saw %s" % sha)
except requests.HTTPError as e:
_logger.debug('fast_forward(%s, %s, %s) -> ERROR', self._repo, branch, sha, exc_info=True) _logger.debug('fast_forward(%s, %s, %s) -> ERROR', self._repo, branch, sha, exc_info=True)
raise exceptions.FastForwardError(self._repo) if e.response.status_code == 422:
try:
r = e.response.json()
except Exception:
pass
else:
if isinstance(r, dict) and 'message' in r:
e = Exception(r['message'].lower())
raise exceptions.FastForwardError(self._repo) from e
def set_ref(self, branch, sha): def set_ref(self, branch, sha):
# force-update ref # force-update ref
@ -216,7 +225,7 @@ class GH(object):
status0 = r.status_code status0 = r.status_code
_logger.debug( _logger.debug(
'set_ref(update, %s, %s, %s -> %s (%s)', 'ref_set(%s, %s, %s -> %s (%s)',
self._repo, branch, sha, status0, self._repo, branch, sha, status0,
'OK' if status0 == 200 else r.text or r.reason 'OK' if status0 == 200 else r.text or r.reason
) )
@ -231,30 +240,35 @@ class GH(object):
# 422 makes no sense but that's what github returns, leaving 404 just # 422 makes no sense but that's what github returns, leaving 404 just
# in case # in case
status1 = None
if status0 in (404, 422): if status0 in (404, 422):
# fallback: create ref # fallback: create ref
r = self('post', 'git/refs', json={ status1 = self.create_ref(branch, sha)
'ref': 'refs/heads/{}'.format(branch),
'sha': sha,
}, check=False)
status1 = r.status_code
_logger.debug(
'set_ref(create, %s, %s, %s) -> %s (%s)',
self._repo, branch, sha, status1,
'OK' if status1 == 201 else r.text or r.reason
)
if status1 == 201: if status1 == 201:
@utils.backoff(exc=AssertionError)
def _wait_for_update():
head = self._check_updated(branch, sha)
assert not head, "Sanity check ref update of %s, expected %s got %s" % (
branch, sha, head
)
return return
else:
status1 = None
raise AssertionError("set_ref failed(%s, %s)" % (status0, status1)) raise AssertionError("set_ref failed(%s, %s)" % (status0, status1))
def create_ref(self, branch, sha):
r = self('post', 'git/refs', json={
'ref': 'refs/heads/{}'.format(branch),
'sha': sha,
}, check=False)
status = r.status_code
_logger.debug(
'ref_create(%s, %s, %s) -> %s (%s)',
self._repo, branch, sha, status,
'OK' if status == 201 else r.text or r.reason
)
if status == 201:
@utils.backoff(exc=AssertionError)
def _wait_for_update():
head = self._check_updated(branch, sha)
assert not head, \
f"Sanity check ref update of {branch}, expected {sha} got {head}"
return status
def merge(self, sha, dest, message): def merge(self, sha, dest, message):
r = self('post', 'merges', json={ r = self('post', 'merges', json={
'base': dest, 'base': dest,

View File

@ -1,10 +1,13 @@
import contextlib
import enum import enum
import itertools import itertools
import json
import logging import logging
import time import time
from odoo import models, fields, api from odoo import models, fields, api
from odoo.exceptions import UserError from odoo.exceptions import UserError
from odoo.addons.runbot_merge.exceptions import FastForwardError
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
class FreezeWizard(models.Model): class FreezeWizard(models.Model):
@ -12,36 +15,90 @@ class FreezeWizard(models.Model):
_description = "Wizard for freezing a project('s master)" _description = "Wizard for freezing a project('s master)"
project_id = fields.Many2one('runbot_merge.project', required=True) project_id = fields.Many2one('runbot_merge.project', required=True)
errors = fields.Text(compute='_compute_errors')
branch_name = fields.Char(required=True, help="Name of the new branches to create") branch_name = fields.Char(required=True, help="Name of the new branches to create")
release_pr_ids = fields.One2many(
'runbot_merge.project.freeze.prs', 'wizard_id',
string="Release pull requests",
help="Pull requests used as tips for the freeze branches, one per repository"
)
required_pr_ids = fields.Many2many( required_pr_ids = fields.Many2many(
'runbot_merge.pull_requests', string="Required Pull Requests", 'runbot_merge.pull_requests', string="Required Pull Requests",
domain="[('state', 'not in', ('closed', 'merged'))]", domain="[('state', 'not in', ('closed', 'merged'))]",
help="Pull requests which must have been merged before the freeze is allowed", help="Pull requests which must have been merged before the freeze is allowed",
) )
errors = fields.Text(compute='_compute_errors')
release_label = fields.Char()
release_pr_ids = fields.One2many(
'runbot_merge.project.freeze.prs', 'wizard_id',
string="Release pull requests",
help="Pull requests used as tips for the freeze branches, "
"one per repository"
)
bump_label = fields.Char()
bump_pr_ids = fields.One2many(
'runbot_merge.project.freeze.bumps', 'wizard_id',
string="Bump pull requests",
help="Pull requests used as tips of the frozen-off branches, "
"one per repository"
)
_sql_constraints = [ _sql_constraints = [
('unique_per_project', 'unique (project_id)', ('unique_per_project', 'unique (project_id)',
"There should be only one ongoing freeze per project"), "There should be only one ongoing freeze per project"),
] ]
@api.onchange('release_label')
def _onchange_release_label(self):
prs = self.env['runbot_merge.pull_requests'].search([
('label', '=', self.release_label)
])
for release_pr in self.release_pr_ids:
release_pr.pd_id = next((
p.id for p in prs
if p.repository == release_pr.repository_id
), False)
@api.onchange('release_pr_ids')
def _onchange_release_prs(self):
labels = {p.pr_id.label for p in self.release_pr_ids}
self.release_label = len(labels) == 1 and labels.pop()
@api.onchange('bump_label')
def _onchange_bump_label(self):
prs = self.env['runbot_merge.pull_requests'].search([
('label', '=', self.bump_label)
])
for bump_pr in self.bump_pr_ids:
bump_pr.pd_id = next((
p.id for p in prs
if p.repository == bump_pr.repository_id
), False)
@api.onchange('bump_pr_ids')
def _onchange_bump_prs(self):
labels = {p.pr_id.label for p in self.bump_pr_ids}
self.bump_label = len(labels) == 1 and labels.pop()
@api.depends('release_pr_ids.pr_id.label', 'required_pr_ids.state') @api.depends('release_pr_ids.pr_id.label', 'required_pr_ids.state')
def _compute_errors(self): def _compute_errors(self):
errors = [] errors = []
without = self.release_pr_ids.filtered(lambda p: not p.pr_id) without = self.release_pr_ids.filtered(lambda p: not p.pr_id)
if without: if without:
errors.append("* Every repository must have a release PR, missing release PRs for %s." % ', '.join( errors.append("* Every repository must have a release PR, missing release PRs for %s." % ', '.join(
p.repository_id.name for p in without without.mapped('repository_id.name')
)) ))
labels = set(self.mapped('release_pr_ids.pr_id.label')) labels = set(self.mapped('release_pr_ids.pr_id.label'))
if len(labels) != 1: if len(labels) != 1:
errors.append("* All release PRs must have the same label, found %r." % ', '.join(sorted(labels))) errors.append("* All release PRs must have the same label, found %r." % ', '.join(sorted(labels)))
non_squash = self.mapped('release_pr_ids.pr_id').filtered(lambda p: not p.squash)
if non_squash:
errors.append("* Release PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))
bump_labels = set(self.mapped('bump_pr_ids.pr_id.label'))
if len(bump_labels) > 1:
errors.append("* All bump PRs must have the same label, found %r" % ', '.join(sorted(bump_labels)))
non_squash = self.mapped('bump_pr_ids.pr_id').filtered(lambda p: not p.squash)
if non_squash:
errors.append("* Bump PRs should have a single commit, found more in %s." % ', '.join(p.display_name for p in non_squash))
unready = sum(p.state not in ('closed', 'merged') for p in self.required_pr_ids) unready = sum(p.state not in ('closed', 'merged') for p in self.required_pr_ids)
if unready: if unready:
@ -73,6 +130,14 @@ class FreezeWizard(models.Model):
if self.errors: if self.errors:
return self.action_open() return self.action_open()
conflict_crons = self.env.ref('runbot_merge.merge_cron') | self.env.ref('runbot_merge.staging_cron')
# we don't want to run concurrently to the crons above, though we
# don't need to prevent read access to them
self.env.cr.execute(
'SELECT * FROM ir_cron WHERE id =ANY(%s) FOR SHARE NOWAIT',
[conflict_crons.ids]
)
project_id = self.project_id project_id = self.project_id
# need to create the new branch, but at the same time resequence # need to create the new branch, but at the same time resequence
# everything so the new branch is the second one, just after the branch # everything so the new branch is the second one, just after the branch
@ -86,47 +151,136 @@ class FreezeWizard(models.Model):
'sequence': next(seq), 'sequence': next(seq),
}) })
] ]
for s, b in zip(seq, rest): commands.extend((1, b.id, {'sequence': s}) for s, b in zip(seq, rest))
commands.append((1, b.id, {'sequence': s}))
project_id.branch_ids = commands project_id.branch_ids = commands
master_name = master.name
# update release PRs to get merged on the newly created branch gh_sessions = {r: r.github() for r in self.project_id.repo_ids}
new_branch = project_id.branch_ids - master - rest
self.release_pr_ids.mapped('pr_id').write({'target': new_branch.id, 'priority': 0})
# create new branch on every repository # prep new branch (via tmp refs) on every repo
errors = [] rel_heads = {}
repository = None # store for master heads as odds are high the bump pr(s) will be on the
# same repo as one of the release PRs
prevs = {}
for rel in self.release_pr_ids: for rel in self.release_pr_ids:
repository = rel.repository_id repo_id = rel.repository_id
gh = repository.github() gh = gh_sessions[repo_id]
# annoyance: can't directly alias a ref to an other ref, need to try:
# resolve the "old" branch explicitely prev = prevs[repo_id] = gh.head(master_name)
prev = gh('GET', f'git/refs/heads/{master.name}') except Exception:
if not prev.ok: raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
errors.append(f"Unable to resolve branch {master.name} of repository {repository.name} to a commit.")
break # create the tmp branch to merge the PR into
new_branch = gh('POST', 'git/refs', json={ tmp_branch = f'tmp.{self.branch_name}'
'ref': 'refs/heads/' + self.branch_name, try:
'sha': prev.json()['object']['sha'], gh.set_ref(tmp_branch, prev)
}, check=False) except Exception as err:
if not new_branch.ok: raise UserError(f"Unable to create branch {self.branch_name} of repository {repo_id.name}: {err}.")
err = new_branch.json()['message']
errors.append(f"Unable to create branch {master.name} of repository {repository.name}: {err}.") rel_heads[repo_id], _ = gh.rebase(rel.pr_id.number, tmp_branch)
break
time.sleep(1) time.sleep(1)
# if an error occurred during creation, try to clean up then raise error # prep bump
if errors: bump_heads = {}
for r in self.release_pr_ids: for bump in self.bump_pr_ids:
if r.repository_id == repository: repo_id = bump.repository_id
gh = gh_sessions[repo_id]
try:
prev = prevs[repo_id] = prevs.get(repo_id) or gh.head(master_name)
except Exception:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
# create the tmp branch to merge the PR into
tmp_branch = f'tmp.{master_name}'
try:
gh.set_ref(tmp_branch, prev)
except Exception as err:
raise UserError(f"Unable to create branch {master_name} of repository {repo_id.name}: {err}.")
bump_heads[repo_id], _ = gh.rebase(bump.pr_id.number, tmp_branch)
time.sleep(1)
deployed = {}
# at this point we've got a bunch of tmp branches with merged release
# and bump PRs, it's time to update the corresponding targets
to_delete = [] # release prs go on new branches which we try to delete on failure
to_revert = [] # bump prs go on new branch which we try to revert on failure
failure = None
for rel in self.release_pr_ids:
repo_id = rel.repository_id
# helper API currently has no API to ensure we're just creating a
# new branch (as cheaply as possible) so do it by hand
status = None
with contextlib.suppress(Exception):
status = gh_sessions[repo_id].create_ref(self.branch_name, rel_heads[repo_id])
deployed[rel.pr_id.id] = rel_heads[repo_id]
to_delete.append(repo_id)
if status != 201:
failure = ('create', repo_id.name, self.branch_name)
break
else: # all release deployments succeeded
for bump in self.bump_pr_ids:
repo_id = bump.repository_id
try:
gh_sessions[repo_id].fast_forward(master_name, bump_heads[repo_id])
deployed[bump.pr_id.id] = bump_heads[repo_id]
to_revert.append(repo_id)
except FastForwardError:
failure = ('fast-forward', repo_id.name, master_name)
break break
deletion = r.repository_id.github().delete(f'git/refs/heads/{self.branch_name}') if failure:
addendums = []
# creating the branch failed, try to delete all previous branches
failures = []
for prev_id in to_revert:
revert = gh_sessions[prev_id]('PATCH', f'git/refs/heads/{master_name}', json={
'sha': prevs[prev_id],
'force': True
}, check=False)
if not revert.ok:
failures.append(prev_id.name)
if failures:
addendums.append(
"Subsequently unable to revert branches created in %s." % \
', '.join(failures)
)
failures.clear()
for prev_id in to_delete:
deletion = gh_sessions[prev_id]('DELETE', f'git/refs/heads/{self.branch_name}', check=False)
if not deletion.ok: if not deletion.ok:
errors.append(f"Consequently unable to delete branch {self.branch_name} of repository {r.name}.") failures.append(prev_id.name)
time.sleep(1) if failures:
raise UserError('\n'.join(errors)) addendums.append(
"Subsequently unable to delete branches created in %s." % \
", ".join(failures)
)
failures.clear()
if addendums:
addendum = '\n\n' + '\n'.join(addendums)
else:
addendum = ''
reason, repo, branch = failure
raise UserError(
f"Unable to {reason} branch {repo}:{branch}.{addendum}"
)
all_prs = self.release_pr_ids.pr_id | self.bump_pr_ids.pr_id
all_prs.state = 'merged'
self.env['runbot_merge.pull_requests.feedback'].create([{
'repository': pr.repository.id,
'pull_request': pr.number,
'close': True,
'message': json.dumps({
'sha': deployed[pr.id],
'base': self.branch_name if pr in self.release_pr_ids.pr_id else None
})
} for pr in all_prs])
# delete wizard # delete wizard
self.sudo().unlink() self.sudo().unlink()
@ -155,6 +309,31 @@ class ReleasePullRequest(models.Model):
domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]', domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
string="Release Pull Request", string="Release Pull Request",
) )
label = fields.Char(related='pr_id.label')
def write(self, vals):
# only the pr should be writeable after initial creation
assert 'wizard_id' not in vals
assert 'repository_id' not in vals
# and if the PR gets set, it should match the requested repository
if 'pr_id' in vals:
assert self.env['runbot_merge.pull_requests'].browse(vals['pr_id'])\
.repository == self.repository_id
return super().write(vals)
class BumpPullRequest(models.Model):
_name = 'runbot_merge.project.freeze.bumps'
_description = "links to pull requests used to \"bump\" the development branches"
wizard_id = fields.Many2one('runbot_merge.project.freeze', required=True, index=True, ondelete='cascade')
repository_id = fields.Many2one('runbot_merge.repository', required=True)
pr_id = fields.Many2one(
'runbot_merge.pull_requests',
domain='[("repository", "=", repository_id), ("state", "not in", ("closed", "merged"))]',
string="Release Pull Request",
)
label = fields.Char(related='pr_id.label')
def write(self, vals): def write(self, vals):
# only the pr should be writeable after initial creation # only the pr should be writeable after initial creation

View File

@ -23,12 +23,42 @@
</group> </group>
</group> </group>
<group> <group>
<group colspan="2"> <group colspan="2" string="Release">
<p>
Release (freeze) PRs, provide the first commit
of the new branches. Each PR must have a single
commit.
</p>
<p class="alert alert-warning" role="alert">
These PRs will be merged directly, not staged.
</p>
<field name="release_label"/>
<field name="release_pr_ids" nolabel="1"> <field name="release_pr_ids" nolabel="1">
<tree editable="bottom" create="false"> <tree editable="bottom" create="false">
<field name="repository_id" readonly="1"/> <field name="repository_id" readonly="1"/>
<field name="pr_id" options="{'no_create': True}" <field name="pr_id" options="{'no_create': True}"
context="{'pr_include_title': 1}"/> context="{'pr_include_title': 1}"/>
<field name="label"/>
</tree>
</field>
</group>
</group>
<group>
<group colspan="2" string="Bump">
<p>
Bump PRs, provide the first commit of the source
branches after the release has been cut.
</p>
<p class="alert alert-warning" role="alert">
These PRs will be merged directly, not staged.
</p>
<field name="bump_label"/>
<field name="bump_pr_ids" nolabel="1">
<tree editable="bottom" create="false">
<field name="repository_id" readonly="1"/>
<field name="pr_id" options="{'no_create': True}"
context="{'pr_include_title': 1}"/>
<field name="label"/>
</tree> </tree>
</field> </field>
</group> </group>

View File

@ -3,6 +3,7 @@
import ast import ast
import base64 import base64
import collections import collections
import contextlib
import datetime import datetime
import io import io
import itertools import itertools
@ -116,7 +117,7 @@ All substitutions are tentatively applied sequentially to the input.
feedback({ feedback({
'repository': self.id, 'repository': self.id,
'pull_request': number, 'pull_request': number,
'message': "I'm sorry. Branch `{}` is not within my remit.".format(pr['base']['ref']), 'message': "Branch `{}` is not within my remit, imma just ignore it.".format(pr['base']['ref']),
}) })
return return
@ -142,9 +143,11 @@ All substitutions are tentatively applied sequentially to the input.
feedback({ feedback({
'repository': self.id, 'repository': self.id,
'pull_request': number, 'pull_request': number,
'message': "Sorry, I didn't know about this PR and had to retrieve " 'message': "%sI didn't know about this PR and had to retrieve "
"its information, you may have to re-approve it." "its information, you may have to re-approve it as "
"I didn't see previous commands." % pr_id.ping()
}) })
sender = {'login': self.project_id.github_prefix}
# init the PR to the null commit so we can later synchronise it back # init the PR to the null commit so we can later synchronise it back
# back to the "proper" head while resetting reviews # back to the "proper" head while resetting reviews
controllers.handle_pr(self.env, { controllers.handle_pr(self.env, {
@ -154,6 +157,7 @@ All substitutions are tentatively applied sequentially to the input.
'head': {**pr['head'], 'sha': '0'*40}, 'head': {**pr['head'], 'sha': '0'*40},
'state': 'open', 'state': 'open',
}, },
'sender': sender,
}) })
# fetch & set up actual head # fetch & set up actual head
for st in gh.statuses(pr['head']['sha']): for st in gh.statuses(pr['head']['sha']):
@ -175,20 +179,21 @@ All substitutions are tentatively applied sequentially to the input.
'review': item, 'review': item,
'pull_request': pr, 'pull_request': pr,
'repository': {'full_name': self.name}, 'repository': {'full_name': self.name},
'sender': sender,
}) })
else: else:
controllers.handle_comment(self.env, { controllers.handle_comment(self.env, {
'action': 'created', 'action': 'created',
'issue': issue, 'issue': issue,
'sender': item['user'],
'comment': item, 'comment': item,
'repository': {'full_name': self.name}, 'repository': {'full_name': self.name},
'sender': sender,
}) })
# sync to real head # sync to real head
controllers.handle_pr(self.env, { controllers.handle_pr(self.env, {
'action': 'synchronize', 'action': 'synchronize',
'pull_request': pr, 'pull_request': pr,
'sender': {'login': self.project_id.github_prefix} 'sender': sender,
}) })
if pr['state'] == 'closed': if pr['state'] == 'closed':
# don't go through controller because try_closing does weird things # don't go through controller because try_closing does weird things
@ -244,6 +249,23 @@ class Branch(models.Model):
self._table, ['name', 'project_id']) self._table, ['name', 'project_id'])
return res return res
@api.depends('active')
def _compute_display_name(self):
super()._compute_display_name()
for b in self.filtered(lambda b: not b.active):
b.display_name += ' (inactive)'
def write(self, vals):
super().write(vals)
if vals.get('active') is False:
self.env['runbot_merge.pull_requests.feedback'].create([{
'repository': pr.repository.id,
'pull_request': pr.number,
'close': True,
'message': f'{pr.ping()}the target branch {pr.target.name!r} has been disabled, closing this PR.',
} for pr in self.prs])
return True
@api.depends('staging_ids.active') @api.depends('staging_ids.active')
def _compute_active_staging(self): def _compute_active_staging(self):
for b in self: for b in self:
@ -338,14 +360,20 @@ class Branch(models.Model):
_logger.exception("Failed to merge %s into staging branch", pr.display_name) _logger.exception("Failed to merge %s into staging branch", pr.display_name)
if first or isinstance(e, exceptions.Unmergeable): if first or isinstance(e, exceptions.Unmergeable):
if len(e.args) > 1 and e.args[1]: if len(e.args) > 1 and e.args[1]:
message = e.args[1] reason = e.args[1]
else: else:
message = "Unable to stage PR (%s)" % e.__context__ reason = e.__context__
# if the reason is a json document, assume it's a github
# error and try to extract the error message to give it to
# the user
with contextlib.suppress(Exception):
reason = json.loads(str(reason))['message'].lower()
pr.state = 'error' pr.state = 'error'
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id, 'repository': pr.repository.id,
'pull_request': pr.number, 'pull_request': pr.number,
'message': message, 'message': f'{pr.ping()}unable to stage: {reason}',
}) })
else: else:
first = False first = False
@ -534,6 +562,17 @@ class PullRequests(models.Model):
repo_name = fields.Char(related='repository.name') repo_name = fields.Char(related='repository.name')
message_title = fields.Char(compute='_compute_message_title') message_title = fields.Char(compute='_compute_message_title')
def ping(self, author=True, reviewer=True):
P = self.env['res.partner']
s = ' '.join(
f'@{p.github_login}'
for p in (self.author if author else P) | (self.reviewed_by if reviewer else P)
if p
)
if s:
s += ' '
return s
@api.depends('repository.name', 'number') @api.depends('repository.name', 'number')
def _compute_url(self): def _compute_url(self):
base = werkzeug.urls.url_parse(self.env['ir.config_parameter'].sudo().get_param('web.base.url', 'http://localhost:8069')) base = werkzeug.urls.url_parse(self.env['ir.config_parameter'].sudo().get_param('web.base.url', 'http://localhost:8069'))
@ -791,30 +830,31 @@ class PullRequests(models.Model):
msgs = [] msgs = []
for command, param in commands: for command, param in commands:
ok = False ok = False
msg = [] msg = None
if command == 'retry': if command == 'retry':
if is_author: if is_author:
if self.state == 'error': if self.state == 'error':
ok = True ok = True
self.state = 'ready' self.state = 'ready'
else: else:
msg = "Retry makes no sense when the PR is not in error." msg = "retry makes no sense when the PR is not in error."
elif command == 'check': elif command == 'check':
if is_author: if is_author:
self.env['runbot_merge.fetch_job'].create({ self.env['runbot_merge.fetch_job'].create({
'repository': self.repository.id, 'repository': self.repository.id,
'number': self.number, 'number': self.number,
}) })
ok = True
elif command == 'review': elif command == 'review':
if self.draft: if self.draft:
msg = "Draft PRs can not be approved." msg = "draft PRs can not be approved."
elif param and is_reviewer: elif param and is_reviewer:
oldstate = self.state oldstate = self.state
newstate = RPLUS.get(self.state) newstate = RPLUS.get(self.state)
if not author.email: if not author.email:
msg = "I must know your email before you can review PRs. Please contact an administrator." msg = "I must know your email before you can review PRs. Please contact an administrator."
elif not newstate: elif not newstate:
msg = "This PR is already reviewed, reviewing it again is useless." msg = "this PR is already reviewed, reviewing it again is useless."
else: else:
self.state = newstate self.state = newstate
self.reviewed_by = author self.reviewed_by = author
@ -831,7 +871,7 @@ class PullRequests(models.Model):
Feedback.create({ Feedback.create({
'repository': self.repository.id, 'repository': self.repository.id,
'pull_request': self.number, 'pull_request': self.number,
'message': "@{}, you may want to rebuild or fix this PR as it has failed CI.".format(author.github_login), 'message': "@{} you may want to rebuild or fix this PR as it has failed CI.".format(login),
}) })
elif not param and is_author: elif not param and is_author:
newstate = RMINUS.get(self.state) newstate = RMINUS.get(self.state)
@ -845,7 +885,7 @@ class PullRequests(models.Model):
'pull_request': self.number, 'pull_request': self.number,
'message': "PR priority reset to 1, as pull requests with priority 0 ignore review state.", 'message': "PR priority reset to 1, as pull requests with priority 0 ignore review state.",
}) })
self.unstage("unreview (r-) by %s", author.github_login) self.unstage("unreviewed (r-) by %s", login)
ok = True ok = True
else: else:
msg = "r- makes no sense in the current PR state." msg = "r- makes no sense in the current PR state."
@ -874,7 +914,7 @@ class PullRequests(models.Model):
elif command == 'method': elif command == 'method':
if is_reviewer: if is_reviewer:
if param == 'squash' and not self.squash: if param == 'squash' and not self.squash:
msg = "Squash can only be used with a single commit at this time." msg = "squash can only be used with a single commit at this time."
else: else:
self.merge_method = param self.merge_method = param
ok = True ok = True
@ -882,7 +922,7 @@ class PullRequests(models.Model):
Feedback.create({ Feedback.create({
'repository': self.repository.id, 'repository': self.repository.id,
'pull_request': self.number, 'pull_request': self.number,
'message':"Merge method set to %s" % explanation 'message':"Merge method set to %s." % explanation
}) })
elif command == 'override': elif command == 'override':
overridable = author.override_rights\ overridable = author.override_rights\
@ -904,7 +944,7 @@ class PullRequests(models.Model):
c.create({'sha': self.head, 'statuses': '{}'}) c.create({'sha': self.head, 'statuses': '{}'})
ok = True ok = True
else: else:
msg = f"You are not allowed to override this status." msg = "you are not allowed to override this status."
else: else:
# ignore unknown commands # ignore unknown commands
continue continue
@ -919,21 +959,23 @@ class PullRequests(models.Model):
applied.append(reformat(command, param)) applied.append(reformat(command, param))
else: else:
ignored.append(reformat(command, param)) ignored.append(reformat(command, param))
msgs.append(msg or "You can't {}.".format(reformat(command, param))) msgs.append(msg or "you can't {}.".format(reformat(command, param)))
if msgs:
joiner = ' ' if len(msgs) == 1 else '\n- '
msgs.insert(0, "I'm sorry, @{}:".format(login))
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': joiner.join(msgs),
})
msg = [] msg = []
if applied: if applied:
msg.append('applied ' + ' '.join(applied)) msg.append('applied ' + ' '.join(applied))
if ignored: if ignored:
ignoredstr = ' '.join(ignored) ignoredstr = ' '.join(ignored)
msg.append('ignored ' + ignoredstr) msg.append('ignored ' + ignoredstr)
if msgs:
msgs.insert(0, "I'm sorry, @{}.".format(login))
Feedback.create({
'repository': self.repository.id,
'pull_request': self.number,
'message': ' '.join(msgs),
})
return '\n'.join(msg) return '\n'.join(msg)
def _pr_acl(self, user): def _pr_acl(self, user):
@ -1020,7 +1062,7 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': self.repository.id, 'repository': self.repository.id,
'pull_request': self.number, 'pull_request': self.number,
'message': "%r failed on this reviewed PR." % ci, 'message': "%s%r failed on this reviewed PR." % (self.ping(), ci),
}) })
def _auto_init(self): def _auto_init(self):
@ -1089,6 +1131,12 @@ class PullRequests(models.Model):
def write(self, vals): def write(self, vals):
if vals.get('squash'): if vals.get('squash'):
vals['merge_method'] = False vals['merge_method'] = False
prev = None
if 'target' in vals or 'message' in vals:
prev = {
pr.id: {'target': pr.target, 'message': pr.message}
for pr in self
}
w = super().write(vals) w = super().write(vals)
@ -1096,6 +1144,18 @@ class PullRequests(models.Model):
if newhead: if newhead:
c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)]) c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)])
self._validate(json.loads(c.statuses or '{}')) self._validate(json.loads(c.statuses or '{}'))
if prev:
for pr in self:
old_target = prev[pr.id]['target']
if pr.target != old_target:
pr.unstage(
"target (base) branch was changed from %r to %r",
old_target.display_name, pr.target.display_name,
)
old_message = prev[pr.id]['message']
if pr.merge_method in ('merge', 'rebase-merge') and pr.message != old_message:
pr.unstage("merge message updated")
return w return w
def _check_linked_prs_statuses(self, commit=False): def _check_linked_prs_statuses(self, commit=False):
@ -1139,11 +1199,9 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': r.repository.id, 'repository': r.repository.id,
'pull_request': r.number, 'pull_request': r.number,
'message': "Linked pull request(s) {} not ready. Linked PRs are not staged until all of them are ready.".format( 'message': "{}linked pull request(s) {} not ready. Linked PRs are not staged until all of them are ready.".format(
', '.join(map( r.ping(),
'{0.display_name}'.format, ', '.join(map('{0.display_name}'.format, unready))
unready
))
) )
}) })
r.link_warned = True r.link_warned = True
@ -1152,6 +1210,11 @@ class PullRequests(models.Model):
# send feedback for multi-commit PRs without a merge_method (which # send feedback for multi-commit PRs without a merge_method (which
# we've not warned yet) # we've not warned yet)
methods = ''.join(
'* `%s` to %s\n' % pair
for pair in type(self).merge_method.selection
if pair[0] != 'squash'
)
for r in self.search([ for r in self.search([
('state', '=', 'ready'), ('state', '=', 'ready'),
('squash', '=', False), ('squash', '=', False),
@ -1161,10 +1224,9 @@ class PullRequests(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': r.repository.id, 'repository': r.repository.id,
'pull_request': r.number, 'pull_request': r.number,
'message': "Because this PR has multiple commits, I need to know how to merge it:\n\n" + ''.join( 'message': "%sbecause this PR has multiple commits, I need to know how to merge it:\n\n%s" % (
'* `%s` to %s\n' % pair r.ping(),
for pair in type(self).merge_method.selection methods,
if pair[0] != 'squash'
) )
}) })
r.method_warned = True r.method_warned = True
@ -1348,7 +1410,7 @@ class PullRequests(models.Model):
# else remove this batch from the split # else remove this batch from the split
b.split_id = False b.split_id = False
self.staging_id.cancel(reason, *args) self.staging_id.cancel('%s ' + reason, self.display_name, *args)
def _try_closing(self, by): def _try_closing(self, by):
# ignore if the PR is already being updated in a separate transaction # ignore if the PR is already being updated in a separate transaction
@ -1368,11 +1430,7 @@ class PullRequests(models.Model):
''', [self.id]) ''', [self.id])
self.env.cr.commit() self.env.cr.commit()
self.modified(['state']) self.modified(['state'])
self.unstage( self.unstage("closed by %s", by)
"PR %s closed by %s",
self.display_name,
by
)
return True return True
# state changes on reviews # state changes on reviews
@ -1501,20 +1559,24 @@ class Feedback(models.Model):
try: try:
message = f.message message = f.message
if f.close: with contextlib.suppress(json.JSONDecodeError):
gh.close(f.pull_request) data = json.loads(message or '')
try: message = data.get('message')
data = json.loads(message or '')
except json.JSONDecodeError: if data.get('base'):
pass gh('PATCH', f'pulls/{f.pull_request}', json={'base': data['base']})
else:
if f.close:
pr_to_notify = self.env['runbot_merge.pull_requests'].search([ pr_to_notify = self.env['runbot_merge.pull_requests'].search([
('repository', '=', repo.id), ('repository', '=', repo.id),
('number', '=', f.pull_request), ('number', '=', f.pull_request),
]) ])
if pr_to_notify: if pr_to_notify:
pr_to_notify._notify_merged(gh, data) pr_to_notify._notify_merged(gh, data)
message = None
if f.close:
gh.close(f.pull_request)
if message: if message:
gh.comment(f.pull_request, message) gh.comment(f.pull_request, message)
except Exception: except Exception:
@ -1621,6 +1683,17 @@ class Stagings(models.Model):
statuses = fields.Binary(compute='_compute_statuses') statuses = fields.Binary(compute='_compute_statuses')
def name_get(self):
return [
(staging.id, "%d (%s, %s%s)" % (
staging.id,
staging.target.name,
staging.state,
(', ' + staging.reason) if staging.reason else '',
))
for staging in self
]
@api.depends('heads') @api.depends('heads')
def _compute_statuses(self): def _compute_statuses(self):
""" Fetches statuses associated with the various heads, returned as """ Fetches statuses associated with the various heads, returned as
@ -1725,7 +1798,7 @@ class Stagings(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id, 'repository': pr.repository.id,
'pull_request': pr.number, 'pull_request': pr.number,
'message':"Staging failed: %s" % message 'message': "%sstaging failed: %s" % (pr.ping(), message),
}) })
self.batch_ids.write({'active': False}) self.batch_ids.write({'active': False})
@ -1988,8 +2061,9 @@ class Batch(models.Model):
gh = meta[pr.repository]['gh'] gh = meta[pr.repository]['gh']
_logger.info( _logger.info(
"Staging pr %s for target %s; squash=%s", "Staging pr %s for target %s; method=%s",
pr.display_name, pr.target.name, pr.squash pr.display_name, pr.target.name,
pr.merge_method or (pr.squash and 'single') or None
) )
target = 'tmp.{}'.format(pr.target.name) target = 'tmp.{}'.format(pr.target.name)
@ -2027,11 +2101,11 @@ class Batch(models.Model):
self.env['runbot_merge.pull_requests.feedback'].create({ self.env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id, 'repository': pr.repository.id,
'pull_request': pr.number, 'pull_request': pr.number,
'message': "We apparently missed an update to this PR " 'message': "%swe apparently missed an update to this PR "
"and tried to stage it in a state which " "and tried to stage it in a state which "
"might not have been approved. PR has been " "might not have been approved. PR has been "
"updated to %s, please check and approve or " "updated to %s, please check and approve or "
"re-approve." % new_head "re-approve." % (pr.ping(), new_head)
}) })
return self.env['runbot_merge.batch'] return self.env['runbot_merge.batch']
@ -2097,7 +2171,7 @@ def to_status(v):
return v return v
return {'state': v, 'target_url': None, 'description': None} return {'state': v, 'target_url': None, 'description': None}
refline = re.compile(rb'([0-9a-f]{40}) ([^\0\n]+)(\0.*)?\n$') refline = re.compile(rb'([\da-f]{40}) ([^\0\n]+)(\0.*)?\n?$')
ZERO_REF = b'0'*40 ZERO_REF = b'0'*40
def parse_refs_smart(read): def parse_refs_smart(read):
""" yields pkt-line data (bytes), or None for flush lines """ """ yields pkt-line data (bytes), or None for flush lines """
@ -2108,9 +2182,8 @@ def parse_refs_smart(read):
return read(length - 4) return read(length - 4)
header = read_line() header = read_line()
assert header == b'# service=git-upload-pack\n', header assert header.rstrip() == b'# service=git-upload-pack', header
sep = read_line() assert read_line() is None, "failed to find first flush line"
assert sep is None, sep
# read lines until second delimiter # read lines until second delimiter
for line in iter(read_line, None): for line in iter(read_line, None):
if line.startswith(ZERO_REF): if line.startswith(ZERO_REF):

View File

@ -13,6 +13,7 @@ class CIText(fields.Char):
class Partner(models.Model): class Partner(models.Model):
_inherit = 'res.partner' _inherit = 'res.partner'
email = fields.Char(index=True)
github_login = CIText() github_login = CIText()
delegate_reviewer = fields.Many2many('runbot_merge.pull_requests') delegate_reviewer = fields.Many2many('runbot_merge.pull_requests')
formatted_email = fields.Char(string="commit email", compute='_rfc5322_formatted') formatted_email = fields.Char(string="commit email", compute='_rfc5322_formatted')

View File

@ -1,7 +1,8 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
access_runbot_merge_project_admin,Admin access to project,model_runbot_merge_project,runbot_merge.group_admin,1,1,1,1 access_runbot_merge_project_admin,Admin access to project,model_runbot_merge_project,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_project_freeze,Admin access to freeze wizard,model_runbot_merge_project_freeze,runbot_merge.group_admin,1,1,0,0 access_runbot_merge_project_freeze,Admin access to freeze wizard,model_runbot_merge_project_freeze,runbot_merge.group_admin,1,1,0,0
access_runbot_merge_project_freeze_prs,Admin access to freeze wizard prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1 access_runbot_merge_project_freeze_prs,Admin access to freeze wizard release prs,model_runbot_merge_project_freeze_prs,runbot_merge.group_admin,1,1,0,1
access_runbot_merge_project_freeze_bumps,Admin access to freeze wizard bump prs,model_runbot_merge_project_freeze_bumps,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_admin,Admin access to repo,model_runbot_merge_repository,runbot_merge.group_admin,1,1,1,1 access_runbot_merge_repository_admin,Admin access to repo,model_runbot_merge_repository,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_repository_status_admin,Admin access to repo statuses,model_runbot_merge_repository_status,runbot_merge.group_admin,1,1,1,1 access_runbot_merge_repository_status_admin,Admin access to repo statuses,model_runbot_merge_repository_status,runbot_merge.group_admin,1,1,1,1
access_runbot_merge_branch_admin,Admin access to branches,model_runbot_merge_branch,runbot_merge.group_admin,1,1,1,1 access_runbot_merge_branch_admin,Admin access to branches,model_runbot_merge_branch,runbot_merge.group_admin,1,1,1,1

1 id name model_id:id group_id:id perm_read perm_write perm_create perm_unlink
2 access_runbot_merge_project_admin Admin access to project model_runbot_merge_project runbot_merge.group_admin 1 1 1 1
3 access_runbot_merge_project_freeze Admin access to freeze wizard model_runbot_merge_project_freeze runbot_merge.group_admin 1 1 0 0
4 access_runbot_merge_project_freeze_prs Admin access to freeze wizard prs Admin access to freeze wizard release prs model_runbot_merge_project_freeze_prs runbot_merge.group_admin 1 1 0 1
5 access_runbot_merge_project_freeze_bumps Admin access to freeze wizard bump prs model_runbot_merge_project_freeze_bumps runbot_merge.group_admin 1 1 1 1
6 access_runbot_merge_repository_admin Admin access to repo model_runbot_merge_repository runbot_merge.group_admin 1 1 1 1
7 access_runbot_merge_repository_status_admin Admin access to repo statuses model_runbot_merge_repository_status runbot_merge.group_admin 1 1 1 1
8 access_runbot_merge_branch_admin Admin access to branches model_runbot_merge_branch runbot_merge.group_admin 1 1 1 1

View File

@ -40,23 +40,34 @@ h5 { font-size: 1em; }
// mergebot layouting // mergebot layouting
.stagings { .stagings {
display: flex; display: flex;
align-items: stretch; align-items: stretch;
}
.stagings > li {
flex: 1;
padding: 0.1em; > li {
padding-left: 0.5em; flex: 1;
} // prevent content-based autosizing otherwise that's flex' starting point
.stagings > li:not(:last-child) { width: 0;
border-right: 1px solid lightgray;
} padding: 0.1em 0.1em 0.1em 0.5em;
.batch:not(:last-child) {
border-bottom: 1px solid lightgray; &:not(:last-child) {
} border-right: 1px solid lightgray;
.batch a:not(:last-of-type) a:after { }
}
.batch {
// cut off branch names if they can't be line-wrapped and would break the
// layout, works with flex to force all columns to be at the same size
overflow: hidden;
text-overflow: ellipsis;
&:not(:last-child) {
border-bottom: 1px solid lightgray;
}
}
.batch a:not(:last-of-type) a:after {
content: ","; content: ",";
}
} }
.pr-listing > * { display: inline-block; } .pr-listing > * { display: inline-block; }
.pr-awaiting { opacity: 0.8; } .pr-awaiting { opacity: 0.8; }
@ -84,3 +95,7 @@ dl.runbot-merge-fields {
@extend .col-sm-10; @extend .col-sm-10;
} }
} }
.staging-statuses {
cursor: wait;
}

View File

@ -6,7 +6,7 @@ import time
from unittest import mock from unittest import mock
import pytest import pytest
from lxml import html from lxml import html, etree
import odoo import odoo
from utils import _simple_init, seen, re_matches, get_partner, Commit, pr_page, to_pr, part_of from utils import _simple_init, seen, re_matches, get_partner, Commit, pr_page, to_pr, part_of
@ -26,21 +26,34 @@ def repo(env, project, make_repo, users, setreviewers):
def test_trivial_flow(env, repo, page, users, config): def test_trivial_flow(env, repo, page, users, config):
# create base branch # create base branch
with repo: with repo:
m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) [m] = repo.make_commits(None, Commit("initial", tree={'a': 'some content'}), ref='heads/master')
repo.make_ref('heads/master', m)
# create PR with 2 commits # create PR with 2 commits
c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) _, c1 = repo.make_commits(
c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) m,
pr = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c1,) Commit('replace file contents', tree={'a': 'some other content'}),
Commit('add file', tree={'b': 'a second file'}),
ref='heads/other'
)
pr = repo.make_pr(title="gibberish", body="blahblah", target='master', head='other')
pr_id = to_pr(env, pr) pr_id = to_pr(env, pr)
assert pr_id.state == 'opened' assert pr_id.state == 'opened'
env.run_crons() env.run_crons()
assert pr.comments == [seen(env, pr, users)] assert pr.comments == [seen(env, pr, users)]
s = pr_page(page, pr).cssselect('.alert-info > ul > li')
pr_dashboard = pr_page(page, pr)
s = pr_dashboard.cssselect('.alert-info > ul > li')
assert [it.get('class') for it in s] == ['fail', 'fail', ''],\ assert [it.get('class') for it in s] == ['fail', 'fail', ''],\
"merge method unset, review missing, no CI" "merge method unset, review missing, no CI"
assert dict(zip(
[e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dt')],
[e.text_content() for e in pr_dashboard.cssselect('dl.runbot-merge-fields dd')],
)) == {
'label': f"{config['github']['owner']}:other",
'head': c1,
'target': 'master',
}
with repo: with repo:
repo.post_status(c1, 'success', 'legal/cla') repo.post_status(c1, 'success', 'legal/cla')
@ -63,7 +76,6 @@ def test_trivial_flow(env, repo, page, users, config):
] ]
assert statuses == [('legal/cla', 'ok'), ('ci/runbot', 'ok')] assert statuses == [('legal/cla', 'ok'), ('ci/runbot', 'ok')]
with repo: with repo:
pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) pr.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token'])
assert pr_id.state == 'ready' assert pr_id.state == 'ready'
@ -443,8 +455,8 @@ def test_staging_conflict_first(env, repo, users, config, page):
assert pr.comments == [ assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-merge'), (users['reviewer'], 'hansen r+ rebase-merge'),
seen(env, pr, users), seen(env, pr, users),
(users['user'], 'Merge method set to rebase and merge, using the PR as merge commit message'), (users['user'], 'Merge method set to rebase and merge, using the PR as merge commit message.'),
(users['user'], re_matches('^Unable to stage PR')), (users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users),
] ]
dangerbox = pr_page(page, pr).cssselect('.alert-danger span') dangerbox = pr_page(page, pr).cssselect('.alert-danger span')
@ -566,15 +578,15 @@ def test_staging_ci_failure_single(env, repo, users, config, page):
assert pr.comments == [ assert pr.comments == [
(users['reviewer'], 'hansen r+ rebase-merge'), (users['reviewer'], 'hansen r+ rebase-merge'),
seen(env, pr, users), seen(env, pr, users),
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message"), (users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."),
(users['user'], 'Staging failed: ci/runbot') (users['user'], '@%(user)s @%(reviewer)s staging failed: ci/runbot' % users)
] ]
dangerbox = pr_page(page, pr).cssselect('.alert-danger span') dangerbox = pr_page(page, pr).cssselect('.alert-danger span')
assert dangerbox assert dangerbox
assert dangerbox[0].text == 'ci/runbot' assert dangerbox[0].text == 'ci/runbot'
def test_ff_failure(env, repo, config): def test_ff_failure(env, repo, config, page):
""" target updated while the PR is being staged => redo staging """ """ target updated while the PR is being staged => redo staging """
with repo: with repo:
m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
@ -587,10 +599,11 @@ def test_ff_failure(env, repo, config):
repo.post_status(prx.head, 'success', 'ci/runbot') repo.post_status(prx.head, 'success', 'ci/runbot')
prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token'])
env.run_crons() env.run_crons()
assert env['runbot_merge.pull_requests'].search([ st = env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name), ('repository.name', '=', repo.name),
('number', '=', prx.number) ('number', '=', prx.number)
]).staging_id ]).staging_id
assert st
with repo: with repo:
m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'}) m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'})
@ -603,6 +616,14 @@ def test_ff_failure(env, repo, config):
repo.post_status(staging.id, 'success', 'ci/runbot') repo.post_status(staging.id, 'success', 'ci/runbot')
env.run_crons() env.run_crons()
assert st.reason == 'update is not a fast forward'
# check that it's added as title on the staging
doc = html.fromstring(page('/runbot_merge'))
_new, prev = doc.cssselect('li.staging')
assert 'bg-gray-lighter' in prev.classes, "ff failure is ~ cancelling"
assert prev.get('title') == re_matches('fast forward failed \(update is not a fast forward\)')
assert env['runbot_merge.pull_requests'].search([ assert env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name), ('repository.name', '=', repo.name),
('number', '=', prx.number) ('number', '=', prx.number)
@ -684,7 +705,7 @@ def test_ff_failure_batch(env, repo, users, config):
} }
class TestPREdition: class TestPREdition:
def test_edit(self, env, repo): def test_edit(self, env, repo, config):
""" Editing PR: """ Editing PR:
* title (-> message) * title (-> message)
@ -705,15 +726,29 @@ class TestPREdition:
c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'})
prx = repo.make_pr(title='title', body='body', target='master', head=c2) prx = repo.make_pr(title='title', body='body', target='master', head=c2)
repo.post_status(prx.head, 'success', 'legal/cla')
repo.post_status(prx.head, 'success', 'ci/runbot')
prx.post_comment('hansen rebase-ff r+', config['role_reviewer']['token'])
env.run_crons()
pr = env['runbot_merge.pull_requests'].search([ pr = env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name), ('repository.name', '=', repo.name),
('number', '=', prx.number) ('number', '=', prx.number),
]) ])
assert pr.state == 'ready'
st = pr.staging_id
assert st
assert pr.message == 'title\n\nbody' assert pr.message == 'title\n\nbody'
with repo: prx.title = "title 2" with repo: prx.title = "title 2"
assert pr.message == 'title 2\n\nbody' assert pr.message == 'title 2\n\nbody'
with repo: prx.body = None
assert pr.message == "title 2"
assert pr.staging_id, \
"message edition does not affect staging of rebased PRs"
with repo: prx.base = '1.0' with repo: prx.base = '1.0'
assert pr.target == branch_1 assert pr.target == branch_1
assert not pr.staging_id, "updated the base of a staged PR should have unstaged it"
assert st.reason == f"{pr.display_name} target (base) branch was changed from 'master' to '1.0'"
with repo: prx.base = '2.0' with repo: prx.base = '2.0'
assert not pr.exists() assert not pr.exists()
@ -949,9 +984,9 @@ def test_ci_failure_after_review(env, repo, users, config):
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "'ci/runbot' failed on this reviewed PR.".format_map(users)), (users['user'], "@{user} @{reviewer} 'ci/runbot' failed on this reviewed PR.".format_map(users)),
(users['user'], "'legal/cla' failed on this reviewed PR.".format_map(users)), (users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)),
(users['user'], "'legal/cla' failed on this reviewed PR.".format_map(users)), (users['user'], "@{user} @{reviewer} 'legal/cla' failed on this reviewed PR.".format_map(users)),
] ]
def test_reopen_merged_pr(env, repo, config, users): def test_reopen_merged_pr(env, repo, config, users):
@ -995,7 +1030,7 @@ def test_reopen_merged_pr(env, repo, config, users):
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "@%s ya silly goose you can't reopen a PR that's been merged PR." % users['other']) (users['user'], "@%s ya silly goose you can't reopen a merged PR." % users['other'])
] ]
class TestNoRequiredStatus: class TestNoRequiredStatus:
@ -1191,7 +1226,7 @@ class TestRetry:
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen retry'), (users['reviewer'], 'hansen retry'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "I'm sorry, @{}. Retry makes no sense when the PR is not in error.".format(users['reviewer'])), (users['user'], "I'm sorry, @{reviewer}: retry makes no sense when the PR is not in error.".format_map(users)),
] ]
@pytest.mark.parametrize('disabler', ['user', 'other', 'reviewer']) @pytest.mark.parametrize('disabler', ['user', 'other', 'reviewer'])
@ -1387,12 +1422,13 @@ class TestMergeMethod:
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], """Because this PR has multiple commits, I need to know how to merge it: (users['user'], """@{user} @{reviewer} because this PR has multiple \
commits, I need to know how to merge it:
* `merge` to merge directly, using the PR as merge commit message * `merge` to merge directly, using the PR as merge commit message
* `rebase-merge` to rebase and merge, using the PR as merge commit message * `rebase-merge` to rebase and merge, using the PR as merge commit message
* `rebase-ff` to rebase and fast-forward * `rebase-ff` to rebase and fast-forward
"""), """.format_map(users)),
] ]
def test_pr_method_no_review(self, repo, env, users, config): def test_pr_method_no_review(self, repo, env, users, config):
@ -1432,11 +1468,11 @@ class TestMergeMethod:
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen rebase-merge'), (users['reviewer'], 'hansen rebase-merge'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "Merge method set to rebase and merge, using the PR as merge commit message"), (users['user'], "Merge method set to rebase and merge, using the PR as merge commit message."),
(users['reviewer'], 'hansen merge'), (users['reviewer'], 'hansen merge'),
(users['user'], "Merge method set to merge directly, using the PR as merge commit message"), (users['user'], "Merge method set to merge directly, using the PR as merge commit message."),
(users['reviewer'], 'hansen rebase-ff'), (users['reviewer'], 'hansen rebase-ff'),
(users['user'], "Merge method set to rebase and fast-forward"), (users['user'], "Merge method set to rebase and fast-forward."),
] ]
def test_pr_rebase_merge(self, repo, env, users, config): def test_pr_rebase_merge(self, repo, env, users, config):
@ -1498,6 +1534,20 @@ class TestMergeMethod:
frozenset([nm2, nb1]) frozenset([nm2, nb1])
) )
assert staging == merge_head assert staging == merge_head
st = pr_id.staging_id
assert st
with repo: prx.title = 'title 2'
assert not pr_id.staging_id, "updating the message of a merge-staged PR should unstage rien"
assert st.reason == f'{pr_id.display_name} merge message updated'
# since we updated the description, the merge_head value is impacted,
# and it's checked again later on
merge_head = (
merge_head[0].replace('title', 'title 2'),
merge_head[1],
)
env.run_crons()
assert pr_id.staging_id, "PR should immediately be re-stageable"
with repo: with repo:
repo.post_status('heads/staging.master', 'success', 'legal/cla') repo.post_status('heads/staging.master', 'success', 'legal/cla')
@ -1990,7 +2040,7 @@ Part-of: {pr_id.display_name}"""
assert pr1.comments == [ assert pr1.comments == [
seen(env, pr1, users), seen(env, pr1, users),
(users['reviewer'], 'hansen r+ squash'), (users['reviewer'], 'hansen r+ squash'),
(users['user'], 'Merge method set to squash') (users['user'], 'Merge method set to squash.')
] ]
merged_head = repo.commit('master') merged_head = repo.commit('master')
assert merged_head.message == f"""first pr assert merged_head.message == f"""first pr
@ -2014,13 +2064,13 @@ Signed-off-by: {get_partner(env, users["reviewer"]).formatted_email}\
assert pr2.comments == [ assert pr2.comments == [
seen(env, pr2, users), seen(env, pr2, users),
(users['reviewer'], 'hansen r+ squash'), (users['reviewer'], 'hansen r+ squash'),
(users['user'], f"I'm sorry, @{users['reviewer']}. Squash can only be used with a single commit at this time."), (users['user'], f"I'm sorry, @{users['reviewer']}: squash can only be used with a single commit at this time."),
(users['user'], """Because this PR has multiple commits, I need to know how to merge it: (users['user'], """@{user} @{reviewer} because this PR has multiple commits, I need to know how to merge it:
* `merge` to merge directly, using the PR as merge commit message * `merge` to merge directly, using the PR as merge commit message
* `rebase-merge` to rebase and merge, using the PR as merge commit message * `rebase-merge` to rebase and merge, using the PR as merge commit message
* `rebase-ff` to rebase and fast-forward * `rebase-ff` to rebase and fast-forward
""") """.format_map(users))
] ]
@pytest.mark.xfail(reason="removed support for squash- command") @pytest.mark.xfail(reason="removed support for squash- command")
@ -2683,14 +2733,16 @@ class TestBatching(object):
repo.make_ref('heads/master', m) repo.make_ref('heads/master', m)
pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
p_1 = to_pr(env, pr1)
pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
p_2 = to_pr(env, pr2)
env.run_crons() env.run_crons()
p_1 = to_pr(env, pr1)
p_2 = to_pr(env, pr2)
st = env['runbot_merge.stagings'].search([]) st = env['runbot_merge.stagings'].search([])
# both prs should be part of the staging # both prs should be part of the staging
assert st.mapped('batch_ids.prs') == p_1 | p_2 assert st.mapped('batch_ids.prs') == p_1 | p_2
# add CI failure # add CI failure
with repo: with repo:
repo.post_status('heads/staging.master', 'failure', 'ci/runbot') repo.post_status('heads/staging.master', 'failure', 'ci/runbot')
@ -2824,7 +2876,7 @@ class TestReviewing(object):
(users['user'], "I'm sorry, @{}. I'm afraid I can't do that.".format(users['other'])), (users['user'], "I'm sorry, @{}. I'm afraid I can't do that.".format(users['other'])),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], "I'm sorry, @{}. This PR is already reviewed, reviewing it again is useless.".format( (users['user'], "I'm sorry, @{}: this PR is already reviewed, reviewing it again is useless.".format(
users['reviewer'])), users['reviewer'])),
] ]
@ -2853,7 +2905,7 @@ class TestReviewing(object):
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "I'm sorry, @{}. You can't review+.".format(users['reviewer'])), (users['user'], "I'm sorry, @{}: you can't review+.".format(users['reviewer'])),
] ]
def test_self_review_success(self, env, repo, users, config): def test_self_review_success(self, env, repo, users, config):
@ -3009,7 +3061,7 @@ class TestReviewing(object):
seen(env, pr, users), seen(env, pr, users),
(users['reviewer'], 'hansen delegate+'), (users['reviewer'], 'hansen delegate+'),
(users['user'], 'hansen r+'), (users['user'], 'hansen r+'),
(users['user'], f"I'm sorry, @{users['user']}. I must know your email before you can review PRs. Please contact an administrator."), (users['user'], f"I'm sorry, @{users['user']}: I must know your email before you can review PRs. Please contact an administrator."),
] ]
user_partner.fetch_github_email() user_partner.fetch_github_email()
assert user_partner.email assert user_partner.email
@ -3073,9 +3125,9 @@ class TestUnknownPR:
seen(env, prx, users), seen(env, prx, users),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], "Sorry, I didn't know about this PR and had to " (users['user'], "I didn't know about this PR and had to "
"retrieve its information, you may have to " "retrieve its information, you may have to "
"re-approve it."), "re-approve it as I didn't see previous commands."),
seen(env, prx, users), seen(env, prx, users),
] ]
@ -3126,9 +3178,9 @@ class TestUnknownPR:
assert pr.comments == [ assert pr.comments == [
seen(env, pr, users), seen(env, pr, users),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], "Sorry, I didn't know about this PR and had to " (users['user'], "I didn't know about this PR and had to retrieve "
"retrieve its information, you may have to " "its information, you may have to re-approve it "
"re-approve it."), "as I didn't see previous commands."),
seen(env, pr, users), seen(env, pr, users),
] ]
@ -3153,8 +3205,8 @@ class TestUnknownPR:
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], "This PR targets the un-managed branch %s:branch, it can not be merged." % repo.name), (users['user'], "This PR targets the un-managed branch %s:branch, it needs to be retargeted before it can be merged." % repo.name),
(users['user'], "I'm sorry. Branch `branch` is not within my remit."), (users['user'], "Branch `branch` is not within my remit, imma just ignore it."),
] ]
def test_rplus_review_unmanaged(self, env, repo, users, config): def test_rplus_review_unmanaged(self, env, repo, users, config):
@ -3551,7 +3603,7 @@ class TestFeedback:
assert prx.comments == [ assert prx.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, prx, users), seen(env, prx, users),
(users['user'], "'ci/runbot' failed on this reviewed PR.") (users['user'], "@%(user)s @%(reviewer)s 'ci/runbot' failed on this reviewed PR." % users)
] ]
def test_review_failed(self, repo, env, users, config): def test_review_failed(self, repo, env, users, config):
@ -3581,9 +3633,11 @@ class TestFeedback:
assert prx.comments == [ assert prx.comments == [
seen(env, prx, users), seen(env, prx, users),
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
(users['user'], "@%s, you may want to rebuild or fix this PR as it has failed CI." % users['reviewer']) (users['user'], "@%s you may want to rebuild or fix this PR as it has failed CI." % users['reviewer'])
] ]
class TestInfrastructure: class TestInfrastructure:
@pytest.mark.skip(reason="Don't want to implement")
def test_protection(self, repo): def test_protection(self, repo):
""" force-pushing on a protected ref should fail """ force-pushing on a protected ref should fail
""" """

View File

@ -1,6 +1,6 @@
from utils import seen, Commit from utils import seen, Commit, pr_page
def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, config, users): def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, config, users, page):
""" PRs to disabled branches are ignored, but what if the PR exists *before* """ PRs to disabled branches are ignored, but what if the PR exists *before*
the branch is disabled? the branch is disabled?
""" """
@ -13,7 +13,8 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
repo_id = env['runbot_merge.repository'].create({ repo_id = env['runbot_merge.repository'].create({
'project_id': project.id, 'project_id': project.id,
'name': repo.name, 'name': repo.name,
'status_ids': [(0, 0, {'context': 'status'})] 'status_ids': [(0, 0, {'context': 'status'})],
'group_id': False,
}) })
setreviewers(*project.repo_ids) setreviewers(*project.repo_ids)
@ -25,42 +26,57 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
[c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'})) [c] = repo.make_commits(ot, Commit('wheee', tree={'b': '2'}))
pr = repo.make_pr(title="title", body='body', target='other', head=c) pr = repo.make_pr(title="title", body='body', target='other', head=c)
repo.post_status(c, 'success', 'status') repo.post_status(c, 'success', 'status')
pr.post_comment('hansen r+', config['role_reviewer']['token'])
env.run_crons()
pr_id = env['runbot_merge.pull_requests'].search([ pr_id = env['runbot_merge.pull_requests'].search([
('repository', '=', repo_id.id), ('repository', '=', repo_id.id),
('number', '=', pr.number), ('number', '=', pr.number),
]) ])
branch_id = pr_id.target
assert pr_id.staging_id
staging_id = branch_id.active_staging_id
assert staging_id == pr_id.staging_id
# disable branch "other" # disable branch "other"
project.branch_ids.filtered(lambda b: b.name == 'other').active = False branch_id.active = False
# r+ the PR
with repo:
pr.post_comment('hansen r+', config['role_reviewer']['token'])
env.run_crons() env.run_crons()
# nothing should happen, the PR should be unstaged forever, maybe? assert not branch_id.active_staging_id
assert pr_id.state == 'ready' assert staging_id.state == 'cancelled', \
"closing the PRs should have canceled the staging"
p = pr_page(page, pr)
target = dict(zip(
(e.text for e in p.cssselect('dl.runbot-merge-fields dt')),
(p.cssselect('dl.runbot-merge-fields dd'))
))['target']
assert target.text_content() == 'other (inactive)'
assert target.get('class') == 'text-muted bg-warning'
# the PR should have been closed implicitly
assert pr_id.state == 'closed'
assert not pr_id.staging_id assert not pr_id.staging_id
with repo:
pr.open()
pr.post_comment('hansen r+', config['role_reviewer']['token'])
assert pr_id.state == 'ready', "pr should be reopenable"
env.run_crons()
assert pr.comments == [
(users['reviewer'], "hansen r+"),
seen(env, pr, users),
(users['user'], "@%(user)s @%(reviewer)s the target branch 'other' has been disabled, closing this PR." % users),
(users['reviewer'], "hansen r+"),
(users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
]
with repo: with repo:
[c2] = repo.make_commits(ot, Commit('wheee', tree={'b': '3'})) [c2] = repo.make_commits(ot, Commit('wheee', tree={'b': '3'}))
repo.update_ref(pr.ref, c2, force=True) repo.update_ref(pr.ref, c2, force=True)
assert pr_id.head == c2, "pr should be aware of its update" assert pr_id.head == c2, "pr should be aware of its update"
with repo:
pr.close()
assert pr_id.state == 'closed', "pr should be closeable"
with repo:
pr.open()
assert pr_id.state == 'opened', "pr should be reopenable (state reset due to force push"
env.run_crons()
assert pr.comments == [
(users['reviewer'], "hansen r+"),
seen(env, pr, users),
(users['user'], "This PR targets the disabled branch %s:other, it can not be merged." % repo.name),
], "reopening a PR to an inactive branch should send feedback, but not closing it"
with repo: with repo:
pr.base = 'other2' pr.base = 'other2'
repo.post_status(c2, 'success', 'status') repo.post_status(c2, 'success', 'status')
@ -96,7 +112,7 @@ def test_new_pr_no_branch(env, project, make_repo, setreviewers, users):
('number', '=', pr.number), ('number', '=', pr.number),
]), "the PR should not have been created in the backend" ]), "the PR should not have been created in the backend"
assert pr.comments == [ assert pr.comments == [
(users['user'], "This PR targets the un-managed branch %s:other, it can not be merged." % repo.name), (users['user'], "This PR targets the un-managed branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
] ]
def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users): def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users):
@ -131,45 +147,6 @@ def test_new_pr_disabled_branch(env, project, make_repo, setreviewers, users):
assert pr_id, "the PR should have been created in the backend" assert pr_id, "the PR should have been created in the backend"
assert pr_id.state == 'opened' assert pr_id.state == 'opened'
assert pr.comments == [ assert pr.comments == [
(users['user'], "This PR targets the disabled branch %s:other, it can not be merged." % repo.name), (users['user'], "This PR targets the disabled branch %s:other, it needs to be retargeted before it can be merged." % repo.name),
seen(env, pr, users), seen(env, pr, users),
] ]
def test_retarget_from_disabled(env, make_repo, project, setreviewers):
""" Retargeting a PR from a disabled branch should not duplicate the PR
"""
repo = make_repo('repo')
project.write({'branch_ids': [(0, 0, {'name': '1.0'}), (0, 0, {'name': '2.0'})]})
repo_id = env['runbot_merge.repository'].create({
'project_id': project.id,
'name': repo.name,
'required_statuses': 'legal/cla,ci/runbot',
})
setreviewers(repo_id)
with repo:
[c0] = repo.make_commits(None, Commit('0', tree={'a': '0'}), ref='heads/1.0')
[c1] = repo.make_commits(c0, Commit('1', tree={'a': '1'}), ref='heads/2.0')
repo.make_commits(c1, Commit('2', tree={'a': '2'}), ref='heads/master')
# create PR on 1.0
repo.make_commits(c0, Commit('c', tree={'a': '0', 'b': '0'}), ref='heads/changes')
prx = repo.make_pr(head='changes', target='1.0')
branch_1 = project.branch_ids.filtered(lambda b: b.name == '1.0')
# there should only be a single PR in the system at this point
[pr] = env['runbot_merge.pull_requests'].search([])
assert pr.target == branch_1
# branch 1 is EOL, disable it
branch_1.active = False
with repo:
# we forgot we had active PRs for it, and we may want to merge them
# still, retarget them!
prx.base = '2.0'
# check that we still only have one PR in the system
[pr_] = env['runbot_merge.pull_requests'].search([])
# and that it's the same as the old one, just edited with a new target
assert pr_ == pr
assert pr.target == project.branch_ids.filtered(lambda b: b.name == '2.0')

View File

@ -7,12 +7,13 @@ are staged concurrently in all repos
""" """
import json import json
import time import time
import xmlrpc.client
import pytest import pytest
import requests import requests
from lxml.etree import XPath, tostring from lxml.etree import XPath
from utils import seen, re_matches, get_partner, pr_page, to_pr, Commit from utils import seen, get_partner, pr_page, to_pr, Commit
@pytest.fixture @pytest.fixture
@ -429,7 +430,7 @@ def test_merge_fail(env, project, repo_a, repo_b, users, config):
assert pr1b.comments == [ assert pr1b.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr1b, users), seen(env, pr1b, users),
(users['user'], re_matches('^Unable to stage PR')), (users['user'], '@%(user)s @%(reviewer)s unable to stage: merge conflict' % users),
] ]
other = to_pr(env, pr1a) other = to_pr(env, pr1a)
reviewer = get_partner(env, users["reviewer"]).formatted_email reviewer = get_partner(env, users["reviewer"]).formatted_email
@ -527,14 +528,22 @@ class TestCompanionsNotReady:
assert p_a.comments == [ assert p_a.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, p_a, users), seen(env, p_a, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)), (users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready." % (
users['user'],
users['reviewer'],
pr_b.display_name,
)),
] ]
# ensure the message is only sent once per PR # ensure the message is only sent once per PR
env.run_crons('runbot_merge.check_linked_prs_status') env.run_crons('runbot_merge.check_linked_prs_status')
assert p_a.comments == [ assert p_a.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, p_a, users), seen(env, p_a, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)), (users['user'], "@%s @%s linked pull request(s) %s not ready. Linked PRs are not staged until all of them are ready." % (
users['user'],
users['reviewer'],
pr_b.display_name,
)),
] ]
assert p_b.comments == [seen(env, p_b, users)] assert p_b.comments == [seen(env, p_b, users)]
@ -570,7 +579,8 @@ class TestCompanionsNotReady:
assert pr_b.comments == [ assert pr_b.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_b, users), seen(env, pr_b, users),
(users['user'], "Linked pull request(s) %s#%d, %s#%d not ready. Linked PRs are not staged until all of them are ready." % ( (users['user'], "@%s @%s linked pull request(s) %s#%d, %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number, repo_a.name, pr_a.number,
repo_c.name, pr_c.number repo_c.name, pr_c.number
)) ))
@ -609,7 +619,8 @@ class TestCompanionsNotReady:
assert pr_b.comments == [ assert pr_b.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_b, users), seen(env, pr_b, users),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % ( (users['user'], "@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number repo_a.name, pr_a.number
)) ))
] ]
@ -617,7 +628,8 @@ class TestCompanionsNotReady:
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_c, users), seen(env, pr_c, users),
(users['user'], (users['user'],
"Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % ( "@%s @%s linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (
users['user'], users['reviewer'],
repo_a.name, pr_a.number repo_a.name, pr_a.number
)) ))
] ]
@ -655,7 +667,10 @@ def test_other_failed(env, project, repo_a, repo_b, users, config):
assert pr_a.comments == [ assert pr_a.comments == [
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr_a, users), seen(env, pr_a, users),
(users['user'], 'Staging failed: ci/runbot on %s (view more at http://example.org/b)' % sth) (users['user'], '@%s @%s staging failed: ci/runbot on %s (view more at http://example.org/b)' % (
users['user'], users['reviewer'],
sth
))
] ]
class TestMultiBatches: class TestMultiBatches:
@ -670,12 +685,16 @@ class TestMultiBatches:
make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [( prs = [(
a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)), a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)) b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
) )
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
] ]
env.run_crons() env.run_crons()
prs = [
(a and to_pr(env, a), b and to_pr(env, b))
for (a, b) in prs
]
st = env['runbot_merge.stagings'].search([]) st = env['runbot_merge.stagings'].search([])
assert st assert st
@ -699,12 +718,16 @@ class TestMultiBatches:
make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [( prs = [(
a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)), a and make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)) b and make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']),
) )
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
] ]
env.run_crons() env.run_crons()
prs = [
(a and to_pr(env, a), b and to_pr(env, b))
for (a, b) in prs
]
st0 = env['runbot_merge.stagings'].search([]) st0 = env['runbot_merge.stagings'].search([])
assert len(st0.batch_ids) == 5 assert len(st0.batch_ids) == 5
@ -916,7 +939,8 @@ class TestSubstitutions:
'label': original, 'label': original,
'sha': format(pr_number, 'x')*40, 'sha': format(pr_number, 'x')*40,
} }
} },
'sender': {'login': 'pytest'}
} }
) )
pr = env['runbot_merge.pull_requests'].search([ pr = env['runbot_merge.pull_requests'].search([
@ -1081,6 +1105,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
* have a project with 3 repos, and two branches (1.0 and master) each * have a project with 3 repos, and two branches (1.0 and master) each
* have 2 PRs required for the freeze * have 2 PRs required for the freeze
* prep 3 freeze PRs * prep 3 freeze PRs
* prep 1 bump PR
* trigger the freeze wizard * trigger the freeze wizard
* trigger it again (check that the same object is returned, there should * trigger it again (check that the same object is returned, there should
only be one freeze per project at a time) only be one freeze per project at a time)
@ -1099,49 +1124,13 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
(0, 0, {'name': '1.0', 'sequence': 2}), (0, 0, {'name': '1.0', 'sequence': 2}),
] ]
masters = [] [
for r in [repo_a, repo_b, repo_c]: (master_head_a, master_head_b, master_head_c),
with r: (pr_required_a, _, pr_required_c),
[root, _] = r.make_commits( (pr_rel_a, pr_rel_b, pr_rel_c),
None, pr_bump_a,
Commit('base', tree={'version': '', 'f': '0'}), pr_other
Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None), ] = setup_mess(repo_a, repo_b, repo_c)
ref='heads/1.0'
)
masters.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
# have 2 PRs required for the freeze
with repo_a:
repo_a.make_commits(masters[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
pr_required_a = repo_a.make_pr(target='master', head='apr')
with repo_c:
repo_c.make_commits(masters[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
pr_required_c = repo_c.make_pr(target='master', head='cpr')
# have 3 release PRs, only the first one updates the tree (version file)
with repo_a:
repo_a.make_commits(
masters[0],
Commit('Release 1.1 (A)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
with repo_b:
repo_b.make_commits(
masters[1],
Commit('Release 1.1 (B)', tree=None),
ref='heads/release-1.1'
)
pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
with repo_c:
repo_c.make_commits(masters[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
pr_other = repo_c.make_pr(target='master', head='whocares')
repo_c.make_commits(
masters[2],
Commit('Release 1.1 (C)', tree=None),
ref='heads/release-1.1'
)
pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
env.run_crons() # process the PRs env.run_crons() # process the PRs
release_prs = { release_prs = {
@ -1149,6 +1138,7 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
repo_b.name: to_pr(env, pr_rel_b), repo_b.name: to_pr(env, pr_rel_b),
repo_c.name: to_pr(env, pr_rel_c), repo_c.name: to_pr(env, pr_rel_c),
} }
pr_bump_id = to_pr(env, pr_bump_a)
# trigger the ~~tree~~ freeze wizard # trigger the ~~tree~~ freeze wizard
w = project.action_prepare_freeze() w = project.action_prepare_freeze()
w2 = project.action_prepare_freeze() w2 = project.action_prepare_freeze()
@ -1166,6 +1156,11 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
for r in w_id.release_pr_ids: for r in w_id.release_pr_ids:
r.pr_id = release_prs[r.repository_id.name].id r.pr_id = release_prs[r.repository_id.name].id
w_id.release_pr_ids[-1].pr_id = to_pr(env, pr_other).id w_id.release_pr_ids[-1].pr_id = to_pr(env, pr_other).id
# configure bump
assert not w_id.bump_pr_ids, "there is no bump pr by default"
w_id.write({'bump_pr_ids': [
(0, 0, {'repository_id': pr_bump_id.repository.id, 'pr_id': pr_bump_id.id})
]})
r = w_id.action_freeze() r = w_id.action_freeze()
assert r == w, "the freeze is not ready so the wizard should redirect to itself" assert r == w, "the freeze is not ready so the wizard should redirect to itself"
owner = repo_c.owner owner = repo_c.owner
@ -1210,15 +1205,34 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
assert r['res_model'] == 'runbot_merge.project' assert r['res_model'] == 'runbot_merge.project'
assert r['res_id'] == project.id assert r['res_id'] == project.id
env.run_crons() # stage the release prs # stuff that's done directly
for repo in [repo_a, repo_b, repo_c]: for pr_id in release_prs.values():
with repo: assert pr_id.state == 'merged'
repo.post_status('staging.1.1', 'success', 'ci/runbot') assert pr_bump_id.state == 'merged'
repo.post_status('staging.1.1', 'success', 'legal/cla')
env.run_crons() # get the release prs merged # stuff that's behind a cron
env.run_crons()
assert pr_rel_a.state == "closed"
assert pr_rel_a.base['ref'] == '1.1'
assert pr_rel_b.state == "closed"
assert pr_rel_b.base['ref'] == '1.1'
assert pr_rel_c.state == "closed"
assert pr_rel_c.base['ref'] == '1.1'
for pr_id in release_prs.values(): for pr_id in release_prs.values():
assert pr_id.target.name == '1.1' assert pr_id.target.name == '1.1'
assert pr_id.state == 'merged'
assert pr_bump_a.state == 'closed'
assert pr_bump_a.base['ref'] == 'master'
assert pr_bump_id.target.name == 'master'
m_a = repo_a.commit('master')
assert m_a.message.startswith('Bump A')
assert repo_a.read_tree(m_a) == {
'f': '1', # from master
'g': 'x', # from required PR (merged into master before forking)
'version': '1.2-alpha', # from bump PR
}
c_a = repo_a.commit('1.1') c_a = repo_a.commit('1.1')
assert c_a.message.startswith('Release 1.1 (A)') assert c_a.message.startswith('Release 1.1 (A)')
@ -1229,19 +1243,71 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
} }
c_a_parent = repo_a.commit(c_a.parents[0]) c_a_parent = repo_a.commit(c_a.parents[0])
assert c_a_parent.message.startswith('super important file') assert c_a_parent.message.startswith('super important file')
assert c_a_parent.parents[0] == masters[0] assert c_a_parent.parents[0] == master_head_a
c_b = repo_b.commit('1.1') c_b = repo_b.commit('1.1')
assert c_b.message.startswith('Release 1.1 (B)') assert c_b.message.startswith('Release 1.1 (B)')
assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''} assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''}
assert c_b.parents[0] == masters[1] assert c_b.parents[0] == master_head_b
c_c = repo_c.commit('1.1') c_c = repo_c.commit('1.1')
assert c_c.message.startswith('Release 1.1 (C)') assert c_c.message.startswith('Release 1.1 (C)')
assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''} assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''}
assert repo_c.commit(c_c.parents[0]).parents[0] == masters[2] assert repo_c.commit(c_c.parents[0]).parents[0] == master_head_c
def setup_mess(repo_a, repo_b, repo_c):
master_heads = []
for r in [repo_a, repo_b, repo_c]:
with r:
[root, _] = r.make_commits(
None,
Commit('base', tree={'version': '', 'f': '0'}),
Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
ref='heads/1.0'
)
master_heads.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
# have 2 PRs required for the freeze
with repo_a:
repo_a.make_commits(master_heads[0], Commit('super important file', tree={'g': 'x'}), ref='heads/apr')
pr_required_a = repo_a.make_pr(target='master', head='apr')
with repo_c:
repo_c.make_commits(master_heads[2], Commit('update thing', tree={'f': '2'}), ref='heads/cpr')
pr_required_c = repo_c.make_pr(target='master', head='cpr')
# have 3 release PRs, only the first one updates the tree (version file)
with repo_a:
repo_a.make_commits(
master_heads[0],
Commit('Release 1.1 (A)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_a = repo_a.make_pr(target='master', head='release-1.1')
with repo_b:
repo_b.make_commits(
master_heads[1],
Commit('Release 1.1 (B)', tree=None),
ref='heads/release-1.1'
)
pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
with repo_c:
repo_c.make_commits(master_heads[2], Commit("Some change", tree={'a': '1'}), ref='heads/whocares')
pr_other = repo_c.make_pr(target='master', head='whocares')
repo_c.make_commits(
master_heads[2],
Commit('Release 1.1 (C)', tree=None),
ref='heads/release-1.1'
)
pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
# have one bump PR on repo A
with repo_a:
repo_a.make_commits(
master_heads[0],
Commit("Bump A", tree={'version': '1.2-alpha'}),
ref='heads/bump-1.1',
)
pr_bump_a = repo_a.make_pr(target='master', head='bump-1.1')
return master_heads, (pr_required_a, None, pr_required_c), (pr_rel_a, pr_rel_b, pr_rel_c), pr_bump_a, pr_other
def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config): def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
"""It should be possible to only freeze a subset of a project when e.g. one """It should be possible to only freeze a subset of a project when e.g. one
of the repository is managed differently than the rest and has of the repository is managed differently than the rest and has
@ -1312,3 +1378,61 @@ def test_freeze_subset(env, project, repo_a, repo_b, repo_c, users, config):
except AssertionError: except AssertionError:
... ...
# can't stage because we (wilfully) don't have branches 1.1 in repos B and C # can't stage because we (wilfully) don't have branches 1.1 in repos B and C
@pytest.mark.skip("env's session is not thread-safe sadface")
def test_race_conditions():
"""need the ability to dup the env in order to send concurrent requests to
the inner odoo
- try to run the action_freeze during a cron (merge or staging), should
error (recover and return nice message?)
- somehow get ahead of the action and update master's commit between moment
where it is fetched and moment where the bump pr is fast-forwarded,
there's actually a bit of time thanks to the rate limiting (fetch of base,
update of tmp to base, rebase of commits on tmp, wait 1s, for each release
and bump PR, then the release branches are created, and finally the bump
prs)
"""
...
def test_freeze_conflict(env, project, repo_a, repo_b, repo_c, users, config):
"""If one of the branches we're trying to create already exists, the wizard
fails.
"""
project.branch_ids = [
(1, project.branch_ids.id, {'sequence': 1}),
(0, 0, {'name': '1.0', 'sequence': 2}),
]
heads, _, (pr_rel_a, pr_rel_b, pr_rel_c), bump, other = \
setup_mess(repo_a, repo_b, repo_c)
env.run_crons()
release_prs = {
repo_a.name: to_pr(env, pr_rel_a),
repo_b.name: to_pr(env, pr_rel_b),
repo_c.name: to_pr(env, pr_rel_c),
}
w = project.action_prepare_freeze()
w_id = env[w['res_model']].browse([w['res_id']])
for repo, release_pr in release_prs.items():
w_id.release_pr_ids\
.filtered(lambda r: r.repository_id.name == repo)\
.pr_id = release_pr.id
# create conflicting branch
with repo_c:
repo_c.make_ref('heads/1.1', heads[2])
# actually perform the freeze
with pytest.raises(xmlrpc.client.Fault) as e:
w_id.action_freeze()
assert f"Unable to create branch {repo_c.name}:1.1" in e.value.faultString
# branches a and b should have been deleted
with pytest.raises(AssertionError) as e:
repo_a.get_ref('heads/1.1')
assert e.value.args[0].startswith("Not Found")
with pytest.raises(AssertionError) as e:
repo_b.get_ref('heads/1.1')
assert e.value.args[0].startswith("Not Found")

View File

@ -1,11 +1,10 @@
import requests
from utils import Commit, to_pr from utils import Commit, to_pr
def test_partner_merge(env): def test_partner_merge(env):
p_src = env['res.partner'].create({ p_src = env['res.partner'].create({
'name': 'kfhsf',
'github_login': 'tyu'
}) | env['res.partner'].create({
'name': "xxx", 'name': "xxx",
'github_login': 'xxx' 'github_login': 'xxx'
}) })
@ -97,3 +96,36 @@ def test_message_desync(env, project, make_repo, users, setreviewers, config):
assert st.message.startswith('title\n\nbody'),\ assert st.message.startswith('title\n\nbody'),\
"the stored PR message should have been ignored when staging" "the stored PR message should have been ignored when staging"
assert st.parents == [m, c], "check the staging's ancestry is the right one" assert st.parents == [m, c], "check the staging's ancestry is the right one"
def test_unreviewer(env, project, port):
repo = env['runbot_merge.repository'].create({
'project_id': project.id,
'name': 'a_test_repo',
'status_ids': [(0, 0, {'context': 'status'})]
})
p = env['res.partner'].create({
'name': 'George Pearce',
'github_login': 'emubitch',
'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})]
})
r = requests.post(f'http://localhost:{port}/runbot_merge/get_reviewers', json={
'jsonrpc': '2.0',
'id': None,
'method': 'call',
'params': {},
})
r.raise_for_status()
assert 'error' not in r.json()
assert r.json()['result'] == ['emubitch']
r = requests.post(f'http://localhost:{port}/runbot_merge/remove_reviewers', json={
'jsonrpc': '2.0',
'id': None,
'method': 'call',
'params': {'github_logins': ['emubitch']},
})
r.raise_for_status()
assert 'error' not in r.json()
assert p.review_rights == env['res.partner.review']

View File

@ -0,0 +1,102 @@
import pytest
import requests
GEORGE = {
'name': "George Pearce",
'email': 'george@example.org',
'github_login': 'emubitch',
'sub': '19321102'
}
def test_basic_provisioning(env, port):
r = provision_user(port, [GEORGE])
assert r == [1, 0]
g = env['res.users'].search([('login', '=', GEORGE['email'])])
assert g.partner_id.name == GEORGE['name']
assert g.partner_id.github_login == GEORGE['github_login']
assert g.oauth_uid == GEORGE['sub']
(model, g_id) = env['ir.model.data']\
.check_object_reference('base', 'group_user')
assert model == 'res.groups'
assert g.groups_id.id == g_id, "check that users were provisioned as internal (not portal)"
# repeated provisioning should be a no-op
r = provision_user(port, [GEORGE])
assert r == [0, 0]
# the email (real login) should be the determinant, any other field is
# updatable
r = provision_user(port, [{**GEORGE, 'name': "x"}])
assert r == [0, 1]
r = provision_user(port, [dict(GEORGE, name="x", github_login="y", sub="42")])
assert r == [0, 1]
# can't fail anymore because github_login now used to look up the existing
# user
# with pytest.raises(Exception):
# provision_user(port, [{
# 'name': "other@example.org",
# 'email': "x",
# 'github_login': "y",
# 'sub': "42"
# }])
r = provision_user(port, [dict(GEORGE, active=False)])
assert r == [0, 1]
assert not env['res.users'].search([('login', '=', GEORGE['email'])])
assert env['res.partner'].search([('email', '=', GEORGE['email'])])
def test_upgrade_partner(env, port):
# If a partner exists for a github login (and / or email?) it can be
# upgraded by creating a user for it
p = env['res.partner'].create({
'name': GEORGE['name'],
'email': GEORGE['email'],
})
r = provision_user(port, [GEORGE])
assert r == [1, 0]
assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
'id': p.user_ids.id,
'github_login': GEORGE['github_login'],
'oauth_uid': GEORGE['sub'],
'email': GEORGE['email'],
}]
p.user_ids.unlink()
p.unlink()
p = env['res.partner'].create({
'name': GEORGE['name'],
'github_login': GEORGE['github_login'],
})
r = provision_user(port, [GEORGE])
assert r == [1, 0]
assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
'id': p.user_ids.id,
'github_login': GEORGE['github_login'],
'oauth_uid': GEORGE['sub'],
'email': GEORGE['email'],
}]
p.user_ids.unlink()
p.unlink()
def test_no_email(env, port):
    """The provisioning system should ignore email-less entries entirely
    (neither create nor update anything).
    """
    entry = dict(GEORGE, email=None)
    assert provision_user(port, [entry]) == [0, 0]
def provision_user(port, users):
    """Call the mergebot's provisioning endpoint over JSON-RPC.

    :param port: HTTP port of the odoo instance under test
    :param users: list of user dicts to provision
    :return: the endpoint's result (a ``[created, updated]`` pair)
    """
    payload = {
        'jsonrpc': '2.0',
        'id': None,
        'method': 'call',
        'params': {'users': users},
    }
    response = requests.post(
        f'http://localhost:{port}/runbot_merge/provision',
        json=payload,
    )
    response.raise_for_status()
    body = response.json()
    # JSON-RPC errors come back with HTTP 200, so check the envelope too
    assert 'error' not in body
    return body['result']

View File

@ -89,7 +89,7 @@ def test_basic(env, project, make_repo, users, setreviewers, config):
(users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen r+'),
seen(env, pr, users), seen(env, pr, users),
(users['reviewer'], 'hansen override=l/int'), (users['reviewer'], 'hansen override=l/int'),
(users['user'], "I'm sorry, @{}. You are not allowed to override this status.".format(users['reviewer'])), (users['user'], "I'm sorry, @{}: you are not allowed to override this status.".format(users['reviewer'])),
(users['other'], "hansen override=l/int"), (users['other'], "hansen override=l/int"),
] ]
assert pr_id.statuses == '{}' assert pr_id.statuses == '{}'

View File

@ -0,0 +1,43 @@
<odoo>
    <!--
        Backend configuration views for the mergebot: CI/status overrides
        (which partners may override which status contexts) and per-repository
        review rights, both exposed under a "Configuration" menu.
    -->
    <record id="action_overrides" model="ir.actions.act_window">
        <field name="name">CI / statuses overrides</field>
        <field name="res_model">res.partner.override</field>
    </record>
    <!-- inline-editable list of override grants: one status context per row,
         optionally restricted to a single repository -->
    <record id="tree_overrides" model="ir.ui.view">
        <field name="name">Overrides List</field>
        <field name="model">res.partner.override</field>
        <field name="arch" type="xml">
            <tree editable="bottom">
                <field name="context"/>
                <field name="repository_id"/>
                <field name="partner_ids" widget="many2many_tags"/>
            </tree>
        </field>
    </record>
    <record id="action_review" model="ir.actions.act_window">
        <field name="name">Review Rights</field>
        <field name="res_model">res.partner.review</field>
        <!-- group rows by repository by default when opening the action -->
        <field name="context">{'search_default_group_by_repository': True}</field>
    </record>
    <!-- inline-editable list of who can review (or self-review) per repository -->
    <record id="tree_review" model="ir.ui.view">
        <field name="name">Review Rights</field>
        <field name="model">res.partner.review</field>
        <field name="arch" type="xml">
            <tree editable="bottom">
                <field name="repository_id"/>
                <field name="partner_id"/>
                <field name="review"/>
                <field name="self_review"/>
            </tree>
        </field>
    </record>
    <!-- "Configuration" submenu hanging off the main mergebot menu -->
    <menuitem name="Configuration" id="menu_configuration" parent="runbot_merge_menu"/>
    <menuitem name="CI Overrides" id="menu_configuration_overrides"
              parent="menu_configuration"
              action="action_overrides"/>
    <menuitem name="Review Rights" id="menu_configuration_review"
              parent="menu_configuration"
              action="action_review"/>
</odoo>

View File

@ -77,6 +77,8 @@
<field name="number"/> <field name="number"/>
<field name="target"/> <field name="target"/>
<field name="state"/> <field name="state"/>
<field name="author"/>
<field name="reviewed_by"/>
</tree> </tree>
</field> </field>
</record> </record>
@ -97,10 +99,10 @@
<field name="target"/> <field name="target"/>
<field name="state"/> <field name="state"/>
<field name="author"/> <field name="author"/>
<field name="priority"/>
</group> </group>
<group> <group>
<field name="label"/> <field name="label"/>
<field name="priority"/>
<field name="squash"/> <field name="squash"/>
</group> </group>
</group> </group>
@ -108,6 +110,8 @@
<group colspan="4"> <group colspan="4">
<field name="head"/> <field name="head"/>
<field name="statuses"/> <field name="statuses"/>
</group>
<group colspan="4">
<field name="overrides"/> <field name="overrides"/>
</group> </group>
</group> </group>
@ -196,7 +200,8 @@
<group string="Batches"> <group string="Batches">
<field name="batch_ids" colspan="4" nolabel="1"> <field name="batch_ids" colspan="4" nolabel="1">
<tree> <tree>
<field name="prs"/> <field name="prs" widget="many2many_tags"
options="{'no_quick_create': True}"/>
</tree> </tree>
</field> </field>
</group> </group>
@ -205,66 +210,33 @@
</field> </field>
</record> </record>
<record id="runbot_merge_action_fetches" model="ir.actions.act_window"> <record id="runbot_merge_action_commits" model="ir.actions.act_window">
<field name="name">PRs to fetch</field> <field name="name">Commit Statuses</field>
<field name="res_model">runbot_merge.fetch_job</field> <field name="res_model">runbot_merge.commit</field>
<field name="view_mode">tree</field> <field name="view_mode">tree,form</field>
<field name="context">{'default_active': True}</field>
</record> </record>
<record id="runbot_merge_search_fetches" model="ir.ui.view"> <record id="runbot_merge_commits_tree" model="ir.ui.view">
<field name="name">Fetches Search</field> <field name="name">commits list</field>
<field name="model">runbot_merge.fetch_job</field> <field name="model">runbot_merge.commit</field>
<field name="arch" type="xml">
<search>
<filter string="Active" name="active"
domain="[('active', '=', True)]"/>
<field name="repository"/>
<field name="number"/>
</search>
</field>
</record>
<record id="runbot_merge_tree_fetches" model="ir.ui.view">
<field name="name">Fetches Tree</field>
<field name="model">runbot_merge.fetch_job</field>
<field name="arch" type="xml"> <field name="arch" type="xml">
<tree> <tree>
<field name="repository"/> <field name="sha"/>
<field name="number"/> <field name="statuses"/>
</tree>
</field>
</record>
<record id="runbot_merge_action_overrides" model="ir.actions.act_window">
<field name="name">CI / statuses overrides</field>
<field name="res_model">res.partner.override</field>
</record>
<record id="runot_merge_tree_overrides" model="ir.ui.view">
<field name="name">Overrides List</field>
<field name="model">res.partner.override</field>
<field name="arch" type="xml">
<tree editable="bottom">
<field name="context"/>
<field name="repository_id"/>
<field name="partner_ids" widget="many2many_tags"/>
</tree> </tree>
</field> </field>
</record> </record>
<menuitem name="Mergebot" id="runbot_merge_menu"/> <menuitem name="Mergebot" id="runbot_merge_menu"/>
<menuitem name="Projects" id="runbot_merge_menu_project" <menuitem name="Projects" id="runbot_merge_menu_project"
parent="runbot_merge_menu" parent="runbot_merge_menu"
action="runbot_merge_action_projects"/> action="runbot_merge_action_projects"/>
<menuitem name="Pull Requests" id="runbot_merge_menu_prs" <menuitem name="Pull Requests" id="runbot_merge_menu_prs"
parent="runbot_merge_menu" parent="runbot_merge_menu"
action="runbot_merge_action_prs"/> action="runbot_merge_action_prs"/>
<menuitem name="Stagings" id="runbot_merge_menu_stagings" <menuitem name="Stagings" id="runbot_merge_menu_stagings"
parent="runbot_merge_menu" parent="runbot_merge_menu"
action="runbot_merge_action_stagings"/> action="runbot_merge_action_stagings"/>
<menuitem name="Fetches" id="runbot_merge_menu_fetches" <menuitem name="Commits" id="runbot_merge_menu_commits"
parent="runbot_merge_menu" parent="runbot_merge_menu"
action="runbot_merge_action_fetches"/> action="runbot_merge_action_commits"/>
<menuitem name="Configuration" id="runbot_merge_menu_configuration" parent="runbot_merge_menu"/>
<menuitem name="CI Overrides" id="runbot_merge_menu_configuration_overrides"
parent="runbot_merge_menu"
action="runbot_merge_action_overrides"/>
</odoo> </odoo>

View File

@ -0,0 +1,97 @@
<odoo>
    <!--
        Queues mergebot menu: contains various list views inspecting the cron tasks
        (mostly)
    -->
    <!-- stagings that failed and were split for re-staging in smaller parts -->
    <record id="action_splits" model="ir.actions.act_window">
        <field name="name">Splits</field>
        <field name="res_model">runbot_merge.split</field>
    </record>
    <record id="tree_splits" model="ir.ui.view">
        <field name="name">Splits</field>
        <field name="model">runbot_merge.split</field>
        <field name="arch" type="xml">
            <tree>
                <field name="id"/>
                <field name="target"/>
            </tree>
        </field>
    </record>
    <!-- queued github comments waiting to be posted on pull requests -->
    <record id="action_feedback" model="ir.actions.act_window">
        <field name="name">Feedback</field>
        <field name="res_model">runbot_merge.pull_requests.feedback</field>
    </record>
    <record id="tree_feedback" model="ir.ui.view">
        <field name="name">Feedback</field>
        <field name="model">runbot_merge.pull_requests.feedback</field>
        <field name="arch" type="xml">
            <tree>
                <field name="repository"/>
                <field name="pull_request"/>
                <field name="message"/>
                <field name="close"/>
            </tree>
        </field>
    </record>
    <!-- queued github label changes (add / remove) for pull requests -->
    <record id="action_tagging" model="ir.actions.act_window">
        <field name="name">Tagging</field>
        <field name="res_model">runbot_merge.pull_requests.tagging</field>
    </record>
    <record id="tree_tagging" model="ir.ui.view">
        <field name="name">Tagging</field>
        <field name="model">runbot_merge.pull_requests.tagging</field>
        <field name="arch" type="xml">
            <tree editable="bottom">
                <field name="repository"/>
                <field name="pull_request"/>
                <field name="tags_add"/>
                <field name="tags_remove"/>
            </tree>
        </field>
    </record>
    <!-- PRs queued for (re-)fetching from github -->
    <record id="action_fetches" model="ir.actions.act_window">
        <field name="name">PRs to fetch</field>
        <field name="res_model">runbot_merge.fetch_job</field>
        <field name="view_mode">tree</field>
        <!-- show pending (active) fetch jobs by default -->
        <field name="context">{'default_active': True}</field>
    </record>
    <record id="search_fetches" model="ir.ui.view">
        <field name="name">Fetches Search</field>
        <field name="model">runbot_merge.fetch_job</field>
        <field name="arch" type="xml">
            <search>
                <filter string="Active" name="active"
                        domain="[('active', '=', True)]"/>
                <field name="repository"/>
                <field name="number"/>
            </search>
        </field>
    </record>
    <record id="tree_fetches" model="ir.ui.view">
        <field name="name">Fetches Tree</field>
        <field name="model">runbot_merge.fetch_job</field>
        <field name="arch" type="xml">
            <tree>
                <field name="repository"/>
                <field name="number"/>
            </tree>
        </field>
    </record>
    <!-- "Queues" submenu grouping all of the above under the mergebot menu -->
    <menuitem name="Queues" id="menu_queues" parent="runbot_merge_menu"/>
    <menuitem name="Splits" id="menu_queues_splits"
              parent="menu_queues"
              action="action_splits"/>
    <menuitem name="Feedback" id="menu_queues_feedback"
              parent="menu_queues"
              action="action_feedback"/>
    <menuitem name="Tagging" id="menu_queues_tagging"
              parent="menu_queues"
              action="action_tagging"/>
    <menuitem name="Fetches" id="menu_fetches"
              parent="menu_queues"
              action="action_fetches"/>
</odoo>

View File

@ -70,7 +70,14 @@
</group> </group>
<group> <group>
<group colspan="4" string="Delegate On"> <group colspan="4" string="Delegate On">
<field name="delegate_reviewer" nolabel="1"/> <field name="delegate_reviewer" nolabel="1">
<tree>
<field name="repository"/>
<field name="number"/>
<field name="target"/>
<field name="state"/>
</tree>
</field>
</group> </group>
</group> </group>
</page> </page>

View File

@ -10,22 +10,73 @@
</xpath> </xpath>
</template> </template>
<template id="link-pr" name="create a link to `pr`">
<t t-set="title">
<t t-if="pr.repository.group_id &lt;= env.user.groups_id">
<t t-esc="pr.message.split('\n')[0]"/>
</t>
</t>
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}"
t-att-title="pr.blocked or title.strip()"
t-att-target="target or None"
><t t-esc="pr.display_name"/></a>
</template>
<template id="staging-statuses" name="dropdown statuses list of stagings">
<div class="dropdown" t-if="staging.heads">
<button class="btn btn-link dropdown-toggle"
type="button"
data-toggle="dropdown"
aria-haspopup="true"
aria-expanded="true"
t-attf-title="Staged at {{staging.staged_at}}Z"
>
<t t-raw="0"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu staging-statuses">
<li groups="runbot_merge.group_admin">
<a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings"
target="new">
Open Staging
</a>
</li>
<t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
<t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
<t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or (None, None)"/>
<li t-att-class="
'bg-success' if st[0] == 'success'
else 'bg-danger' if st[0] in ('error', 'failure')
else 'bg-info' if st[0]
else 'bg-light'"
><a t-att-href="st[1]" target="new">
<t t-esc="req.repo_id.name"/>: <t t-esc="req.context"/>
</a></li>
</t>
</ul>
</div>
</template>
<template id="alerts">
<div id="alerts" class="row text-center">
<div class="alert alert-light col-md-12 h6 mb-0">
<a href="/runbot_merge/changelog">Changelog</a>
</div>
<t t-set="stagingcron" t-value="env(user=1).ref('runbot_merge.staging_cron')"/>
<div t-if="not stagingcron.active" class="alert alert-warning col-12 mb-0" role="alert">
Staging is disabled, "ready" pull requests will not be staged.
</div>
<t t-set="mergecron" t-value="env(user=1).ref('runbot_merge.merge_cron')"/>
<div t-if="not mergecron.active" class="alert alert-warning col-12 mb-0" role="alert">
Merging is disabled, stagings will not be integrated.
</div>
</div>
</template>
<template id="dashboard" name="mergebot dashboard"> <template id="dashboard" name="mergebot dashboard">
<t t-call="website.layout"> <t t-call="website.layout">
<div id="wrap"><div class="container-fluid"> <div id="wrap"><div class="container-fluid">
<div id="alerts" class="row text-center"> <t t-call="runbot_merge.alerts"/>
<div class="alert alert-light col-md-12 h6 mb-0">
<a href="/runbot_merge/changelog">Changelog</a>
</div>
<t t-set="stagingcron" t-value="env(user=1).ref('runbot_merge.staging_cron')"/>
<div t-if="not stagingcron.active" class="alert alert-warning col-12 mb-0" role="alert">
Staging is disabled, "ready" pull requests will not be staged.
</div>
<t t-set="mergecron" t-value="env(user=1).ref('runbot_merge.merge_cron')"/>
<div t-if="not mergecron.active" class="alert alert-warning col-12 mb-0" role="alert">
Merging is disabled, stagings will not be disabled.
</div>
</div>
<section t-foreach="projects.with_context(active_test=False)" t-as="project" class="row"> <section t-foreach="projects.with_context(active_test=False)" t-as="project" class="row">
<h1 class="col-md-12"><t t-esc="project.name"/></h1> <h1 class="col-md-12"><t t-esc="project.name"/></h1>
<div class="col-md-12"> <div class="col-md-12">
@ -63,10 +114,7 @@
<li t-foreach="splits" t-as="split"> <li t-foreach="splits" t-as="split">
<ul class="pr-listing list-inline list-unstyled mb0"> <ul class="pr-listing list-inline list-unstyled mb0">
<li t-foreach="split.mapped('batch_ids.prs')" t-as="pr"> <li t-foreach="split.mapped('batch_ids.prs')" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-call="runbot_merge.link-pr"/>
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
</li> </li>
</ul> </ul>
</li> </li>
@ -76,10 +124,7 @@
<h5>Awaiting</h5> <h5>Awaiting</h5>
<ul class="list-inline"> <ul class="list-inline">
<li t-foreach="ready" t-as="pr"> <li t-foreach="ready" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-call="runbot_merge.link-pr"/>
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
</li> </li>
</ul> </ul>
</div> </div>
@ -87,10 +132,7 @@
<h5>Blocked</h5> <h5>Blocked</h5>
<ul class="list-inline"> <ul class="list-inline">
<li t-foreach="blocked" t-as="pr"> <li t-foreach="blocked" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-call="runbot_merge.link-pr"/>
t-att-title="pr.blocked">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
</li> </li>
</ul> </ul>
</div> </div>
@ -105,10 +147,7 @@
<h5>Failed</h5> <h5>Failed</h5>
<ul class="list-inline"> <ul class="list-inline">
<li t-foreach="failed" t-as="pr"> <li t-foreach="failed" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-call="runbot_merge.link-pr"/>
t-att-title="pr.message.split('\n')[0]">
<t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/>
</a>
</li> </li>
</ul> </ul>
</div> </div>
@ -135,49 +174,24 @@
<t t-if="staging_index >= 4">visible-lg-block</t> <t t-if="staging_index >= 4">visible-lg-block</t>
</t> </t>
<t t-set="title"> <t t-set="title">
<t t-if="staging.state == 'canceled'">Cancelled: <t t-esc="staging.reason"/></t> <t t-if="staging.state == 'ff_failed'">fast forward failed (<t t-esc="staging.reason"/>)</t>
<t t-if="staging.state == 'ff_failed'">Fast Forward Failed</t> <t t-if="staging.state == 'pending'">last status</t>
<t t-if="staging.state not in ('canceled', 'ff_failed')"><t t-esc="staging.reason"/></t>
</t> </t>
<li t-attf-class="staging {{stateclass.strip()}} {{decorationclass.strip()}}" t-att-title="title.strip() or None"> <!-- separate concatenation to avoid having line-break in title as some browsers trigger it -->
<!-- write-date may have microsecond precision, strip that information -->
<!-- use write-date under assumption that a staging is last modified when it ends -->
<t t-set="title"><t t-esc="title.strip() or staging.reason"/> at <t t-esc="staging.write_date.replace(microsecond=0)"/>Z</t>
<li t-attf-class="staging {{stateclass.strip()}} {{decorationclass.strip()}}" t-att-title="title">
<ul class="list-unstyled"> <ul class="list-unstyled">
<li t-foreach="staging.batch_ids" t-as="batch" class="batch"> <li t-foreach="staging.batch_ids" t-as="batch" class="batch">
<t t-esc="batch.prs[:1].label"/> <t t-esc="batch.prs[:1].label"/>
<t t-foreach="batch.prs" t-as="pr"> <t t-foreach="batch.prs" t-as="pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-call="runbot_merge.link-pr"/>
t-att-title="pr.message.split('\n')[0]"><t t-esc="pr.repository.name"/>#<t t-esc="pr.number"/></a>
</t> </t>
</li> </li>
</ul> </ul>
<t t-if="staging.heads"> <t t-call="runbot_merge.staging-statuses">
<div class="dropdown"> Staged <span t-field="staging.staged_at" t-options="{'widget': 'relative'}"/>
<button class="btn btn-link dropdown-toggle" type="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true"
t-attf-title="{{staging.staged_at}}Z">
Staged <span t-field="staging.staged_at" t-options="{'widget': 'relative'}"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li groups="runbot_merge.group_admin">
<a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings" target="new">
Open Staging
</a>
</li>
<t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
<t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
<t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or ('pending', None)"/>
<li t-att-class="
'bg-success' if st[0] == 'success'
else 'bg-danger' if st[0] in ('error', 'failure')
else 'bg-info'"
>
<a t-att-href="st[1]" target="new">
<t t-esc="req.repo_id.name"/>: <t t-esc="req.context"/>
</a>
</li>
</t>
</ul>
</div>
</t> </t>
</li> </li>
</t> </t>
@ -224,68 +238,32 @@
<span t-field="staging.staged_at" <span t-field="staging.staged_at"
t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/> t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
</t> </t>
<div class="dropdown" t-if="staging.heads"> <t t-call="runbot_merge.staging-statuses">
<button class="btn btn-link dropdown-toggle" <span t-field="staging.staged_at"
type="button" t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
data-toggle="dropdown" </t>
aria-haspopup="true"
aria-expanded="true">
<span t-field="staging.staged_at"
t-options="{'format': 'yyyy-MM-dd\'T\'HH:mm:ssZ'}"/>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li groups="runbot_merge.group_admin">
<a t-attf-href="/web#id={{staging.id}}&amp;view_type=form&amp;model=runbot_merge.stagings"
target="new">
Open Staging
</a>
</li>
<t t-set="statuses" t-value="{(r, c): (s, t) for r, c, s, t in staging.statuses}"/>
<t t-foreach="repo_statuses._for_staging(staging)" t-as="req">
<t t-set="st" t-value="statuses.get((req.repo_id.name, req.context)) or ('pending', None)"/>
<li t-att-class="
'bg-success' if st[0] == 'success'
else 'bg-danger' if st[0] in ('error', 'failure')
else 'bg-info'"
>
<a t-att-href="st[1]"
target="new">
<t t-esc="req.repo_id.name"/>:
<t t-esc="req.context"/>
</a>
</li>
</t>
</ul>
</div>
</th> </th>
<td> <td>
<ul class="list-inline list-unstyled mb0"> <ul class="list-inline list-unstyled mb0">
<t t-foreach="staging.batch_ids" <t t-foreach="staging.batch_ids"
t-as="batch"> t-as="batch">
<t t-set="first_pr" <t t-set="first_pr"
t-value="batch.prs[-1]"/> t-value="batch.prs[-1:]"/>
<li class="dropdown"> <li class="dropdown" t-if="first_pr">
<button class="btn btn-link dropdown-toggle" <button class="btn btn-link dropdown-toggle"
type="button" type="button"
data-toggle="dropdown" data-toggle="dropdown"
aria-haspopup="true" aria-haspopup="true"
aria-expanded="true" aria-expanded="true"
t-att-title="first_pr.message.split('\n')[0]"
> >
<t t-esc="first_pr.label"/> <t t-esc="first_pr.label"/>
<span class="caret"></span> <span class="caret"></span>
</button> </button>
<ul class="dropdown-menu"> <ul class="dropdown-menu">
<li t-foreach="batch.prs" <li t-foreach="batch.prs" t-as="pr">
t-as="pr"> <t t-call="runbot_merge.link-pr">
<a t-attf-href="https://github.com/{{ pr.repository.name }}/pull/{{ pr.number }}" <t t-set="target">new</t>
t-att-title="pr.message.split('\n')[0]" </t>
target="new">
<t t-esc="pr.repository.name"/>
#
<t t-esc="pr.number"/>
</a>
</li> </li>
</ul> </ul>
</li> </li>
@ -411,12 +389,13 @@
<template id="view_pull_request"> <template id="view_pull_request">
<t t-call="website.layout"> <t t-call="website.layout">
<div id="wrap"><div class="container-fluid"> <div id="wrap"><div class="container-fluid">
<t t-call="runbot_merge.alerts"/>
<h1> <h1>
<a t-att-href="pr.github_url" t-field="pr.display_name"> <a t-att-href="pr.github_url" t-field="pr.display_name">
</a> </a>
<a t-attf-href="/web#view_type=form&amp;model=runbot_merge.pull_requests&amp;id={{pr.id}}" <a t-attf-href="/web#view_type=form&amp;model=runbot_merge.pull_requests&amp;id={{pr.id}}"
class="btn btn-sm btn-secondary align-top float-right" class="btn btn-sm btn-secondary align-top float-right"
groups="base.group_user">View in backend</a> groups="base.group_user">View in backend</a>
</h1> </h1>
<h6>Created by <span t-field="pr.author.display_name"/></h6> <h6>Created by <span t-field="pr.author.display_name"/></h6>
<t t-set="tmpl"> <t t-set="tmpl">
@ -425,11 +404,14 @@
<t t-else="">open</t> <t t-else="">open</t>
</t> </t>
<t t-call="runbot_merge.view_pull_request_info_{{tmpl.strip()}}"/> <t t-call="runbot_merge.view_pull_request_info_{{tmpl.strip()}}"/>
<t t-set="target_cls" t-value="None if pr.target.active else 'text-muted bg-warning'"/>
<dl class="runbot-merge-fields"> <dl class="runbot-merge-fields">
<dt>label</dt> <dt>label</dt>
<dd><span t-field="pr.label"/></dd> <dd><span t-field="pr.label"/></dd>
<dt>head</dt> <dt>head</dt>
<dd><a t-attf-href="{{pr.github_url}}/commits/{{pr.head}}"><span t-field="pr.head"/></a></dd> <dd><a t-attf-href="{{pr.github_url}}/commits/{{pr.head}}"><span t-field="pr.head"/></a></dd>
<dt t-att-class="target_cls">target</dt>
<dd t-att-class="target_cls"><span t-field="pr.target"/></dd>
</dl> </dl>
<p t-field="pr.message"/> <p t-field="pr.message"/>
</div></div> </div></div>