[ADD] forwardbot

* Cherrypicking is hand-rolled because there seems to be no easy way to
  programmatically edit commit messages during a cherrypicking
  sequence: `-n` basically squashes all commits and `-e` invokes a
  subprocess. `-e` with `VISUAL=false` sort of works (in that it
  interrupts the process before each commit), however there don't
  seem to be clean status codes, so it's difficult to tell whether the
  cherrypick failed or is just waiting for the commit of this step.

  Instead, cherrypick commits individually, then edit / rewrite their
  commit messages (as sketched below):

  * add a reference to the original commit
  * convert signed-off-by to something else as the original commit was
    signed off but not necessarily this one
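
  In terms of plain git / `subprocess` calls, the per-commit loop boils
  down to something like this sketch (helper and variable names are
  illustrative; the actual implementation is `_cherry_pick` below):

      import subprocess

      def cherrypick_one(workdir, commit, original_sha):
          # run git in the forward-port working copy
          run = lambda *a, **kw: subprocess.run(('git', '-C', workdir) + a, **kw)
          before = run('rev-parse', 'HEAD', capture_output=True, check=True)\
              .stdout.decode().strip()
          r = run('cherry-pick', commit, capture_output=True)
          if r.returncode:  # pick failed: reset and bail
              run('reset', '--hard', before)
              raise Exception(r.stderr.decode())
          # rewrite the message: reference the original commit, and convert
          # signed-off-by since the original sign-off doesn't carry over
          msg = run('show', '-s', '--format=%B', 'HEAD',
                    capture_output=True, check=True).stdout.decode()
          msg = msg.replace('Signed-off-by:', 'Original-signed-off-by:')
          msg += '\nX-original-commit: %s\n' % original_sha
          run('commit', '--amend', '--file=-', input=msg.encode(), check=True)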

* Can't assign users when creating PRs: only repository collaborators
  or people who have commented on the issue / PR (which we're in the
  process of creating) can be assigned.

  PR authors are as likely to be collaborators as not, and we can have
  non-collaborator reviewers, so pinging via a regular comment seems
  like a less fraught way to notify users.
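
  The ping itself is then just a regular issue comment, e.g. (sketch;
  token / repo / number / logins are whatever the bot has at hand):

      import requests

      def ping(token, repo, number, logins):
          # notify via a comment rather than an (impossible) assignment
          r = requests.post(
              'https://api.github.com/repos/{}/issues/{}/comments'.format(repo, number),
              json={'body': 'Ping %s' % ', '.join('@' + l for l in logins)},
              headers={'Authorization': 'token %s' % token},
          )
          r.raise_for_status()
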
Xavier Morel 2019-08-23 16:16:30 +02:00 committed by xmo-odoo
parent 1b1aa637fe
commit f671dcc828
17 changed files with 2951 additions and 52 deletions

forwardport/__init__.py

@@ -0,0 +1 @@
from . import models

@@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-
{
    'name': 'forward port bot',
    'summary': "A port which forward ports successful PRs.",
    'depends': ['runbot_merge'],
    'data': [
        'data/security.xml',
        'data/crons.xml',
        'data/views.xml',
    ],
}

@@ -0,0 +1,46 @@
<odoo>
<record model="ir.cron" id="port_forward">
<field name="name">Check if there are merged PRs to port</field>
<field name="model_id" ref="model_forwardport_batches"/>
<field name="state">code</field>
<field name="code">model._process()</field>
<field name="interval_number">1</field>
<field name="interval_type">minutes</field>
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
<record model="ir.cron" id="updates">
<field name="name">Update followup FP PRs</field>
<field name="model_id" ref="model_forwardport_updates"/>
<field name="state">code</field>
<field name="code">model._process()</field>
<field name="interval_number">1</field>
<field name="interval_type">minutes</field>
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
<record model="ir.cron" id="reminder">
<field name="name">Remind open PR</field>
<field name="model_id" ref="model_forwardport_updates"/>
<field name="state">code</field>
<field name="code">
for pr in env['runbot_merge.pull_requests'].search([
# only FP PRs
('source_id', '!=', False),
# active
('state', 'not in', ['merged', 'closed']),
]):
env['runbot_merge.pull_requests.feedback'].create({
'repository': pr.repository.id,
'pull_request': pr.number,
'message': "This forward port PR is awaiting action",
})
</field>
<field name="interval_number">1</field>
<field name="interval_type">weeks</field>
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
</odoo>

@@ -0,0 +1,37 @@
<odoo>
<record id="access_forwardport_batches_admin" model="ir.model.access">
<field name="name">Admin access to batches</field>
<field name="model_id" ref="model_forwardport_batches"/>
<field name="group_id" ref="runbot_merge.group_admin"/>
<field name="perm_read">1</field>
<field name="perm_create">1</field>
<field name="perm_write">1</field>
<field name="perm_unlink">1</field>
</record>
<record id="access_forwardport_updates_admin" model="ir.model.access">
<field name="name">Admin access to updates</field>
<field name="model_id" ref="model_forwardport_updates"/>
<field name="group_id" ref="runbot_merge.group_admin"/>
<field name="perm_read">1</field>
<field name="perm_create">1</field>
<field name="perm_write">1</field>
<field name="perm_unlink">1</field>
</record>
<record id="access_forwardport_batches" model="ir.model.access">
<field name="name">No normal access to batches</field>
<field name="model_id" ref="model_forwardport_batches"/>
<field name="perm_read">0</field>
<field name="perm_create">0</field>
<field name="perm_write">0</field>
<field name="perm_unlink">0</field>
</record>
<record id="access_forwardport_updates" model="ir.model.access">
<field name="name">No normal access to updates</field>
<field name="model_id" ref="model_forwardport_updates"/>
<field name="perm_read">0</field>
<field name="perm_create">0</field>
<field name="perm_write">0</field>
<field name="perm_unlink">0</field>
</record>
</odoo>

@@ -0,0 +1,63 @@
<odoo>
<record model="ir.ui.view" id="project">
<field name="name">Show forwardport project fields</field>
<field name="inherit_id" ref="runbot_merge.runbot_merge_form_project"/>
<field name="model">runbot_merge.project</field>
<field name="arch" type="xml">
<xpath expr="//sheet/group[2]" position="after">
<group string="Forwardport Configuration">
<group>
<field string="Token" name="fp_github_token"/>
</group>
<group>
<field string="Bot Name" name="fp_github_name"/>
<field string="Bot Email" name="fp_github_email"/>
</group>
</group>
</xpath>
<xpath expr="//field[@name='repo_ids']/tree" position="inside">
<field string="FP remote" name="fp_remote_target"
help="Repository where forward port branches will be created"
/>
</xpath>
<xpath expr="//field[@name='branch_ids']/tree" position="inside">
<field name="fp_target" string="FP to"
help="This branch will be forward-ported to (from lower ones)"
/>
<field name="fp_sequence" string="FP sequence"
help="Overrides the normal sequence"
/>
</xpath>
</field>
</record>
<record model="ir.ui.view" id="pr">
<field name="name">Show forwardport PR fields</field>
<field name="inherit_id" ref="runbot_merge.runbot_merge_form_prs"/>
<field name="model">runbot_merge.pull_requests</field>
<field name="arch" type="xml">
<xpath expr="//sheet/group[2]" position="after">
<separator string="Forwardported" attrs="{'invisible': [('source_id', '=', False)]}"/>
<group attrs="{'invisible': [('source_id', '=', False)]}">
<group>
<field string="Original PR" name="source_id"/>
</group>
<group attrs="{'invisible': [('parent_id', '=', False)]}">
<field name="parent_id"/>
</group>
<group colspan="4" attrs="{'invisible': [('parent_id', '!=', False)]}">
<span>
Detached from forward porting (either conflicting
or explicitly updated).
</span>
</group>
<group>
<field string="Forward ported up to" name="limit_id"/>
</group>
</group>
</xpath>
</field>
</record>
</odoo>

@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
from . import project
from . import forwardport

@@ -0,0 +1,91 @@
# -*- coding: utf-8 -*-
import logging
from contextlib import ExitStack
import subprocess
from odoo import fields, models

_logger = logging.getLogger(__name__)


class Queue:
    def _process_item(self):
        raise NotImplementedError

    def _process(self):
        while True:
            b = self.search([], limit=1)
            if not b:
                return

            b._process_item()
            b.unlink()
            self.env.cr.commit()


class BatchQueue(models.Model, Queue):
    _name = 'forwardport.batches'
    _description = 'batches which got merged and are candidates for forward-porting'

    batch_id = fields.Many2one('runbot_merge.batch', required=True)
    source = fields.Selection([
        ('merge', 'Merge'),
        ('fp', 'Forward Port Followup'),
    ], required=True)

    def _process_item(self):
        batch = self.batch_id

        # only some prs of the batch have a parent, that's weird
        with_parent = batch.prs.filtered(lambda p: p.parent_id)
        if with_parent and with_parent != batch.prs:
            _logger.warn("Found a subset of batch %s (%s) with parents: %s, should probably investigate (normally either they're all parented or none are)", batch, batch.prs, with_parent)

        newbatch = batch.prs._port_forward()
        if newbatch:
            _logger.info(
                "Processing %s (from %s): %s (%s) -> %s (%s)",
                self.id, self.source,
                batch, batch.prs,
                newbatch, newbatch.prs,
            )
        else:  # reached end of seq (or batch is empty)
            # FIXME: or configuration is fucky so doesn't want to FP (maybe should error and retry?)
            _logger.info(
                "Processing %s (from %s): %s (%s) -> end of the sequence",
                self.id, self.source,
                batch, batch.prs
            )
        batch.active = False


class UpdateQueue(models.Model, Queue):
    _name = 'forwardport.updates'
    _description = 'if a forward-port PR gets updated & has followups (cherrypick succeeded) the followups need to be updated as well'

    original_root = fields.Many2one('runbot_merge.pull_requests')
    new_root = fields.Many2one('runbot_merge.pull_requests')

    def _process_item(self):
        previous = self.new_root
        with ExitStack() as s:
            for child in self.new_root._iter_descendants():
                # QUESTION: update PR to draft if there are conflicts?
                _, working_copy = previous._create_fp_branch(
                    child.target, child.refname, s)

                new_head = working_copy.stdout().rev_parse(child.refname).stdout.decode().strip()
                # update child's head to the head we're going to push
                child.with_context(ignore_head_update=True).head = new_head
                working_copy.push('-f', 'target', child.refname)
                # committing here means github could technically trigger its
                # webhook before sending a response, but committing before
                # would mean we can update the PR in database but fail to
                # update on github, which is probably worse?
                # alternatively we can commit, push, and rollback if the push
                # fails
                # FIXME: handle failures (especially on non-first update)
                self.env.cr.commit()

                previous = child

@@ -0,0 +1,689 @@
# -*- coding: utf-8 -*-
"""
Technically could be independent from mergebot but would require a lot of
duplicate work e.g. keeping track of statuses (including on commits which
might not be in PRs yet), handling co-dependent PRs, ...
However extending the mergebot also leads to messiness: fpbot should have
its own user / feedback / API keys, mergebot and fpbot both have branch
ordering but for mergebot it's completely cosmetic, being slaved to mergebot
means PR creation is trickier (as mergebot assumes opened event will always
lead to PR creation but fpbot wants to attach meaning to the PR when setting
it up), ...
"""
import base64
import contextlib
import itertools
import json
import logging
import os
import pathlib
import subprocess
import tempfile
import re
import requests
from odoo.exceptions import UserError
from odoo.tools.appdirs import user_cache_dir
from odoo import _, models, fields, api
from odoo.addons.runbot_merge import utils
from odoo.addons.runbot_merge.models.pull_requests import RPLUS
_logger = logging.getLogger('odoo.addons.forwardport')
class Project(models.Model):
_inherit = 'runbot_merge.project'
fp_github_token = fields.Char()
fp_github_name = fields.Char(store=True, compute="_compute_git_identity")
fp_github_email = fields.Char(store=True, compute="_compute_git_identity")
def _find_commands(self, comment):
if self.env.context.get('without_forward_port'):
return super()._find_commands(comment)
return re.findall(
'^\s*[@|#]?{}:? (.*)$'.format(self.fp_github_name),
comment, re.MULTILINE | re.IGNORECASE
) + super()._find_commands(comment)
# technically the email could change at any moment...
@api.depends('fp_github_token')
def _compute_git_identity(self):
s = requests.Session()
for project in self:
if not project.fp_github_token:
continue
r0 = s.get('https://api.github.com/user', headers={
'Authorization': 'token %s' % project.fp_github_token
})
if 'user:email' not in set(re.split(r',\s*', r0.headers['x-oauth-scopes'])):
raise UserError(_("The forward-port github token needs the user:email scope to fetch the bot's identity."))
r1 = s.get('https://api.github.com/user/emails', headers={
'Authorization': 'token %s' % project.fp_github_token
})
if not (r0.ok and r1.ok):
_logger.warn("Failed to fetch bot information for project %s: %s", project.name, (r0.text or r0.content) if not r0.ok else (r1.text or r1.content))
continue
project.fp_github_name = r0.json()['login']
project.fp_github_email = next((
entry['email']
for entry in r1.json()
if entry['primary']
), None)
if not project.fp_github_email:
raise UserError(_("The forward-port bot needs a primary email set up."))
class Repository(models.Model):
_inherit = 'runbot_merge.repository'
fp_remote_target = fields.Char(help="where FP branches get pushed")
class Branch(models.Model):
_inherit = 'runbot_merge.branch'
fp_sequence = fields.Integer(default=50)
fp_target = fields.Boolean(default=False)
fp_enabled = fields.Boolean(compute='_compute_fp_enabled')
@api.depends('active', 'fp_target')
def _compute_fp_enabled(self):
for b in self:
b.fp_enabled = b.active and b.fp_target
# FIXME: this should be per-project...
def _forward_port_ordered(self):
""" Returns all branches in forward port order (from the lowest to
the highest usually master)
"""
return self.search([], order=self._forward_port_ordering())
def _forward_port_ordering(self):
return ','.join(
f[:-5] if f.lower().endswith(' desc') else f + ' desc'
for f in re.split(r',\s*', 'fp_sequence,' + self._order)
)
class PullRequests(models.Model):
_inherit = 'runbot_merge.pull_requests'
# TODO: delete remote branches of merged FP PRs
# QUESTION: should the limit be copied on each child, or should it be inferred from the parent? Also what happens when detaching, is the detached PR configured independently?
# QUESTION: what happens if the limit_id is deactivated with extant PRs?
limit_id = fields.Many2one(
'runbot_merge.branch',
default=lambda self: self.env['runbot_merge.branch']._forward_port_ordered()[-1],
help="Up to which branch should this PR be forward-ported"
)
parent_id = fields.Many2one(
'runbot_merge.pull_requests', index=True,
help="a PR with a parent is an automatic forward port"
)
source_id = fields.Many2one('runbot_merge.pull_requests', index=True, help="the original source of this FP even if parents were detached along the way")
refname = fields.Char(compute='_compute_refname')
@api.depends('label')
def _compute_refname(self):
for pr in self:
pr.refname = pr.label.split(':', 1)[-1]
def create(self, vals):
# PR opened event always creates a new PR, override so we can precreate PRs
existing = self.search([
('repository', '=', vals['repository']),
('number', '=', vals['number']),
])
if existing:
return existing
if vals.get('parent_id') and 'source_id' not in vals:
vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
return super().create(vals)
def write(self, vals):
# if the PR's head is updated, detach (should split off the FP lines as this is not the original code)
# TODO: better way to do this? Especially because we don't want to
# recursively create updates
# also a bit odd to only handle updating 1 head at a time, but then
# again 2 PRs with same head is weird so...
newhead = vals.get('head')
if newhead and not self.env.context.get('ignore_head_update') and newhead != self.head:
vals.setdefault('parent_id', False)
# if any children, this is an FP PR being updated, enqueue
# updating children
if self.search_count([('parent_id', '=', self.id)]):
self.env['forwardport.updates'].create({
'original_root': self._get_root().id,
'new_root': self.id
})
if vals.get('parent_id') and 'source_id' not in vals:
vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
return super().write(vals)
def _try_closing(self, by):
r = super()._try_closing(by)
if r:
self.parent_id = False
return r
def _parse_commands(self, author, comment, login):
super(PullRequests, self.with_context(without_forward_port=True))._parse_commands(author, comment, login)
tokens = [
token
for line in re.findall('^\s*[@|#]?{}:? (.*)$'.format(self.repository.project_id.fp_github_name), comment, re.MULTILINE | re.IGNORECASE)
for token in line.split()
]
if not tokens:
_logger.info("found no commands in comment of %s (%s) (%s)", author.github_login, author.display_name,
utils.shorten(comment, 50)
)
return
# TODO: don't use a mutable tokens iterator
tokens = iter(tokens)
for token in tokens:
if token in ('r+', 'review+') and self.source_id._pr_acl(author).is_reviewer:
# don't update the root ever
for pr in filter(lambda p: p.parent_id, self._iter_ancestors()):
newstate = RPLUS.get(pr.state)
if newstate:
pr.state = newstate
pr.reviewed_by = author
# TODO: logging & feedback
elif token == 'up' and next(tokens, None) == 'to' and self._pr_acl(author).is_reviewer:
limit = next(tokens, None)
if not limit:
msg = "Please provide a branch to forward-port to."
else:
limit_id = self.env['runbot_merge.branch'].with_context(active_test=False).search([
('project_id', '=', self.repository.project_id.id),
('name', '=', limit),
])
if not limit_id:
msg = "There is no branch %r, it can't be used as a forward port target." % limit
elif not limit_id.fp_enabled:
msg = "Branch %r is disabled, it can't be used as a forward port target." % limit_id.name
else:
msg = "Forward-porting to %r." % limit_id.name
self.limit_id = limit_id
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': self.repository.id,
'pull_request': self.number,
'message': msg,
})
def _validate(self, statuses):
_logger = logging.getLogger(__name__).getChild('forwardport.next')
super()._validate(statuses)
# if the PR has a parent and is CI-validated, enqueue the next PR
for pr in self:
_logger.info('Checking if forward-port %s (%s)', pr, pr.number)
if not pr.parent_id or pr.state not in ('validated', 'ready'):
_logger.info('-> no parent or wrong state (%s)', pr.state)
continue
# if it already has a child, bail
if self.search_count([('parent_id', '=', self.id)]):
_logger.info('-> already has a child')
continue
# check if we've already selected this PR's batch for forward
# porting and just haven't come around to it yet
batch = pr.batch_id
_logger.info("%s %s %s", pr, batch, batch.prs)
if self.env['forwardport.batches'].search_count([('batch_id', '=', batch.id)]):
_logger.warn('-> already recorded')
continue
# check if batch-mate are all valid
mates = batch.prs
# wait until all of them are validated or ready
if any(pr.state not in ('validated', 'ready') for pr in mates):
_logger.warn("-> not ready (%s)", [(pr.number, pr.state) for pr in mates])
continue
# check that there's no weird-ass state
if not all(pr.parent_id for pr in mates):
_logger.warn("Found a batch (%s) with only some PRs having parents, ignoring", mates)
continue
if self.search_count([('parent_id', 'in', mates.ids)]):
_logger.warn("Found a batch (%s) with only some of the PRs having children", mates)
continue
_logger.info('-> ok')
self.env['forwardport.batches'].create({
'batch_id': batch.id,
'source': 'fp',
})
def _forward_port_sequence(self):
# risk: we assume disabled branches are still at the correct location
# in the FP sequence (in case a PR was merged to a branch which is now
# disabled, could happen right around the end of the support window)
# (or maybe we can ignore this entirely and assume all relevant
# branches are active?)
fp_complete = self.env['runbot_merge.branch'].with_context(active_test=False)._forward_port_ordered()
candidates = iter(fp_complete)
for b in candidates:
if b == self.target:
break
# the candidates iterator is just past the current branch's target
for target in candidates:
if target.fp_enabled:
yield target
if target == self.limit_id:
break
def _find_next_target(self, reference):
""" Finds the branch between target and limit_id which follows
reference
"""
if reference.target == self.limit_id:
return
# NOTE: assumes even disabled branches are properly sequenced, would
# probably be a good idea to have the FP view show all branches
branches = list(self.env['runbot_merge.branch'].with_context(active_test=False)._forward_port_ordered())
# get all branches between max(root.target, ref.target) (excluded) and limit (included)
from_ = max(branches.index(self.target), branches.index(reference.target))
to_ = branches.index(self.limit_id)
# return the first active branch in the set
return next((
branch
for branch in branches[from_+1:to_+1]
if branch.fp_enabled
), None)
def _iter_descendants(self):
pr = self
while True:
pr = self.search([('parent_id', '=', pr.id)])
if pr:
yield pr
else:
break
def _iter_ancestors(self):
while self:
yield self
self = self.parent_id
def _get_root(self):
root = self
while root.parent_id:
root = root.parent_id
return root
def _port_forward(self):
if not self:
return
ref = self[0]
base = ref.source_id or ref._get_root()
target = base._find_next_target(ref)
if target is None:
_logger.info(
"Will not forward-port %s#%s: no next target",
ref.repository.name, ref.number,
)
return # QUESTION: do the prs need to be updated?
proj = self.mapped('target.project_id')
if not proj.fp_github_token:
_logger.warn(
"Can not forward-port %s#%s: no token on project %s",
ref.repository.name, ref.number,
proj.name
)
return
notarget = [p.repository.name for p in self if not p.repository.fp_remote_target]
if notarget:
_logger.warn(
"Can not forward-port %s: repos %s don't have a remote configured",
self, ', '.join(notarget)
)
return
# take only the branch bit
new_branch = '%s-%s-%s-forwardport' % (
target.name,
base.refname,
# avoid collisions between fp branches (labels can be reused
# or conflict especially as we're chopping off the owner)
base64.b32encode(os.urandom(5)).decode()
)
# TODO: send outputs to logging?
conflicts = {}
with contextlib.ExitStack() as s:
for pr in self:
conflicts[pr], working_copy = pr._create_fp_branch(
target, new_branch, s)
working_copy.push('target', new_branch)
has_conflicts = any(conflicts.values())
# problemo: this should forward port a batch at a time, if porting
# one of the PRs in the batch fails it's a huge problem, though this loop
# only concerns itself with the creation of the followup objects so...
new_batch = self.browse(())
for pr in self:
owner, _ = pr.repository.fp_remote_target.split('/', 1)
source = pr.source_id or pr
message = source.message
(h, out, err) = conflicts.get(pr) or (None, None, None)
if h:
message = """Cherrypicking %s of source #%d failed with the following
stdout:
```
%s
```
stderr:
```
%s
```
Either perform the forward-port manually (and push to this branch, proceeding
as usual) or close this PR (maybe?).
In the former case, you may want to edit this PR message as well.
""" % (h, source.number, out, err)
r = requests.post(
'https://api.github.com/repos/{}/pulls'.format(pr.repository.name), json={
'title': "Forward Port of #%d to %s%s" % (
source.number,
target.name,
' (failed)' if has_conflicts else ''
),
'message': message,
'head': '%s:%s' % (owner, new_branch),
'base': target.name,
'draft': has_conflicts,
}, headers={
'Accept': 'application/vnd.github.shadow-cat-preview+json',
'Authorization': 'token %s' % pr.repository.project_id.fp_github_token,
}
)
assert 200 <= r.status_code < 300, r.json()
new_pr = self._from_gh(r.json())
_logger.info("Created forward-port PR %s", new_pr)
new_batch |= new_pr
new_pr.write({
'merge_method': pr.merge_method,
'source_id': source.id,
# only link to previous PR of sequence if cherrypick passed
'parent_id': pr.id if not has_conflicts else False,
})
assignees = (new_pr.source_id.author | new_pr.source_id.reviewed_by).mapped('github_login')
self.env['runbot_merge.pull_requests.feedback'].create({
'repository': new_pr.repository.id,
'pull_request': new_pr.number,
'message': "Ping %s" % ', '.join('@' + login for login in assignees if login),
})
# not great but we probably want to avoid the risk of the webhook
# creating the PR from under us. There's still a "hole" between
# the POST being executed on gh and the commit but...
self.env.cr.commit()
# batch the PRs so _validate can perform the followup FP properly
# (with the entire batch). If there are conflicts then create a
# deactivated batch so the interface is coherent but we don't pick up
# an active batch we're never going to deactivate.
return self.env['runbot_merge.batch'].create({
'target': target.id,
'prs': [(6, 0, new_batch.ids)],
'active': not has_conflicts,
})
def _create_fp_branch(self, target_branch, fp_branch_name, cleanup):
""" Creates a forward-port for the current PR to ``target_branch`` under
``fp_branch_name``.
:param target_branch: the branch to port forward to
:param fp_branch_name: the name of the branch to create the FP under
:param ExitStack cleanup: so the working directories can be cleaned up
:return: (conflictp, working_copy)
:rtype: (bool, Repo)
"""
source = self._get_local_directory()
# update all the branches & PRs
source.with_params('gc.pruneExpire=1.day.ago').fetch('-p', 'origin')
# FIXME: check that pr.head is pull/{number}'s head instead?
source.cat_file(e=self.head)
# create working copy
working_copy = source.clone(
cleanup.enter_context(tempfile.TemporaryDirectory()),
branch=target_branch.name
)
project_id = self.repository.project_id
# configure local repo so commits automatically pickup bot identity
working_copy.config('--local', 'user.name', project_id.fp_github_name)
working_copy.config('--local', 'user.email', project_id.fp_github_email)
# add target remote
working_copy.remote(
'add', 'target',
'https://{p.fp_github_name}:{p.fp_github_token}@github.com/{r.fp_remote_target}'.format(
r=self.repository,
p=project_id
)
)
working_copy.checkout(b=fp_branch_name)
root = self._get_root()
try:
root._cherry_pick(working_copy)
except CherrypickError as e:
# apply the diff of the PR to the target
# in git diff, "A...B" selects the bits of B not in A
# (which is the other way around from gitrevisions(7)
# because git)
diff = working_copy.stdout().diff('%s...%s' % root._reference_branches()).stdout
# apply probably fails (because of the conflict), we don't care
working_copy.with_config(input=diff).apply('-3', '-')
working_copy.commit(
a=True, allow_empty=True,
message="""Cherry pick of %s failed
stdout:
%s
stderr:
%s
""" % e.args)
return e.args, working_copy
else:
return None, working_copy
def _cherry_pick(self, working_copy):
""" Cherrypicks ``self`` into the working copy
:return: ``True`` if the cherrypick was successful, ``False`` otherwise
"""
# <xxx>.cherrypick.<number>
logger = _logger.getChild('cherrypick').getChild(str(self.number))
cmap = json.loads(self.commits_map)
# original head so we can reset
h = working_copy.stdout().rev_parse('HEAD').stdout
commits_range = self._reference_branches()
# need to cherry-pick commit by commit because `-n` doesn't yield
# back control on each op. `-e` with VISUAL=false kinda works but
# doesn't seem to provide a good way to know when we're done
# cherrypicking
rev_list = working_copy.lazy().stdout()\
.rev_list('--reverse', '%s..%s' % commits_range)
commits = list(rev_list.stdout)
logger.info("%d commit(s) on %s", len(commits), h.decode())
for commit in commits:
commit = commit.decode().strip()
r = working_copy.with_config(
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False,
).cherry_pick(commit)
if r.returncode: # pick failed, reset and bail
logger.info("%s: failed", commit)
working_copy.reset('--hard', h.decode().strip())
raise CherrypickError(commit, r.stdout.decode(), r.stderr.decode())
original_msg = working_copy.stdout().show('-s', '--format=%B', commit).stdout.decode()
msg = self._parse_commit_message(original_msg)
# original signed-off-er should be retained but didn't necessarily
# sign off here, so convert signed-off-by to something else
sob = msg.headers.getlist('signed-off-by')
if sob:
msg.headers.remove('signed-off-by')
msg.headers.extend(
('original-signed-off-by', v)
for v in sob
)
# write the *merged* commit as "original", not the PR's
msg.headers['x-original-commit'] = cmap.get(commit, commit)
# replace existing commit message with massaged one
working_copy\
.with_config(input=str(msg).encode())\
.commit(amend=True, file='-')
new = working_copy.stdout().rev_parse('HEAD').stdout.decode()
logger.info('%s: success -> %s', commit, new)
def _reference_branches(self):
"""
:returns: (PR target, PR branch)
"""
ancestor_branch = 'origin/pull/%d' % self.number
source_target = 'origin/%s' % self.target.name
return source_target, ancestor_branch
def _get_local_directory(self):
repos_dir = pathlib.Path(user_cache_dir('forwardport'))
repos_dir.mkdir(parents=True, exist_ok=True)
repo_dir = repos_dir / self.repository.name
if repo_dir.is_dir():
return git(repo_dir)
else:
subprocess.run([
'git', 'clone', '--bare',
'https://{}:{}@github.com/{}'.format(
self.repository.project_id.fp_github_name,
self.repository.project_id.fp_github_token,
self.repository.name,
),
str(repo_dir)
], check=True)
# add PR branches as local but namespaced (?)
repo = git(repo_dir)
repo.config('--add', 'remote.origin.fetch', '+refs/pull/*/head:refs/heads/pull/*')
return repo
class Stagings(models.Model):
_inherit = 'runbot_merge.stagings'
def write(self, vals):
r = super().write(vals)
if vals.get('active') == False and self.state == 'success':
for b in self.with_context(active_test=False).batch_ids:
# if batch PRs have parents they're part of an FP sequence and
# thus handled separately
if not b.mapped('prs.parent_id'):
self.env['forwardport.batches'].create({
'batch_id': b.id,
'source': 'merge',
})
return r
def git(directory): return Repo(directory, check=True)
class Repo:
def __init__(self, directory, **config):
self._directory = str(directory)
self._config = config
self._params = ()
self._opener = subprocess.run
def __getattr__(self, name):
return GitCommand(self, name.replace('_', '-'))
def _run(self, *args, **kwargs):
opts = {**self._config, **kwargs}
return self._opener(
('git', '-C', self._directory)
+ tuple(itertools.chain.from_iterable(('-c', p) for p in self._params))
+ args,
**opts
)
def stdout(self, flag=True):
if flag is True:
return self.with_config(stdout=subprocess.PIPE)
elif flag is False:
return self.with_config(stdout=None)
return self.with_config(stdout=flag)
def lazy(self):
r = self.with_config()
r._config.pop('check', None)
r._opener = subprocess.Popen
return r
def check(self, flag):
return self.with_config(check=flag)
def with_config(self, **kw):
opts = {**self._config, **kw}
r = Repo(self._directory, **opts)
r._opener = self._opener
r._params = self._params
return r
def with_params(self, *args):
r = self.with_config()
r._params = args
return r
def clone(self, to, branch=None):
self._run(
'git', 'clone', '-s',
*([] if branch is None else ['-b', branch]),
self._directory, to,
)
return Repo(to)
class GitCommand:
def __init__(self, repo, name):
self._name = name
self._repo = repo
def __call__(self, *args, **kwargs):
return self._repo._run(self._name, *args, *self._to_options(kwargs))
def _to_options(self, d):
for k, v in d.items():
if len(k) == 1:
yield '-' + k
else:
yield '--' + k.replace('_', '-')
if v not in (None, True):
assert v is not False
yield str(v)
class CherrypickError(Exception):
...

@@ -0,0 +1,701 @@
# -*- coding: utf-8 -*-
import base64
import collections
import copy
import itertools
import logging
import pathlib
import socket
import time
import uuid
import xmlrpc.client
from contextlib import closing
import pytest
import subprocess
import re
import requests
from shutil import rmtree
from odoo.tools.appdirs import user_cache_dir
DEFAULT_CRONS = [
'runbot_merge.process_updated_commits',
'runbot_merge.merge_cron',
'forwardport.port_forward',
'forwardport.updates',
'runbot_merge.check_linked_prs_status',
'runbot_merge.feedback_cron',
]
def pytest_report_header(config):
return 'Running against database ' + config.getoption('--db')
def pytest_addoption(parser):
parser.addoption('--db', help="DB to run the tests against", default=str(uuid.uuid4()))
parser.addoption('--addons-path')
parser.addoption("--no-delete", action="store_true", help="Don't delete repo after a failed run")
def wait_for_hook(n=1):
time.sleep(10 * n)
def wait_for_server(db, port, proc, mod, timeout=120):
""" Polls for server to be response & have installed our module.
Raises socket.timeout on failure
"""
limit = time.time() + timeout
while True:
if proc.poll() is not None:
raise Exception("Server unexpectedly closed")
try:
uid = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/common'.format(port))\
.authenticate(db, 'admin', 'admin', {})
mods = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/object'.format(port))\
.execute_kw(
db, uid, 'admin', 'ir.module.module', 'search_read', [
[('name', '=', mod)], ['state']
])
if mods and mods[0].get('state') == 'installed':
break
except ConnectionRefusedError:
if time.time() > limit:
raise socket.timeout()
# public_repo — necessary to leave comments
# admin:repo_hook — to set up hooks (duh)
# delete_repo — to cleanup repos created under a user
# user:email — fetch token/user's email addresses
TOKEN_SCOPES = {
'github': {'admin:repo_hook', 'delete_repo', 'public_repo', 'user:email'},
# TODO: user:email so they can fetch the user's email?
'role_reviewer': {'public_repo'},# 'delete_repo'},
'role_self_reviewer': {'public_repo'},# 'delete_repo'},
'role_other': {'public_repo'},# 'delete_repo'},
}
@pytest.fixture(autouse=True, scope='session')
def _check_scopes(config):
for section, vals in config.items():
required_scopes = TOKEN_SCOPES.get(section)
if required_scopes is None:
continue
response = requests.get('https://api.github.com/rate_limit', headers={
'Authorization': 'token %s' % vals['token']
})
assert response.status_code == 200
x_oauth_scopes = response.headers['X-OAuth-Scopes']
token_scopes = set(re.split(r',\s+', x_oauth_scopes))
assert token_scopes >= required_scopes, \
"%s should have scopes %s, found %s" % (section, token_scopes, required_scopes)
@pytest.fixture(autouse=True)
def _cleanup_cache(config, users):
""" forwardport has a repo cache which it assumes is unique per name
but tests always use the same repo paths / names for different repos
(the repos get re-created), leading to divergent repo histories.
So clear cache after each test, two tests should not share repos.
"""
yield
cache_root = pathlib.Path(user_cache_dir('forwardport'))
rmtree(cache_root / config['github']['owner'], ignore_errors=True)
for login in users.values():
rmtree(cache_root / login, ignore_errors=True)
@pytest.fixture(autouse=True)
def users(users_):
return users_
@pytest.fixture
def project(env, config):
return env['runbot_merge.project'].create({
'name': 'odoo',
'github_token': config['github']['token'],
'github_prefix': 'hansen',
'fp_github_token': config['github']['token'],
'required_statuses': 'legal/cla,ci/runbot',
})
@pytest.fixture(scope='session')
def module():
""" When a test function is (going to be) run, selects the containing
module (as needing to be installed)
"""
# NOTE: no request.fspath (because no request.function) in session-scoped fixture so can't put module() at the toplevel
return 'forwardport'
@pytest.fixture(scope='session')
def port():
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.bind(('', 0))
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1]
@pytest.fixture(scope='session')
def dbcache(request, module):
""" Creates template DB once per run, then just duplicates it before
starting odoo and running the testcase
"""
db = request.config.getoption('--db')
subprocess.run([
'odoo', '--no-http',
'--addons-path', request.config.getoption('--addons-path'),
'-d', db, '-i', module,
'--max-cron-threads', '0',
'--stop-after-init'
], check=True)
yield db
subprocess.run(['dropdb', db])
@pytest.fixture
def db(request, dbcache):
rundb = str(uuid.uuid4())
subprocess.run(['createdb', '-T', dbcache, rundb], check=True)
yield rundb
if not request.config.getoption('--no-delete'):
subprocess.run(['dropdb', rundb], check=True)
@pytest.fixture
def server(request, db, port, module):
p = subprocess.Popen([
'odoo', '--http-port', str(port),
'--addons-path', request.config.getoption('--addons-path'),
'-d', db,
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
])
try:
wait_for_server(db, port, p, module)
yield p
finally:
p.terminate()
p.wait(timeout=30)
@pytest.fixture
def env(port, server, db):
yield Environment(port, db)
# users is just so I can avoid autouse on toplevel users fixture b/c it (seems
# to) break the existing local tests
@pytest.fixture
def make_repo(request, config, tunnel, users):
owner = config['github']['owner']
github = requests.Session()
github.headers['Authorization'] = 'token %s' % config['github']['token']
# check whether "owner" is a user or an org, as repo-creation endpoint is
# different
q = github.get('https://api.github.com/users/{}'.format(owner))
q.raise_for_status()
if q.json().get('type') == 'Organization':
endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner)
else:
endpoint = 'https://api.github.com/user/repos'
r = github.get('https://api.github.com/user')
r.raise_for_status()
assert r.json()['login'] == owner
repos = []
def repomaker(name):
fullname = '{}/{}'.format(owner, name)
repo_url = 'https://api.github.com/repos/{}'.format(fullname)
if request.config.getoption('--no-delete'):
if github.head(repo_url).ok:
pytest.skip("Repository {} already exists".format(fullname))
else:
# just try to delete the repo, we don't really care
if github.delete(repo_url).ok:
# if we did delete a repo, wait a bit as gh might need to
# propagate the thing?
time.sleep(30)
# create repo
r = github.post(endpoint, json={
'name': name,
'has_issues': False,
'has_projects': False,
'has_wiki': False,
'auto_init': False,
# at least one merge method must be enabled :(
'allow_squash_merge': False,
# 'allow_merge_commit': False,
'allow_rebase_merge': False,
})
r.raise_for_status()
new_repo = Repo(github, fullname, repos)
# create webhook
github.post('{}/hooks'.format(repo_url), json={
'name': 'web',
'config': {
'url': '{}/runbot_merge/hooks'.format(tunnel),
'content_type': 'json',
'insecure_ssl': '1',
},
'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review']
})
github.put('https://api.github.com/repos/{}/contents/{}'.format(fullname, 'a'), json={
'path': 'a',
'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects',
'content': base64.b64encode(b'whee').decode('ascii'),
'branch': 'garbage_%s' % uuid.uuid4()
}).raise_for_status()
return new_repo
yield repomaker
if not request.config.getoption('--no-delete'):
for repo in reversed(repos):
repo.delete()
Commit = collections.namedtuple('Commit', 'id tree message author committer parents')
class Repo:
def __init__(self, session, fullname, repos):
self._session = session
self.name = fullname
self._repos = repos
self.hook = False
repos.append(self)
# unwatch repo
self.unsubscribe()
def unsubscribe(self, token=None):
self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={
'subscribed': False,
'ignored': True,
})
def delete(self):
r = self._session.delete('https://api.github.com/repos/{}'.format(self.name))
if r.status_code != 204:
logging.getLogger(__name__).warn("Unable to delete repository %s", self.name)
def commit(self, ref):
if not re.match(r'[0-9a-f]{40}', ref):
if not ref.startswith(('heads/', 'refs/heads/')):
ref = 'refs/heads/' + ref
# apparently heads/<branch> ~ refs/heads/<branch> but are not
# necessarily up to date ??? unlike the git ref system where :ref
# starts at heads/
if ref.startswith('heads/'):
ref = 'refs/' + ref
r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref))
response = r.json()
assert 200 <= r.status_code < 300, response
return self._commit_from_gh(response)
def _commit_from_gh(self, gh_commit):
c = gh_commit['commit']
return Commit(
id=gh_commit['sha'],
tree=c['tree']['sha'],
message=c['message'],
author=c['author'],
committer=c['committer'],
parents=[p['sha'] for p in gh_commit['parents']],
)
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from map(self._commit_from_gh, r.json())
if not r.links.get('next'):
return
def read_tree(self, commit):
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
# read tree's blobs
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha']))
assert 200 <= r.status_code < 300, r.json()
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree
def make_ref(self, name, commit, force=False):
assert self.hook
assert name.startswith('heads/')
r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={
'ref': 'refs/' + name,
'sha': commit,
})
if force and r.status_code == 422:
self.update_ref(name, commit, force=True)
return
assert 200 <= r.status_code < 300, r.json()
def update_ref(self, name, commit, force=False):
r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force})
assert 200 <= r.status_code < 300, r.json()
def make_commits(self, root, *commits, ref=None):
assert self.hook
if root:
c = self.commit(root)
tree = c.tree
parents = [c.id]
else:
tree = None
parents = []
hashes = []
for commit in commits:
r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={
'tree': [
{'path': k, 'mode': '100644', 'type': 'blob', 'content': v}
for k, v in commit.tree.items()
],
'base_tree': tree
})
assert 200 <= r.status_code < 300, r.json()
tree = r.json()['sha']
data = {
'parents': parents,
'message': commit.message,
'tree': tree,
}
if commit.author:
data['author'] = commit.author
if commit.committer:
data['committer'] = commit.committer
r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data)
assert 200 <= r.status_code < 300, r.json()
hashes.append(r.json()['sha'])
parents = [hashes[-1]]
if ref:
self.make_ref(ref, hashes[-1], force=True)
return hashes
def fork(self, *, token=None):
s = self._get_session(token)
r = s.post('https://api.github.com/repos/{}/forks'.format(self.name))
assert 200 <= r.status_code < 300, r.json()
repo_name = r.json()['full_name']
repo_url = 'https://api.github.com/repos/' + repo_name
# poll for end of fork
limit = time.time() + 60
while s.head(repo_url, timeout=5).status_code != 200:
if time.time() > limit:
raise TimeoutError("No response for repo %s over 60s" % repo_name)
time.sleep(1)
return Repo(s, repo_name, self._repos)
def _get_session(self, token):
s = self._session
if token:
s = requests.Session()
s.headers['Authorization'] = 'token %s' % token
return s
def get_pr(self, number):
# ensure PR exists before returning it
self._session.head('https://api.github.com/repos/{}/pulls/{}'.format(
self.name,
number,
)).raise_for_status()
return PR(self, number)
def make_pr(self, *, title=None, body=None, target, head, token=None):
assert self.hook
self.hook = 2
if title is None:
assert ":" not in head, \
"will not auto-infer titles for PRs in a remote repo"
c = self.commit(head)
parts = iter(c.message.split('\n\n', 1))
title = next(parts)
body = next(parts, None)
headers = {}
if token:
headers['Authorization'] = 'token {}'.format(token)
r = self._session.post(
'https://api.github.com/repos/{}/pulls'.format(self.name),
json={
'title': title,
'body': body,
'head': head,
'base': target,
},
headers=headers,
)
pr = r.json()
assert 200 <= r.status_code < 300, pr
return PR(self, pr['number'])
def post_status(self, ref, status, context='default', **kw):
assert self.hook
assert status in ('error', 'failure', 'pending', 'success')
r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={
'state': status,
'context': context,
**kw
})
assert 200 <= r.status_code < 300, r.json()
def __enter__(self):
self.hook = 1
return self
def __exit__(self, *args):
wait_for_hook(self.hook)
self.hook = 0
class PR:
__slots__ = ['number', 'repo']
def __init__(self, repo, number):
self.repo = repo
self.number = number
@property
def _pr(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return r.json()
@property
def head(self):
return self._pr['head']['sha']
@property
def comments(self):
r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return [
(c['user']['login'], c['body'])
for c in r.json()
]
def post_comment(self, body, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.post(
'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number),
json={'body': body},
headers=headers,
)
assert 200 <= r.status_code < 300, r.json()
return r.json()['id']
def _set_prop(self, prop, value):
assert self.repo.hook
r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={
prop: value
})
assert 200 <= r.status_code < 300, r.json()
def open(self):
self._set_prop('state', 'open')
def close(self):
self._set_prop('state', 'closed')
@property
def branch(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(
self.repo.name,
self.number,
))
assert 200 <= r.status_code < 300, r.json()
info = r.json()
repo = self.repo
reponame = info['head']['repo']['full_name']
if reponame != self.repo.name:
# not sure deep copying the session object is safe / proper...
repo = Repo(copy.deepcopy(self.repo._session), reponame, [])
return PRBranch(repo, info['head']['ref'])
PRBranch = collections.namedtuple('PRBranch', 'repo branch')
class Environment:
def __init__(self, port, db):
self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {})
self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port))
self._db = db
def __call__(self, model, method, *args, **kwargs):
return self._object.execute_kw(
self._db, self._uid, 'admin',
model, method,
args, kwargs
)
def __getitem__(self, name):
return Model(self, name)
def run_crons(self, *xids):
crons = xids or DEFAULT_CRONS
for xid in crons:
_, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid)
assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model)
self('ir.cron', 'method_direct_trigger', [cron_id])
# sleep for some time as a lot of crap may have happened (?)
wait_for_hook()
class Model:
__slots__ = ['_env', '_model', '_ids', '_fields']
def __init__(self, env, model, ids=(), fields=None):
object.__setattr__(self, '_env', env)
object.__setattr__(self, '_model', model)
object.__setattr__(self, '_ids', tuple(ids or ()))
object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation']))
@property
def ids(self):
return self._ids
def __bool__(self):
return bool(self._ids)
def __len__(self):
return len(self._ids)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
return self._model == other._model and self._ids == other._ids
def __repr__(self):
return "{}({})".format(self._model, ', '.join(str(id) for id in self._ids))
def exists(self):
ids = self._env(self._model, 'exists', self._ids)
return Model(self._env, self._model, ids)
def search(self, *args, **kwargs):
ids = self._env(self._model, 'search', *args, **kwargs)
return Model(self._env, self._model, ids)
def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values):
return self._env(self._model, 'write', self._ids, values)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def __getitem__(self, index):
if isinstance(index, str):
return getattr(self, index)
ids = self._ids[index]
if isinstance(ids, int):
ids = [ids]
return Model(self._env, self._model, ids, fields=self._fields)
def __getattr__(self, fieldname):
if not self._ids:
return False
assert len(self._ids) == 1
if fieldname == 'id':
return self._ids[0]
val = self.read([fieldname])[0][fieldname]
field_description = self._fields[fieldname]
if field_description['type'] in ('many2one', 'one2many', 'many2many'):
val = val or []
if field_description['type'] == 'many2one':
val = val[:1] # (id, name) => [id]
return Model(self._env, field_description['relation'], val)
return val
def __setattr__(self, fieldname, value):
assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many')
self._env(self._model, 'write', self._ids, {fieldname: value})
def __iter__(self):
return (
Model(self._env, self._model, [i], fields=self._fields)
for i in self._ids
)
def mapped(self, path):
field, *rest = path.split('.', 1)
descr = self._fields[field]
if descr['type'] in ('many2one', 'one2many', 'many2many'):
result = Model(self._env, descr['relation'])
for record in self:
result |= getattr(record, field)
return result.mapped(rest[0]) if rest else result
assert not rest
return [getattr(r, field) for r in self]
def filtered(self, fn):
result = Model(self._env, self._model, fields=self._fields)
for record in self:
if fn(record):
result |= record
return result
def __sub__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields)
def __or__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields)
__add__ = __or__
def __and__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields)
def invalidate_cache(self, fnames=None, ids=None):
pass # not a concern when every access is an RPC call

File diff suppressed because it is too large

@@ -0,0 +1,143 @@
# -*- coding: utf-8 -*-
import sys
import pytest
import re
from utils import *
def make_basic(env, config, make_repo, *, fp_token, fp_remote):
    """ Creates a basic repo with 3 forking branches

    0 -- 1 -- 2 -- 3 -- 4 : a
              |
              `-- 11 -- 22 : b
                  |
                  `-- 111 : c
    each branch just adds and modifies a file (resp. f, g and h) through the
    contents sequence a b c d e
    """
    Projects = env['runbot_merge.project']
    project = Projects.search([('name', '=', 'myproject')])
    if not project:
        project = env['runbot_merge.project'].create({
            'name': 'myproject',
            'github_token': config['github']['token'],
            'github_prefix': 'hansen',
            'fp_github_token': fp_token and config['github']['token'],
            'required_statuses': 'legal/cla,ci/runbot',
            'branch_ids': [
                (0, 0, {'name': 'a', 'fp_sequence': 2, 'fp_target': True}),
                (0, 0, {'name': 'b', 'fp_sequence': 1, 'fp_target': True}),
                (0, 0, {'name': 'c', 'fp_sequence': 0, 'fp_target': True}),
            ],
        })

    prod = make_repo('proj')
    with prod:
        a_0, a_1, a_2, a_3, a_4, = prod.make_commits(
            None,
            Commit("0", tree={'f': 'a'}),
            Commit("1", tree={'f': 'b'}),
            Commit("2", tree={'f': 'c'}),
            Commit("3", tree={'f': 'd'}),
            Commit("4", tree={'f': 'e'}),
            ref='heads/a',
        )
        b_1, b_2 = prod.make_commits(
            a_2,
            Commit('11', tree={'g': 'a'}),
            Commit('22', tree={'g': 'b'}),
            ref='heads/b',
        )
        prod.make_commits(
            b_1,
            Commit('111', tree={'h': 'a'}),
            ref='heads/c',
        )
    other = prod.fork()

    project.write({
        'repo_ids': [(0, 0, {
            'name': prod.name,
            'fp_remote_target': fp_remote and other.name,
        })],
    })

    return project, prod, other


def test_no_token(env, config, make_repo):
    """ if there's no token on the repo, nothing should break though should
    log
    """
    # create project configured with remotes on the repo but no token
    proj, prod, _ = make_basic(env, config, make_repo, fp_token=False, fp_remote=True)

    with prod:
        prod.make_commits(
            'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch'
        )
        pr = prod.make_pr(target='a', head='abranch')
        prod.post_status(pr.head, 'success', 'legal/cla')
        prod.post_status(pr.head, 'success', 'ci/runbot')
        pr.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()

    with prod:
        prod.post_status('staging.a', 'success', 'legal/cla')
        prod.post_status('staging.a', 'success', 'ci/runbot')

    # wanted to use capfd, however it's not compatible with the subprocess
    # being created beforehand and server() depending on capfd() would remove
    # all its output from the normal pytest capture (dumped on test failure)
    #
    # so I'd really have to hand-roll the entire thing by having server()
    # pipe stdout/stderr to temp files, yield those temp files, and have the
    # tests mess around with reading those files, and finally have the server
    # dump the file contents back to the test runner's stdout/stderr on
    # fixture teardown...
    env.run_crons()
    assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\
        "should not have created forward port"


def test_remove_token(env, config, make_repo):
    proj, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=True)
    proj.fp_github_token = False

    with prod:
        prod.make_commits(
            'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch'
        )
        pr = prod.make_pr(target='a', head='abranch')
        prod.post_status(pr.head, 'success', 'legal/cla')
        prod.post_status(pr.head, 'success', 'ci/runbot')
        pr.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()

    with prod:
        prod.post_status('staging.a', 'success', 'legal/cla')
        prod.post_status('staging.a', 'success', 'ci/runbot')
    env.run_crons()

    assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\
        "should not have created forward port"


def test_no_target(env, config, make_repo):
    proj, prod, _ = make_basic(env, config, make_repo, fp_token=True, fp_remote=False)

    with prod:
        prod.make_commits(
            'a', Commit('c0', tree={'a': '0'}), ref='heads/abranch'
        )
        pr = prod.make_pr(target='a', head='abranch')
        prod.post_status(pr.head, 'success', 'legal/cla')
        prod.post_status(pr.head, 'success', 'ci/runbot')
        pr.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()

    with prod:
        prod.post_status('staging.a', 'success', 'legal/cla')
        prod.post_status('staging.a', 'success', 'ci/runbot')
    env.run_crons()

    assert len(env['runbot_merge.pull_requests'].search([], order='number')) == 1,\
        "should not have created forward port"

@@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
# target branch '-' source branch '-' base32 unique '-forwardport'
import itertools
MESSAGE_TEMPLATE = """{message}
closes {repo}#{number}
{headers}Signed-off-by: {name} <{login}@users.noreply.github.com>"""
REF_PATTERN = r'{target}-{source}-\w{{8}}-forwardport'
class Commit:
    def __init__(self, message, *, author=None, committer=None, tree):
        self.id = None
        self.message = message
        self.author = author
        self.committer = committer
        self.tree = tree


def validate_all(repos, refs, contexts=('ci/runbot', 'legal/cla')):
    """ Post a "success" status for each context on each ref of each repo
    """
    for repo, branch, context in itertools.product(repos, refs, contexts):
        repo.post_status(branch, 'success', context)

@@ -163,40 +163,12 @@ def handle_pr(env, event):
# don't mark merged PRs as closed (!!!)
if event['action'] == 'closed' and pr_obj.state != 'merged':
# ignore if the PR is already being updated in a separate transaction
# (most likely being merged?)
env.cr.execute('''
SELECT id, state FROM runbot_merge_pull_requests
WHERE id = %s AND state != 'merged'
FOR UPDATE SKIP LOCKED;
''', [pr_obj.id])
res = env.cr.fetchone()
# FIXME: store some sort of "try to close it later" if the merge fails?
if not res:
if pr_obj._try_closing(event['sender']['login']):
return 'Closed {}'.format(pr_obj.id)
else:
return 'Ignored: could not lock rows (probably being merged)'
env.cr.execute('''
UPDATE runbot_merge_pull_requests
SET state = 'closed'
WHERE id = %s AND state != 'merged'
''', [pr_obj.id])
env.cr.commit()
pr_obj.invalidate_cache(fnames=['state'], ids=[pr_obj.id])
if env.cr.rowcount:
env['runbot_merge.pull_requests.tagging'].create({
'pull_request': pr_obj.number,
'repository': repo.id,
'state_from': res[1] if not pr_obj.staging_id else 'staged',
'state_to': 'closed',
})
pr_obj.unstage(
"PR %s:%s closed by %s",
pr_obj.repository.name, pr_obj.number,
event['sender']['login']
)
return 'Closed {}'.format(pr_obj.id)
if event['action'] == 'reopened' and pr_obj.state == 'closed':
pr_obj.write({
'state': 'opened',

@@ -1,4 +1,5 @@
import base64
import collections
import datetime
import io
import json
@@ -6,6 +7,7 @@ import logging
import os
import pprint
import re
import sys
import time
from itertools import takewhile
@@ -433,7 +435,10 @@ class Branch(models.Model):
'state_to': 'staged',
})
logger.info("Created staging %s (%s)", st, staged)
logger.info("Created staging %s (%s) to %s", st, ', '.join(
'%s[%s]' % (batch, batch.prs)
for batch in staged
), st.target.name)
return st
def _check_visibility(self, repo, branch_name, expected_head, token):
@@ -453,6 +458,7 @@ class Branch(models.Model):
return head == expected_head
return False
ACL = collections.namedtuple('ACL', 'is_admin is_reviewer is_author')
class PullRequests(models.Model):
_name = 'runbot_merge.pull_requests'
_order = 'number desc'
@@ -533,7 +539,7 @@ class PullRequests(models.Model):
else:
separator = 's '
return '<pull_request%s%s>' % (separator, ' '.join(
'%s:%s' % (p.repository.name, p.number)
'{0.id} ({0.repository.name}:{0.number})'.format(p)
for p in self
))
@@ -653,10 +659,7 @@ class PullRequests(models.Model):
(login, name) = (author.github_login, author.display_name) if author else (login, 'not in system')
is_admin = (author.reviewer and self.author != author) or (author.self_reviewer and self.author == author)
is_reviewer = is_admin or self in author.delegate_reviewer
# TODO: should delegate reviewers be able to retry PRs?
is_author = is_reviewer or self.author == author
is_admin, is_reviewer, is_author = self._pr_acl(author)
commands = dict(
ps
@@ -709,7 +712,6 @@ class PullRequests(models.Model):
elif command == 'review':
if param and is_reviewer:
newstate = RPLUS.get(self.state)
print('r+', self.state, self.status)
if newstate:
self.state = newstate
self.reviewed_by = author
@@ -794,6 +796,16 @@ class PullRequests(models.Model):
})
return '\n'.join(msg)
def _pr_acl(self, user):
if not self:
return ACL(False, False, False)
is_admin = (user.reviewer and self.author != user) or (user.self_reviewer and self.author == user)
is_reviewer = is_admin or self in user.delegate_reviewer
# TODO: should delegate reviewers be able to retry PRs?
is_author = is_reviewer or self.author == user
return ACL(is_admin, is_reviewer, is_author)
def _validate(self, statuses):
# could have two PRs (e.g. one open and one closed) at least
# temporarily on the same head, or on the same head with different
@@ -1108,6 +1120,39 @@ class PullRequests(models.Model):
self.staging_id.cancel(reason, *args)
def _try_closing(self, by):
# ignore if the PR is already being updated in a separate transaction
# (most likely being merged?)
self.env.cr.execute('''
SELECT id, state FROM runbot_merge_pull_requests
WHERE id = %s AND state != 'merged'
FOR UPDATE SKIP LOCKED;
''', [self.id])
res = self.env.cr.fetchone()
if not res:
return False
self.env.cr.execute('''
UPDATE runbot_merge_pull_requests
SET state = 'closed'
WHERE id = %s AND state != 'merged'
''', [self.id])
self.env.cr.commit()
self.invalidate_cache(fnames=['state'], ids=[self.id])
if self.env.cr.rowcount:
self.env['runbot_merge.pull_requests.tagging'].create({
'pull_request': self.number,
'repository': self.repository.id,
'state_from': res[1] if not self.staging_id else 'staged',
'state_to': 'closed',
})
self.unstage(
"PR %s:%s closed by %s",
self.repository.name, self.number,
by
)
return True
# state changes on reviews
RPLUS = {
'opened': 'approved',

@@ -119,7 +119,7 @@ def read_object(store, tid):
# recursively reads tree of objects
o = store[tid]
if isinstance(o, bytes):
return o
return o.decode()
return {
k: read_object(store, v)
for k, v in o.items()

@@ -534,7 +534,11 @@ class Repo:
)
def read_tree(self, commit):
# read tree object
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
@@ -542,9 +546,10 @@
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha']))
r = self._session.get(t['url'])
assert 200 <= r.status_code < 300, r.json()
tree[t['path']] = base64.b64decode(r.json()['content'])
# assume all test content is textual
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree

@@ -91,8 +91,8 @@ def test_trivial_flow(env, repo, page, users):
# with default-rebase, only one parent is "known"
assert master.parents[0] == m
assert repo.read_tree(master) == {
'a': b'some other content',
'b': b'a second file',
'a': 'some other content',
'b': 'a second file',
}
assert master.message == "gibberish\n\nblahblah\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
@@ -803,8 +803,8 @@ def test_rebase_failure(env, repo, users, remote_p):
(users['reviewer'], 'hansen r+'),
]
assert repo.read_tree(repo.commit('heads/staging.master')) == {
'm': b'm',
'b': b'b',
'm': 'm',
'b': 'b',
}
def test_ci_failure_after_review(env, repo, users):
@@ -1010,7 +1010,7 @@ class TestMergeMethod:
"the pr head should not be an ancestor of the staging branch in a squash merge"
assert re.match('^force rebuild', staging.message)
assert repo.read_tree(staging) == {
'm': b'c1', 'm2': b'm2',
'm': 'c1', 'm2': 'm2',
}, "the tree should still be correctly merged"
[actual_sha] = staging.parents
actual = repo.commit(actual_sha)
@@ -1218,7 +1218,7 @@ class TestMergeMethod:
assert master == merge_head
head = repo.commit('heads/master')
final_tree = repo.read_tree(head)
assert final_tree == {'m': b'2', 'b': b'1'}, "sanity check of final tree"
assert final_tree == {'m': '2', 'b': '1'}, "sanity check of final tree"
r1 = repo.commit(head.parents[1])
r0 = repo.commit(r1.parents[0])
assert json.loads(pr.commits_map) == {
@@ -1291,7 +1291,7 @@ class TestMergeMethod:
assert master == nb1
head = repo.commit('heads/master')
final_tree = repo.read_tree(head)
assert final_tree == {'m': b'2', 'b': b'1'}, "sanity check of final tree"
assert final_tree == {'m': '2', 'b': '1'}, "sanity check of final tree"
m1 = head
m0 = repo.commit(m1.parents[0])
@@ -1455,7 +1455,7 @@ class TestMergeMethod:
master = repo.commit('heads/master')
assert master.parents == [m2, c1]
assert repo.read_tree(master) == {'a': b'1', 'b': b'2', 'bb': b'bb'}
assert repo.read_tree(master) == {'a': '1', 'b': '2', 'bb': 'bb'}
m1 = node('M1')
reviewer = get_partner(env, users["reviewer"]).formatted_email
@@ -1495,7 +1495,7 @@ class TestMergeMethod:
assert staging.parents == [m2],\
"the previous master's tip should be the sole parent of the staging commit"
assert repo.read_tree(staging) == {
'm': b'c2', 'm2': b'm2',
'm': 'c2', 'm2': 'm2',
}, "the tree should still be correctly merged"
repo.post_status(staging.id, 'success', 'legal/cla')
@@ -1533,7 +1533,7 @@ class TestMergeMethod:
assert repo.is_ancestor(prx.head, of=staging.id)
assert staging.parents == [m2, c1]
assert repo.read_tree(staging) == {
'm': b'c1', 'm2': b'm2',
'm': 'c1', 'm2': 'm2',
}
repo.post_status(staging.id, 'success', 'legal/cla')