[IMP] *: unify gh test API between runbot and fw-bot

The fw-bot testing API should somewhat improve the performance of the
mergebot tests (less waiting around, for instance).

The code has been updated to the bare minimum (context-managing repos,
changes to PRs, and replacing role names with explicit token provisions),
but extra facilities were kept to avoid having to change *everything*,
e.g. make_commit (singular), automatic generation of PR refs, ...

The tests should eventually be updated to remove these.
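
For illustration, a test exercising the unified API could look something
like this (a hypothetical sketch, not a test from the suite; ``Commit``
stands for the ``Repo.Commit`` helper defined in the new conftest):

    def test_example(env, project, make_repo, config):
        repo = make_repo('repo')
        with repo:  # hook deliveries are waited for when the context exits
            # create a root commit and a master branch
            [m] = repo.make_commits(
                None, Commit('initial', tree={'a': '1'}),
                ref='heads/master')
            # branch off, open a PR, approve it with an explicit token
            repo.make_commits(
                m, Commit('fix thing', tree={'a': '2'}),
                ref='heads/change')
            pr = repo.make_pr(target='master', head='change',
                              token=config['role_reviewer']['token'])
            pr.post_comment('hansen r+', config['role_reviewer']['token'])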

Also remove the local fake / mock. Being so much faster is a huge
draw, but I don't really want to spend more time updating it,
especially when fwbot doesn't get to take advantage of it. A local /
lightweight fake github (as an external service over http) might
eventually be a good idea though, and would be more widely applicable
(including to third parties).
Xavier Morel 2019-10-10 09:22:12 +02:00
parent 557878afe9
commit d453943252
10 changed files with 2428 additions and 3715 deletions


@@ -1,9 +1,58 @@
# -*- coding: utf-8 -*-
"""
Configuration:
* an ``odoo`` binary in the path, which runs the relevant odoo; to ensure a
clean slate odoo is re-started and a new database is created before each
test (technically a "template" db is created first, then that DB is cloned
and the fresh clone is used for each test)
* pytest.ini (at the root of the runbot repo or higher) with the following
sections and keys (a sample file is sketched at the end of this docstring)
``github``
- owner, the name of the account (personal or org) under which test repos
will be created & deleted (note: some repos might be created under role
accounts as well)
- token, either personal or oauth, must have the scopes ``public_repo``,
``delete_repo`` and ``admin:repo_hook``; if personal, the owner must be
the corresponding user account, not an org. The ``user:email`` scope is
also needed for the forwardport / forwardbot tests
``role_reviewer``, ``role_self_reviewer`` and ``role_other``
- name (optional, used as the partner name when creating it, otherwise the
github login gets used)
- token, a personal access token with the ``public_repo`` scope (otherwise
the API can't be used to leave comments), and possibly eventually
``delete_repo`` (for personal forks)
.. warning:: the accounts must *not* be flagged, or the webhooks on
commenting or creating reviews will not trigger, and the
tests will fail
* either ``ngrok`` or ``lt`` (localtunnel) available on the path. ngrok with
a configured account is recommended: ngrok is more reliable than localtunnel,
but a free account is necessary to get a high enough rate limit for some
of the multi-repo tests to work
Finally, the tests aren't 100% reliable as they rely on quite a bit of network
traffic; it's possible for tests to fail due to network issues rather than
logic errors.
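
For reference, a minimal ``pytest.ini`` might look like this (all values are
placeholders rather than real accounts or tokens):

    [github]
    owner = test-org
    token = <token with public_repo, delete_repo, admin:repo_hook, user:email>

    [role_reviewer]
    name = Reviewer
    token = <token with public_repo>

    [role_self_reviewer]
    token = <token with public_repo>

    [role_other]
    token = <token with public_repo>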
"""
import base64
import collections
import configparser
import copy
import itertools
import logging
import re
import socket
import subprocess
import sys
import time
import uuid
import xmlrpc.client
from contextlib import closing
import psutil
import pytest
@@ -15,7 +64,7 @@ NGROK_CLI = [
def pytest_addoption(parser):
parser.addoption('--addons-path')
parser.addoption('--db', help="DB to run the tests against", default=str(uuid.uuid4()))
parser.addoption('--db', help="DB to run the tests against", default='template_%s' % uuid.uuid4())
parser.addoption("--no-delete", action="store_true", help="Don't delete repo after a failed run")
parser.addoption(
@@ -39,10 +88,15 @@ def config(pytestconfig):
"""
conf = configparser.ConfigParser(interpolation=None)
conf.read([pytestconfig.inifile])
return {
cnf = {
name: dict(s.items())
for name, s in conf.items()
}
# special case user / owner / ...
cnf['role_user'] = {
'token': conf['github']['token']
}
return cnf
@pytest.fixture(scope='session')
def rolemap(config):
@@ -62,15 +116,8 @@ def rolemap(config):
rolemap[role] = data['user'] = r.json()['login']
return rolemap
# apparently conftests can override one another's fixtures but plugins can't
# override conftest fixtures (?) so if this is defined as "users" it replaces
# the one from runbot_merge/tests/local and everything breaks.
#
# Alternatively this could be special-cased using remote_p or something but
# that's even more gross. It might be possible to handle that via pytest's
# hooks as well but I didn't check
@pytest.fixture
def users_(env, config, rolemap):
def users(env, config, rolemap):
for role, login in rolemap.items():
if role in ('user', 'other'):
continue
@@ -102,40 +149,53 @@ def tunnel(pytestconfig, port):
requests.get('http://localhost:4040/api')
except requests.exceptions.ConnectionError:
subprocess.Popen(NGROK_CLI, stdout=subprocess.DEVNULL)
time.sleep(1)
time.sleep(2)
requests.post('http://localhost:4040/api/tunnels', json={
'name': str(port),
'proto': 'http',
'bind_tls': True,
'bind_tls': True, # only https
'addr': addr,
'inspect': False,
})
time.sleep(5)
'inspect': True,
}).raise_for_status()
tunnel = 'http://localhost:4040/api/tunnels/%s' % port
try:
r = requests.get('http://localhost:4040/api/tunnels')
for _ in range(10):
time.sleep(2)
r = requests.get(tunnel)
# not created yet, wait and retry
if r.status_code == 404:
continue
# check for weird responses
r.raise_for_status()
yield next(
t['public_url']
for t in r.json()['tunnels']
if t['proto'] == 'https'
if t['config']['addr'].endswith(addr)
)
finally:
requests.delete('http://localhost:4040/api/tunnels/%s' % port)
time.sleep(5) # apparently tearing down the tunnel can take some time
r = requests.get('http://localhost:4040/api/tunnels')
if r.ok and r.json()['tunnels']:
try:
yield r.json()['public_url']
finally:
requests.delete('http://localhost:4040/api/tunnels/%s' % port)
for _ in range(10):
time.sleep(1)
r = requests.get(tunnel)
# check if deletion is done
if r.status_code == 404:
break
r.raise_for_status()
else:
raise TimeoutError("ngrok tunnel deletion failed")
r = requests.get('http://localhost:4040/api/tunnels')
# there are still tunnels in the list -> bail
if r.ok and r.json()['tunnels']:
return
# ngrok is broken or all tunnels have been shut down -> try to
# find and kill it (but only if it looks a lot like we started it)
for p in psutil.process_iter():
if p.name() == 'ngrok' and p.cmdline() == NGROK_CLI:
p.terminate()
break
return
# ngrok is broken or all tunnels have been shut down -> try to
# find and kill it (but only if it looks a lot like we started it)
for p in psutil.process_iter():
if p.name() == 'ngrok' and p.cmdline() == NGROK_CLI:
p.terminate()
break
else:
raise TimeoutError("ngrok tunnel creation failed (?)")
elif tunnel == 'localtunnel':
p = subprocess.Popen(['lt', '-p', str(port)], stdout=subprocess.PIPE)
try:
@@ -163,7 +223,7 @@ def dbcache(request, module):
'--stop-after-init'
], check=True)
yield db
subprocess.run(['dropdb', db])
subprocess.run(['dropdb', db], check=True)
@pytest.fixture
def db(request, dbcache):
@@ -174,3 +234,789 @@ def db(request, dbcache):
if not request.config.getoption('--no-delete'):
subprocess.run(['dropdb', rundb], check=True)
def wait_for_hook(n=1):
time.sleep(10 * n)
def wait_for_server(db, port, proc, mod, timeout=120):
""" Polls for server to be response & have installed our module.
Raises socket.timeout on failure
"""
limit = time.time() + timeout
while True:
if proc.poll() is not None:
raise Exception("Server unexpectedly closed")
try:
uid = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/common'.format(port))\
.authenticate(db, 'admin', 'admin', {})
mods = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/object'.format(port))\
.execute_kw(
db, uid, 'admin', 'ir.module.module', 'search_read', [
[('name', '=', mod)], ['state']
])
if mods and mods[0].get('state') == 'installed':
break
except ConnectionRefusedError:
if time.time() > limit:
raise socket.timeout()
@pytest.fixture(scope='session')
def port():
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.bind(('', 0))
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1]
@pytest.fixture
def server(request, db, port, module):
p = subprocess.Popen([
'odoo', '--http-port', str(port),
'--addons-path', request.config.getoption('--addons-path'),
'-d', db,
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
])
try:
wait_for_server(db, port, p, module)
yield p
finally:
p.terminate()
p.wait(timeout=30)
@pytest.fixture
def env(port, server, db, default_crons):
yield Environment(port, db, default_crons)
# users is just so I can avoid autouse on toplevel users fixture b/c it (seems
# to) break the existing local tests
@pytest.fixture
def make_repo(request, config, tunnel, users):
owner = config['github']['owner']
github = requests.Session()
github.headers['Authorization'] = 'token %s' % config['github']['token']
# check whether "owner" is a user or an org, as repo-creation endpoint is
# different
q = github.get('https://api.github.com/users/{}'.format(owner))
q.raise_for_status()
if q.json().get('type') == 'Organization':
endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner)
else:
endpoint = 'https://api.github.com/user/repos'
r = github.get('https://api.github.com/user')
r.raise_for_status()
assert r.json()['login'] == owner
repos = []
def repomaker(name):
fullname = '{}/{}'.format(owner, name)
repo_url = 'https://api.github.com/repos/{}'.format(fullname)
if request.config.getoption('--no-delete'):
if github.head(repo_url).ok:
pytest.skip("Repository {} already exists".format(fullname))
else:
# just try to delete the repo, we don't really care
if github.delete(repo_url).ok:
# if we did delete a repo, wait a bit as gh might need to
# propagate the thing?
time.sleep(30)
# create repo
r = github.post(endpoint, json={
'name': name,
'has_issues': False,
'has_projects': False,
'has_wiki': False,
'auto_init': False,
# at least one merge method must be enabled :(
'allow_squash_merge': False,
# 'allow_merge_commit': False,
'allow_rebase_merge': False,
})
r.raise_for_status()
# create webhook
github.post('{}/hooks'.format(repo_url), json={
'name': 'web',
'config': {
'url': '{}/runbot_merge/hooks'.format(tunnel),
'content_type': 'json',
'insecure_ssl': '1',
},
'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review']
})
github.put('{}/contents/{}'.format(repo_url, 'a'), json={
'path': 'a',
'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects',
'content': base64.b64encode(b'whee').decode('ascii'),
'branch': 'garbage_%s' % uuid.uuid4()
}).raise_for_status()
return Repo(github, fullname, repos)
yield repomaker
if not request.config.getoption('--no-delete'):
for repo in reversed(repos):
repo.delete()
Commit = collections.namedtuple('Commit', 'id tree message author committer parents')
class Repo:
def __init__(self, session, fullname, repos):
self._session = session
self.name = fullname
self._repos = repos
self.hook = False
repos.append(self)
# unwatch repo
self.unsubscribe()
def unsubscribe(self, token=None):
self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={
'subscribed': False,
'ignored': True,
})
def _get_session(self, token):
s = self._session
if token:
s = requests.Session()
s.headers['Authorization'] = 'token %s' % token
return s
def delete(self):
r = self._session.delete('https://api.github.com/repos/{}'.format(self.name))
if r.status_code != 204:
logging.getLogger(__name__).warning("Unable to delete repository %s", self.name)
def set_secret(self, secret):
assert self.hook
r = self._session.get(
'https://api.github.com/repos/{}/hooks'.format(self.name))
response = r.json()
assert 200 <= r.status_code < 300, response
[hook] = response
r = self._session.patch('https://api.github.com/repos/{}/hooks/{}'.format(self.name, hook['id']), json={
'config': {**hook['config'], 'secret': secret},
})
assert 200 <= r.status_code < 300, r.json()
def get_ref(self, ref):
return self.commit(ref).id
def commit(self, ref):
if not re.match(r'[0-9a-f]{40}', ref):
if not ref.startswith(('heads/', 'refs/heads/')):
ref = 'refs/heads/' + ref
# apparently heads/<branch> ~ refs/heads/<branch> but are not
# necessarily up to date ??? unlike the git ref system where :ref
# starts at heads/
if ref.startswith('heads/'):
ref = 'refs/' + ref
r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref))
response = r.json()
assert 200 <= r.status_code < 300, response
return self._commit_from_gh(response)
def _commit_from_gh(self, gh_commit):
c = gh_commit['commit']
return Commit(
id=gh_commit['sha'],
tree=c['tree']['sha'],
message=c['message'],
author=c['author'],
committer=c['committer'],
parents=[p['sha'] for p in gh_commit['parents']],
)
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from map(self._commit_from_gh, r.json())
if not r.links.get('next'):
return
def read_tree(self, commit):
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
# read tree's blobs
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha']))
assert 200 <= r.status_code < 300, r.json()
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree
def make_ref(self, name, commit, force=False):
assert self.hook
assert name.startswith('heads/')
r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={
'ref': 'refs/' + name,
'sha': commit,
})
if force and r.status_code == 422:
self.update_ref(name, commit, force=force)
return
assert 200 <= r.status_code < 300, r.json()
def update_ref(self, name, commit, force=False):
assert self.hook
r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force})
assert 200 <= r.status_code < 300, r.json()
def protect(self, branch):
assert self.hook
r = self._session.put('https://api.github.com/repos/{}/branches/{}/protection'.format(self.name, branch), json={
'required_status_checks': None,
'enforce_admins': True,
'required_pull_request_reviews': None,
'restrictions': None,
})
assert 200 <= r.status_code < 300, r.json()
# FIXME: remove this (runbot_merge should use make_commits directly)
def make_commit(self, ref, message, author, committer=None, tree=None, wait=True):
assert tree
if isinstance(ref, list):
assert all(re.match(r'[0-9a-f]{40}', r) for r in ref)
ancestor_id = ref
ref = None
else:
ancestor_id = self.get_ref(ref) if ref else None
# if ref is already a commit id, don't pass it in
if ancestor_id == ref:
ref = None
[h] = self.make_commits(
ancestor_id,
MakeCommit(message, tree=tree, author=author, committer=committer, reset=True),
ref=ref
)
return h
def make_commits(self, root, *commits, ref=None):
assert self.hook
if isinstance(root, list):
parents = root
tree = None
elif root:
c = self.commit(root)
tree = c.tree
parents = [c.id]
else:
tree = None
parents = []
hashes = []
for commit in commits:
if commit.reset:
tree = None
r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={
'tree': [
{'path': k, 'mode': '100644', 'type': 'blob', 'content': v}
for k, v in commit.tree.items()
],
'base_tree': tree
})
assert 200 <= r.status_code < 300, r.json()
tree = r.json()['sha']
data = {
'parents': parents,
'message': commit.message,
'tree': tree,
}
if commit.author:
data['author'] = commit.author
if commit.committer:
data['committer'] = commit.committer
r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data)
assert 200 <= r.status_code < 300, r.json()
hashes.append(r.json()['sha'])
parents = [hashes[-1]]
if ref:
self.make_ref(ref, hashes[-1], force=True)
return hashes
def fork(self, *, token=None):
s = self._get_session(token)
r = s.post('https://api.github.com/repos/{}/forks'.format(self.name))
assert 200 <= r.status_code < 300, r.json()
repo_name = r.json()['full_name']
repo_url = 'https://api.github.com/repos/' + repo_name
# poll for end of fork
limit = time.time() + 60
while s.head(repo_url, timeout=5).status_code != 200:
if time.time() > limit:
raise TimeoutError("No response for repo %s over 60s" % repo_name)
time.sleep(1)
return Repo(s, repo_name, self._repos)
def get_pr(self, number):
# ensure PR exists before returning it
self._session.head('https://api.github.com/repos/{}/pulls/{}'.format(
self.name,
number,
)).raise_for_status()
return PR(self, number)
def make_pr(self, *, title=None, body=None, target, head, token=None):
assert self.hook
self.hook = 2
if title is None:
assert ":" not in head, \
"will not auto-infer titles for PRs in a remote repo"
c = self.commit(head)
parts = iter(c.message.split('\n\n', 1))
title = next(parts)
body = next(parts, None)
headers = {}
if token:
headers['Authorization'] = 'token {}'.format(token)
# FIXME: change tests which pass a commit id to make_pr & remove this
if re.match(r'[0-9a-f]{40}', head):
ref = "temp_trash_because_head_must_be_a_ref_%d" % next(ct)
self.make_ref('heads/' + ref, head)
head = ref
r = self._session.post(
'https://api.github.com/repos/{}/pulls'.format(self.name),
json={
'title': title,
'body': body,
'head': head,
'base': target,
},
headers=headers,
)
pr = r.json()
assert 200 <= r.status_code < 300, pr
return PR(self, pr['number'])
def post_status(self, ref, status, context='default', **kw):
assert self.hook
assert status in ('error', 'failure', 'pending', 'success')
r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={
'state': status,
'context': context,
**kw
})
assert 200 <= r.status_code < 300, r.json()
def read_tree(self, commit):
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
# read tree's blobs
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get(t['url'])
assert 200 <= r.status_code < 300, r.json()
# assume all test content is textual
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree
def is_ancestor(self, sha, of):
return any(c['sha'] == sha for c in self.log(of))
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from r.json()
if not r.links.get('next'):
return
def __enter__(self):
self.hook = 1
return self
def __exit__(self, *args):
wait_for_hook(self.hook)
self.hook = 0
class Commit:
def __init__(self, message, *, author=None, committer=None, tree, reset=False):
self.id = None
self.message = message
self.author = author
self.committer = committer
self.tree = tree
self.reset = reset
MakeCommit = Repo.Commit
ct = itertools.count()
class PR:
def __init__(self, repo, number):
self.repo = repo
self.number = number
self.labels = LabelsProxy(self)
@property
def _pr(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return r.json()
@property
def title(self):
raise NotImplementedError()
title = title.setter(lambda self, v: self._set_prop('title', v))
@property
def base(self):
raise NotImplementedError()
base = base.setter(lambda self, v: self._set_prop('base', v))
@property
def head(self):
return self._pr['head']['sha']
@property
def user(self):
return self._pr['user']['login']
@property
def state(self):
return self._pr['state']
@property
def comments(self):
r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return [
(c['user']['login'], c['body'])
for c in r.json()
]
@property
def ref(self):
return 'heads/' + self.branch[1]
def post_comment(self, body, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.post(
'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number),
json={'body': body},
headers=headers,
)
assert 200 <= r.status_code < 300, r.json()
return r.json()['id']
def edit_comment(self, cid, body, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.patch(
'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid),
json={'body': body},
headers=headers
)
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def delete_comment(self, cid, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.delete(
'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid),
headers=headers
)
assert r.status_code == 204, r.json()
def _set_prop(self, prop, value):
assert self.repo.hook
r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={
prop: value
})
assert 200 <= r.status_code < 300, r.json()
def open(self):
self._set_prop('state', 'open')
def close(self):
self._set_prop('state', 'closed')
@property
def branch(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(
self.repo.name,
self.number,
))
assert 200 <= r.status_code < 300, r.json()
info = r.json()
repo = self.repo
reponame = info['head']['repo']['full_name']
if reponame != self.repo.name:
# not sure deep copying the session object is safe / proper...
repo = Repo(copy.deepcopy(self.repo._session), reponame, [])
return PRBranch(repo, info['head']['ref'])
def post_review(self, state, body, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.post(
'https://api.github.com/repos/{}/pulls/{}/reviews'.format(self.repo.name, self.number),
json={'body': body, 'event': state,},
headers=headers
)
assert 200 <= r.status_code < 300, r.json()
PRBranch = collections.namedtuple('PRBranch', 'repo branch')
class LabelsProxy(collections.abc.MutableSet):
def __init__(self, pr):
self._pr = pr
@property
def _labels(self):
pr = self._pr
r = pr.repo._session.get('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number))
assert r.ok, r.json()
return {label['name'] for label in r.json()}
def __repr__(self):
return '<LabelsProxy %r>' % self._labels
def __eq__(self, other):
if isinstance(other, collections.abc.Set):
return other == self._labels
return NotImplemented
def __contains__(self, label):
return label in self._labels
def __iter__(self):
return iter(self._labels)
def __len__(self):
return len(self._labels)
def add(self, label):
pr = self._pr
assert pr.repo.hook
r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={
'labels': [label]
})
assert r.ok, r.json()
def discard(self, label):
pr = self._pr
assert pr.repo.hook
r = pr.repo._session.delete('https://api.github.com/repos/{}/issues/{}/labels/{}'.format(pr.repo.name, pr.number, label))
# discard should do nothing if the item didn't exist in the set
assert r.ok or r.status_code == 404, r.json()
def update(self, *others):
pr = self._pr
assert pr.repo.hook
# because of course that one is not provided by MutableSet...
r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={
'labels': list(set(itertools.chain.from_iterable(others)))
})
assert r.ok, r.json()
class Environment:
def __init__(self, port, db, default_crons=()):
self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {})
self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port))
self._db = db
self._default_crons = default_crons
def __call__(self, model, method, *args, **kwargs):
return self._object.execute_kw(
self._db, self._uid, 'admin',
model, method,
args, kwargs
)
def __getitem__(self, name):
return Model(self, name)
def run_crons(self, *xids, **kw):
crons = xids or self._default_crons
print('running crons', crons, file=sys.stderr)
for xid in crons:
print('\trunning cron', xid, '...', file=sys.stderr)
_, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid)
assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model)
self('ir.cron', 'method_direct_trigger', [cron_id], **kw)
print('\tdone', file=sys.stderr)
print('done', file=sys.stderr)
# sleep for some time as a lot of crap may have happened (?)
wait_for_hook()
class Model:
__slots__ = ['_env', '_model', '_ids', '_fields']
def __init__(self, env, model, ids=(), fields=None):
object.__setattr__(self, '_env', env)
object.__setattr__(self, '_model', model)
object.__setattr__(self, '_ids', tuple(ids or ()))
object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation']))
@property
def ids(self):
return self._ids
def __bool__(self):
return bool(self._ids)
def __len__(self):
return len(self._ids)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
return self._model == other._model and self._ids == other._ids
def __repr__(self):
return "{}({})".format(self._model, ', '.join(str(id_) for id_ in self._ids))
def exists(self):
ids = self._env(self._model, 'exists', self._ids)
return Model(self._env, self._model, ids)
def search(self, *args, **kwargs):
ids = self._env(self._model, 'search', *args, **kwargs)
return Model(self._env, self._model, ids)
def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values):
return self._env(self._model, 'write', self._ids, values)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def __getitem__(self, index):
if isinstance(index, str):
return getattr(self, index)
ids = self._ids[index]
if isinstance(ids, int):
ids = [ids]
return Model(self._env, self._model, ids, fields=self._fields)
def __getattr__(self, fieldname):
if not self._ids:
return False
assert len(self._ids) == 1
if fieldname == 'id':
return self._ids[0]
val = self.read([fieldname])[0][fieldname]
field_description = self._fields[fieldname]
if field_description['type'] in ('many2one', 'one2many', 'many2many'):
val = val or []
if field_description['type'] == 'many2one':
val = val[:1] # (id, name) => [id]
return Model(self._env, field_description['relation'], val)
return val
def __setattr__(self, fieldname, value):
assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many')
self._env(self._model, 'write', self._ids, {fieldname: value})
def __iter__(self):
return (
Model(self._env, self._model, [i], fields=self._fields)
for i in self._ids
)
def mapped(self, path):
field, *rest = path.split('.', 1)
descr = self._fields[field]
if descr['type'] in ('many2one', 'one2many', 'many2many'):
result = Model(self._env, descr['relation'])
for record in self:
result |= getattr(record, field)
return result.mapped(rest[0]) if rest else result
assert not rest
return [getattr(r, field) for r in self]
def filtered(self, fn):
result = Model(self._env, self._model, fields=self._fields)
for record in self:
if fn(record):
result |= record
return result
def __sub__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields)
def __or__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields)
__add__ = __or__
def __and__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields)
def invalidate_cache(self, fnames=None, ids=None):
pass # not a concern when every access is an RPC call


@@ -1,62 +1,23 @@
# -*- coding: utf-8 -*-
import base64
import collections
import copy
import itertools
import logging
import pathlib
import socket
import time
import uuid
import xmlrpc.client
from contextlib import closing
import pytest
import subprocess
import re
import requests
from shutil import rmtree
import pytest
from odoo.tools.appdirs import user_cache_dir
DEFAULT_CRONS = [
'runbot_merge.process_updated_commits',
'runbot_merge.merge_cron',
'forwardport.port_forward',
'forwardport.updates',
'runbot_merge.check_linked_prs_status',
'runbot_merge.feedback_cron',
]
def wait_for_hook(n=1):
time.sleep(10 * n)
def wait_for_server(db, port, proc, mod, timeout=120):
""" Polls for server to be response & have installed our module.
Raises socket.timeout on failure
"""
limit = time.time() + timeout
while True:
if proc.poll() is not None:
raise Exception("Server unexpectedly closed")
try:
uid = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/common'.format(port))\
.authenticate(db, 'admin', 'admin', {})
mods = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/object'.format(port))\
.execute_kw(
db, uid, 'admin', 'ir.module.module', 'search_read', [
[('name', '=', mod)], ['state']
])
if mods and mods[0].get('state') == 'installed':
break
except ConnectionRefusedError:
if time.time() > limit:
raise socket.timeout()
@pytest.fixture
def default_crons():
return [
'runbot_merge.process_updated_commits',
'runbot_merge.merge_cron',
'forwardport.port_forward',
'forwardport.updates',
'runbot_merge.check_linked_prs_status',
'runbot_merge.feedback_cron',
]
# public_repo — necessary to leave comments
# admin:repo_hook — to set up hooks (duh)
@@ -99,20 +60,6 @@ def _cleanup_cache(config, users):
for login in users.values():
rmtree(cache_root / login, ignore_errors=True)
@pytest.fixture(autouse=True)
def users(users_):
return users_
@pytest.fixture
def project(env, config):
return env['runbot_merge.project'].create({
'name': 'odoo',
'github_token': config['github']['token'],
'github_prefix': 'hansen',
'fp_github_token': config['github']['token'],
'required_statuses': 'legal/cla,ci/runbot',
})
@pytest.fixture(scope='session')
def module():
""" When a test function is (going to be) run, selects the containing
@@ -120,549 +67,3 @@ def module():
"""
# NOTE: no request.fspath (because no request.function) in session-scoped fixture so can't put module() at the toplevel
return 'forwardport'
@pytest.fixture(scope='session')
def port():
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.bind(('', 0))
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1]
@pytest.fixture
def server(request, db, port, module):
p = subprocess.Popen([
'odoo', '--http-port', str(port),
'--addons-path', request.config.getoption('--addons-path'),
'-d', db,
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
])
try:
wait_for_server(db, port, p, module)
yield p
finally:
p.terminate()
p.wait(timeout=30)
@pytest.fixture
def env(port, server, db):
yield Environment(port, db)
# users is just so I can avoid autouse on toplevel users fixture b/c it (seems
# to) break the existing local tests
@pytest.fixture
def make_repo(request, config, tunnel, users):
owner = config['github']['owner']
github = requests.Session()
github.headers['Authorization'] = 'token %s' % config['github']['token']
# check whether "owner" is a user or an org, as repo-creation endpoint is
# different
q = github.get('https://api.github.com/users/{}'.format(owner))
q.raise_for_status()
if q.json().get('type') == 'Organization':
endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner)
else:
endpoint = 'https://api.github.com/user/repos'
r = github.get('https://api.github.com/user')
r.raise_for_status()
assert r.json()['login'] == owner
repos = []
def repomaker(name):
fullname = '{}/{}'.format(owner, name)
repo_url = 'https://api.github.com/repos/{}'.format(fullname)
if request.config.getoption('--no-delete'):
if github.head(repo_url).ok:
pytest.skip("Repository {} already exists".format(fullname))
else:
# just try to delete the repo, we don't really care
if github.delete(repo_url).ok:
# if we did delete a repo, wait a bit as gh might need to
# propagate the thing?
time.sleep(30)
# create repo
r = github.post(endpoint, json={
'name': name,
'has_issues': False,
'has_projects': False,
'has_wiki': False,
'auto_init': False,
# at least one merge method must be enabled :(
'allow_squash_merge': False,
# 'allow_merge_commit': False,
'allow_rebase_merge': False,
})
r.raise_for_status()
new_repo = Repo(github, fullname, repos)
# create webhook
github.post('{}/hooks'.format(repo_url), json={
'name': 'web',
'config': {
'url': '{}/runbot_merge/hooks'.format(tunnel),
'content_type': 'json',
'insecure_ssl': '1',
},
'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review']
})
github.put('https://api.github.com/repos/{}/contents/{}'.format(fullname, 'a'), json={
'path': 'a',
'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects',
'content': base64.b64encode(b'whee').decode('ascii'),
'branch': 'garbage_%s' % uuid.uuid4()
}).raise_for_status()
return new_repo
yield repomaker
if not request.config.getoption('--no-delete'):
for repo in reversed(repos):
repo.delete()
Commit = collections.namedtuple('Commit', 'id tree message author committer parents')
class Repo:
def __init__(self, session, fullname, repos):
self._session = session
self.name = fullname
self._repos = repos
self.hook = False
repos.append(self)
# unwatch repo
self.unsubscribe()
def unsubscribe(self, token=None):
self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={
'subscribed': False,
'ignored': True,
})
def delete(self):
r = self._session.delete('https://api.github.com/repos/{}'.format(self.name))
if r.status_code != 204:
logging.getLogger(__name__).warn("Unable to delete repository %s", self.name)
def commit(self, ref):
if not re.match(r'[0-9a-f]{40}', ref):
if not ref.startswith(('heads/', 'refs/heads/')):
ref = 'refs/heads/' + ref
# apparently heads/<branch> ~ refs/heads/<branch> but are not
# necessarily up to date ??? unlike the git ref system where :ref
# starts at heads/
if ref.startswith('heads/'):
ref = 'refs/' + ref
r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref))
response = r.json()
assert 200 <= r.status_code < 300, response
return self._commit_from_gh(response)
def _commit_from_gh(self, gh_commit):
c = gh_commit['commit']
return Commit(
id=gh_commit['sha'],
tree=c['tree']['sha'],
message=c['message'],
author=c['author'],
committer=c['committer'],
parents=[p['sha'] for p in gh_commit['parents']],
)
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from map(self._commit_from_gh, r.json())
if not r.links.get('next'):
return
def read_tree(self, commit):
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
# read tree's blobs
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha']))
assert 200 <= r.status_code < 300, r.json()
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree
def make_ref(self, name, commit, force=False):
assert self.hook
assert name.startswith('heads/')
r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={
'ref': 'refs/' + name,
'sha': commit,
})
if force and r.status_code == 422:
self.update_ref(name, commit, force=True)
return
assert 200 <= r.status_code < 300, r.json()
def update_ref(self, name, commit, force=False):
r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force})
assert 200 <= r.status_code < 300, r.json()
def make_commits(self, root, *commits, ref=None):
assert self.hook
if root:
c = self.commit(root)
tree = c.tree
parents = [c.id]
else:
tree = None
parents = []
hashes = []
for commit in commits:
if commit.reset:
tree = None
r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={
'tree': [
{'path': k, 'mode': '100644', 'type': 'blob', 'content': v}
for k, v in commit.tree.items()
],
'base_tree': tree
})
assert 200 <= r.status_code < 300, r.json()
tree = r.json()['sha']
data = {
'parents': parents,
'message': commit.message,
'tree': tree,
}
if commit.author:
data['author'] = commit.author
if commit.committer:
data['committer'] = commit.committer
r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data)
assert 200 <= r.status_code < 300, r.json()
hashes.append(r.json()['sha'])
parents = [hashes[-1]]
if ref:
self.make_ref(ref, hashes[-1], force=True)
return hashes
def fork(self, *, token=None):
s = self._get_session(token)
r = s.post('https://api.github.com/repos/{}/forks'.format(self.name))
assert 200 <= r.status_code < 300, r.json()
repo_name = r.json()['full_name']
repo_url = 'https://api.github.com/repos/' + repo_name
# poll for end of fork
limit = time.time() + 60
while s.head(repo_url, timeout=5).status_code != 200:
if time.time() > limit:
raise TimeoutError("No response for repo %s over 60s" % repo_name)
time.sleep(1)
return Repo(s, repo_name, self._repos)
def _get_session(self, token):
s = self._session
if token:
s = requests.Session()
s.headers['Authorization'] = 'token %s' % token
return s
def get_pr(self, number):
# ensure PR exists before returning it
self._session.head('https://api.github.com/repos/{}/pulls/{}'.format(
self.name,
number,
)).raise_for_status()
return PR(self, number)
def make_pr(self, *, title=None, body=None, target, head, token=None):
assert self.hook
self.hook = 2
if title is None:
assert ":" not in head, \
"will not auto-infer titles for PRs in a remote repo"
c = self.commit(head)
parts = iter(c.message.split('\n\n', 1))
title = next(parts)
body = next(parts, None)
headers = {}
if token:
headers['Authorization'] = 'token {}'.format(token)
r = self._session.post(
'https://api.github.com/repos/{}/pulls'.format(self.name),
json={
'title': title,
'body': body,
'head': head,
'base': target,
},
headers=headers,
)
pr = r.json()
assert 200 <= r.status_code < 300, pr
return PR(self, pr['number'])
def post_status(self, ref, status, context='default', **kw):
assert self.hook
assert status in ('error', 'failure', 'pending', 'success')
r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={
'state': status,
'context': context,
**kw
})
assert 200 <= r.status_code < 300, r.json()
def __enter__(self):
self.hook = 1
return self
def __exit__(self, *args):
wait_for_hook(self.hook)
self.hook = 0
class PR:
__slots__ = ['number', 'repo']
def __init__(self, repo, number):
self.repo = repo
self.number = number
@property
def _pr(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return r.json()
@property
def head(self):
return self._pr['head']['sha']
@property
def comments(self):
r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return [
(c['user']['login'], c['body'])
for c in r.json()
]
def post_comment(self, body, token=None):
assert self.repo.hook
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
r = self.repo._session.post(
'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number),
json={'body': body},
headers=headers,
)
assert 200 <= r.status_code < 300, r.json()
return r.json()['id']
def _set_prop(self, prop, value):
assert self.repo.hook
r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={
prop: value
})
assert 200 <= r.status_code < 300, r.json()
def open(self):
self._set_prop('state', 'open')
def close(self):
self._set_prop('state', 'closed')
@property
def branch(self):
r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(
self.repo.name,
self.number,
))
assert 200 <= r.status_code < 300, r.json()
info = r.json()
repo = self.repo
reponame = info['head']['repo']['full_name']
if reponame != self.repo.name:
# not sure deep copying the session object is safe / proper...
repo = Repo(copy.deepcopy(self.repo._session), reponame, [])
return PRBranch(repo, info['head']['ref'])
PRBranch = collections.namedtuple('PRBranch', 'repo branch')
class Environment:
def __init__(self, port, db):
self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {})
self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port))
self._db = db
def __call__(self, model, method, *args, **kwargs):
return self._object.execute_kw(
self._db, self._uid, 'admin',
model, method,
args, kwargs
)
def __getitem__(self, name):
return Model(self, name)
def run_crons(self, *xids, **kw):
crons = xids or DEFAULT_CRONS
for xid in crons:
_, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid)
assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model)
self('ir.cron', 'method_direct_trigger', [cron_id], **kw)
# sleep for some time as a lot of crap may have happened (?)
wait_for_hook()
class Model:
__slots__ = ['_env', '_model', '_ids', '_fields']
def __init__(self, env, model, ids=(), fields=None):
object.__setattr__(self, '_env', env)
object.__setattr__(self, '_model', model)
object.__setattr__(self, '_ids', tuple(ids or ()))
object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation']))
@property
def ids(self):
return self._ids
def __bool__(self):
return bool(self._ids)
def __len__(self):
return len(self._ids)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
return self._model == other._model and self._ids == other._ids
def __repr__(self):
return "{}({})".format(self._model, ', '.join(str(id) for id in self._ids))
def exists(self):
ids = self._env(self._model, 'exists', self._ids)
return Model(self._env, self._model, ids)
def search(self, *args, **kwargs):
ids = self._env(self._model, 'search', *args, **kwargs)
return Model(self._env, self._model, ids)
def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values):
return self._env(self._model, 'write', self._ids, values)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def __getitem__(self, index):
if isinstance(index, str):
return getattr(self, index)
ids = self._ids[index]
if isinstance(ids, int):
ids = [ids]
return Model(self._env, self._model, ids, fields=self._fields)
def __getattr__(self, fieldname):
if not self._ids:
return False
assert len(self._ids) == 1
if fieldname == 'id':
return self._ids[0]
val = self.read([fieldname])[0][fieldname]
field_description = self._fields[fieldname]
if field_description['type'] in ('many2one', 'one2many', 'many2many'):
val = val or []
if field_description['type'] == 'many2one':
val = val[:1] # (id, name) => [id]
return Model(self._env, field_description['relation'], val)
return val
def __setattr__(self, fieldname, value):
assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many')
self._env(self._model, 'write', self._ids, {fieldname: value})
def __iter__(self):
return (
Model(self._env, self._model, [i], fields=self._fields)
for i in self._ids
)
def mapped(self, path):
field, *rest = path.split('.', 1)
descr = self._fields[field]
if descr['type'] in ('many2one', 'one2many', 'many2many'):
result = Model(self._env, descr['relation'])
for record in self:
result |= getattr(record, field)
return result.mapped(rest[0]) if rest else result
assert not rest
return [getattr(r, field) for r in self]
def filtered(self, fn):
result = Model(self._env, self._model, fields=self._fields)
for record in self:
if fn(record):
result |= record
return result
def __sub__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields)
def __or__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields)
__add__ = __or__
def __and__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields)
def invalidate_cache(self, fnames=None, ids=None):
pass # not a concern when every access is an RPC call


@@ -1,8 +1,42 @@
import uuid
import pytest
pytest_plugins = ["local"]
import requests
@pytest.fixture(scope='session')
def module():
return 'runbot_merge'
@pytest.fixture
def page(port):
s = requests.Session()
def get(url):
r = s.get('http://localhost:{}{}'.format(port, url))
r.raise_for_status()
return r.content
return get
# env['runbot_merge.project']._check_fetch()
# runbot_merge.fetch_prs_cron
@pytest.fixture
def default_crons():
return [
# env['runbot_merge.project']._check_fetch()
# 'runbot_merge.fetch_prs_cron',
# env['runbot_merge.commit']._notify()
'runbot_merge.process_updated_commits',
# env['runbot_merge.project']._check_progress()
'runbot_merge.merge_cron',
# env['runbot_merge.pull_requests']._check_linked_prs_statuses()
'runbot_merge.check_linked_prs_status',
# env['runbot_merge.project']._send_feedback()
'runbot_merge.feedback_cron',
]
@pytest.fixture
def project(env, config):
return env['runbot_merge.project'].create({
'name': 'odoo',
'github_token': config['github']['token'],
'github_prefix': 'hansen',
'branch_ids': [(0, 0, {'name': 'master'})],
'required_statuses': 'legal/cla,ci/runbot',
})


@ -1,878 +0,0 @@
import collections
import datetime
import hashlib
import hmac
import io
import itertools
import json
import logging
import re
import responses
import werkzeug.urls
import werkzeug.test
import werkzeug.wrappers
from werkzeug.urls import url_parse, url_encode
from . import git
REPOS_API_PATTERN = re.compile(
r'https://api.github.com/repos/(?P<repo>\w+/\w+)/(?P<path>.+)'
)
USERS_API_PATTERN = re.compile(
r"https://api.github.com/users/(?P<user>\w+)"
)
class APIResponse(responses.BaseResponse):
def __init__(self, sim, url):
super(APIResponse, self).__init__(
method=None,
url=url
)
self.sim = sim
self.content_type = 'application/json'
self.stream = False
def matches(self, request):
return self._url_matches(self.url, request.url, self.match_querystring)
def get_response(self, request):
m = self.url.match(request.url)
r = self.dispatch(request, m)
if isinstance(r, responses.HTTPResponse):
return r
(status, r) = r
headers = self.get_headers()
if r is None:
body = io.BytesIO(b'')
headers['Content-Type'] = 'text/plain'
else:
body = io.BytesIO(json.dumps(r).encode('utf-8'))
return responses.HTTPResponse(
status=status,
reason=r.get('message') if isinstance(r, dict) else "bollocks",
body=body,
headers=headers,
preload_content=False, )
class ReposAPIResponse(APIResponse):
def __init__(self, sim):
super().__init__(sim, REPOS_API_PATTERN)
def dispatch(self, request, match):
return self.sim.repos[match.group('repo')].api(match.group('path'), request)
class UsersAPIResponse(APIResponse):
def __init__(self, sim):
super().__init__(sim, url=USERS_API_PATTERN)
def dispatch(self, request, match):
return self.sim._read_user(request, match.group('user'))
class Github(object):
""" Github simulator
When enabled (by context-managing):
* intercepts all ``requests`` calls & replies to api.github.com
* sends relevant hooks (registered per-repo as pairs of WSGI app and URL)
* stores repo content
"""
def __init__(self):
# {repo: {name, issues, objects, refs, hooks}}
self.repos = {}
def repo(self, name, hooks=()):
r = self.repos[name] = Repo(name)
for hook, events in hooks:
r.hook(hook, events)
return self.repos[name]
def __enter__(self):
# otherwise swallows errors from within the test
self._requests = responses.RequestsMock(assert_all_requests_are_fired=False).__enter__()
self._requests.add(ReposAPIResponse(self))
self._requests.add(UsersAPIResponse(self))
return self
def __exit__(self, *args):
return self._requests.__exit__(*args)
def _read_user(self, _, user):
return (200, {
'id': id(user),
'type': 'User',
'login': user,
'name': user.capitalize(),
})
class Repo(object):
def __init__(self, name):
self.name = name
self.issues = {}
#: we're cheating, instead of storing serialised in-memory
#: objects we're storing the Python stuff directly, Commit
#: objects for commits, {str: hash} for trees and bytes for
#: blobs. We're still indirecting via hashes and storing a
#: h:o map because going through the API probably requires it
self.objects = {}
# branches: refs/heads/*
# PRs: refs/pull/*
self.refs = {}
# {event: (wsgi_app, url)}
self.hooks = collections.defaultdict(list)
self.protected = set()
def hook(self, hook, events):
for event in events:
self.hooks[event].append(Client(*hook))
def notify(self, event_type, *payload):
for client in self.hooks.get(event_type, []):
getattr(client, event_type)(*payload)
def set_secret(self, secret):
for clients in self.hooks.values():
for client in clients:
client.secret = secret
def issue(self, number):
return self.issues[number]
def make_issue(self, title, body):
return Issue(self, title, body)
def make_pr(self, title, body, target, ctid, user, label=None):
assert 'heads/%s' % target in self.refs
return PR(self, title, body, target, ctid, user=user, label='{}:{}'.format(user, label or target))
def get_ref(self, ref):
if re.match(r'[0-9a-f]{40}', ref):
return ref
sha = self.refs.get(ref)
assert sha, "no ref %s" % ref
return sha
def make_ref(self, name, commit, force=False):
assert isinstance(self.objects[commit], Commit)
if not force and name in self.refs:
raise ValueError("ref %s already exists" % name)
self.refs[name] = commit
def protect(self, branch):
ref = 'heads/%s' % branch
assert ref in self.refs
self.protected.add(ref)
def update_ref(self, name, commit, force=False):
current = self.refs.get(name)
assert current is not None
assert name not in self.protected and force or git.is_ancestor(
self.objects, current, commit)
self.make_ref(name, commit, force=True)
def commit(self, ref):
sha = self.refs.get(ref) or ref
commit = self.objects[sha]
assert isinstance(commit, Commit)
return commit
def log(self, ref):
commits = [self.commit(ref)]
while commits:
c = commits.pop(0)
commits.extend(self.commit(r) for r in c.parents)
yield c.to_json()
def post_status(self, ref, state, context='default', **kw):
assert state in ('error', 'failure', 'pending', 'success')
c = self.commit(ref)
c.statuses.append({'state': state, 'context': context, **kw})
self.notify('status', self.name, context, state, c.id, kw)
def make_commit(self, ref, message, author, committer=None, tree=None, wait=True):
assert tree, "a commit must provide a full tree"
refs = ref or []
if not isinstance(refs, list):
refs = [ref]
pids = [
ref if re.match(r'[0-9a-f]{40}', ref) else self.refs[ref]
for ref in refs
]
if type(tree) is type(u''):
assert isinstance(self.objects.get(tree), dict)
tid = tree
else:
tid = self._save_tree(tree)
c = Commit(tid, message, author, committer or author, parents=pids)
self.objects[c.id] = c
if refs and refs[0] != pids[0]:
self.refs[refs[0]] = c.id
return c.id
def _save_tree(self, t):
""" t: Dict String (String | Tree)
"""
t = {name: self._make_obj(obj) for name, obj in t.items()}
h, _ = git.make_tree(
self.objects,
t
)
self.objects[h] = t
return h
def _make_obj(self, o):
if type(o) is type(u''):
o = o.encode('utf-8')
if type(o) is bytes:
h, b = git.make_blob(o)
self.objects[h] = o
return h
return self._save_tree(o)
def api(self, path, request):
# a better version would be some sort of longest-match?
for method, pattern, handler in sorted(self._handlers, key=lambda t: -len(t[1])):
if method and request.method != method:
continue
# FIXME: remove qs from path & ensure path is entirely matched, maybe finally use proper routing?
m = re.match(pattern, path)
if m:
return handler(self, request, **m.groupdict())
return (404, {'message': "No match for {} {}".format(request.method, path)})
def read_tree(self, commit):
return git.read_object(self.objects, commit.tree)
def is_ancestor(self, sha, of):
return git.is_ancestor(self.objects, sha, of=of)
def _read_ref(self, _, ref):
obj = self.refs.get(ref)
if obj is None:
return (404, None)
return (200, {
"ref": "refs/%s" % ref,
"object": {
"type": "commit",
"sha": obj,
}
})
def _create_ref(self, r):
body = json.loads(r.body)
ref = body['ref']
# ref must start with refs/ and contain at least two slashes
if not (ref.startswith('refs/') and ref.count('/') >= 2):
return (400, None)
ref = ref[5:]
# if ref already exists conflict?
if ref in self.refs:
return (409, None)
sha = body['sha']
obj = self.objects.get(sha)
# if sha is not in the repo or not a commit, 404
if not isinstance(obj, Commit):
return (404, None)
self.make_ref(ref, sha)
return (201, {
"ref": "refs/%s" % ref,
"object": {
"type": "commit",
"sha": sha,
}
})
def _write_ref(self, r, ref):
current = self.refs.get(ref)
if current is None:
return (404, None)
body = json.loads(r.body)
sha = body['sha']
if sha not in self.objects:
return (404, None)
try:
self.update_ref(ref, sha, body.get('force') or False)
except AssertionError:
return (400, None)
return (200, {
"ref": "refs/%s" % ref,
"object": {
"type": "commit",
"sha": sha,
}
})
def _create_commit(self, r):
body = json.loads(r.body)
author = body.get('author')
try:
sha = self.make_commit(
ref=body.get('parents'),
message=body['message'],
author=author,
committer=body.get('committer'),
tree=body['tree']
)
except (KeyError, AssertionError):
# either couldn't find the parent or couldn't find the tree
return (404, None)
return (201, self._read_commit(r, sha)[1])
def _read_commit(self, _, sha):
c = self.objects.get(sha)
if not isinstance(c, Commit):
return (404, None)
return (200, {
"sha": sha,
"author": c.author.to_json(),
"committer": c.committer.to_json(),
"message": c.message,
"tree": {"sha": c.tree},
"parents": [{"sha": p} for p in c.parents],
})
def _read_statuses(self, _, ref):
try:
c = self.commit(ref)
except KeyError:
return (404, None)
return (200, {
'sha': c.id,
'total_count': len(c.statuses),
# TODO: combined?
'statuses': [
{'description': None, 'target_url': None, **st}
for st in reversed(c.statuses)]
})
def _read_issue(self, r, number):
try:
issue = self.issues[int(number)]
except KeyError:
return (404, None)
attr = {'pull_request': True} if isinstance(issue, PR) else {}
return (200, {'number': issue.number, **attr})
def _read_issue_comments(self, r, number):
try:
issue = self.issues[int(number)]
except KeyError:
return (404, None)
return (200, [{
'user': {'login': author},
'body': body,
} for author, body in issue.comments
if not body.startswith('REVIEW')
])
def _create_issue_comment(self, r, number):
try:
issue = self.issues[int(number)]
except KeyError:
return (404, None)
try:
body = json.loads(r.body)['body']
except KeyError:
return (400, None)
issue.post_comment(body, "user")
return (201, {
'id': 0,
'body': body,
'user': { 'login': "user" },
})
def _read_pr(self, r, number):
try:
pr = self.issues[int(number)]
except KeyError:
return (404, None)
# FIXME: dedup with Client
return (200, {
'number': pr.number,
'head': {
'sha': pr.head,
'label': pr.label,
},
'base': {
'ref': pr.base,
'repo': {
'name': self.name.split('/')[1],
'full_name': self.name,
},
},
'title': pr.title,
'body': pr.body,
'commits': len(pr.commits),
'user': {'login': pr.user},
})
def _edit_pr(self, r, number):
try:
pr = self.issues[int(number)]
except KeyError:
return (404, None)
body = json.loads(r.body)
if not body.keys() & {'title', 'body', 'state', 'base'}:
# FIXME: return PR content
return (200, {})
assert body.get('state') in ('open', 'closed', None)
pr.state = body.get('state') or pr.state
if body.get('title'):
pr.title = body.get('title')
if body.get('body'):
pr.body = body.get('body')
if body.get('base'):
pr.base = body.get('base')
if body.get('state') == 'open':
self.notify('pull_request', 'reopened', pr)
elif body.get('state') == 'closed':
self.notify('pull_request', 'closed', pr)
return (200, {})
def _read_pr_reviews(self, _, number):
pr = self.issues.get(int(number))
if not isinstance(pr, PR):
return (404, None)
return (200, [{
'user': {'login': author},
'state': r.group(1),
'body': r.group(2),
}
for author, body in pr.comments
for r in [re.match(r'REVIEW (\w+)\n\n(.*)', body)]
if r
])
def _read_pr_commits(self, r, number):
pr = self.issues.get(int(number))
if not isinstance(pr, PR):
return (404, None)
url = url_parse(r.url)
qs = url.decode_query()
# github pages are 1-indexed
page = int(qs.get('page') or 1) - 1
per_page = int(qs.get('per_page') or 100)
offset = page * per_page
limit = (page + 1) * per_page
headers = {'Content-Type': 'application/json'}
if len(pr.commits) > limit:
nextlink = url.replace(query=url_encode(dict(qs, page=page+1)))
headers['Link'] = '<%s>; rel="next"' % str(nextlink)
commits = [
c.to_json()
for c in sorted(
pr.commits,
key=lambda c: (c.author.date, c.committer.date)
)[offset:limit]
]
body = io.BytesIO(json.dumps(commits).encode('utf-8'))
return responses.HTTPResponse(
status=200, reason="OK",
headers=headers,
body=body, preload_content=False,
)
def _get_labels(self, r, number):
try:
pr = self.issues[int(number)]
except KeyError:
return (404, None)
return (200, [{'name': label} for label in pr.labels])
def _reset_labels(self, r, number):
try:
pr = self.issues[int(number)]
except KeyError:
return (404, None)
pr.labels = set(json.loads(r.body)['labels'])
return (200, {})
def _do_merge(self, r):
body = json.loads(r.body) # {base, head, commit_message}
if not body.get('commit_message'):
return (400, {'message': "Merges require a commit message"})
base = 'heads/%s' % body['base']
target = self.refs.get(base)
if not target:
return (404, {'message': "Base does not exist"})
# head can be either a branch or a sha
sha = self.refs.get('heads/%s' % body['head']) or body['head']
if sha not in self.objects:
return (404, {'message': "Head does not exist"})
if git.is_ancestor(self.objects, sha, of=target):
return (204, None)
# merging according to read-tree:
# get common ancestor (base) of commits
try:
merge_base = git.merge_base(self.objects, target, sha)
except Exception:
return (400, {'message': "No common ancestor between %(base)s and %(head)s" % body})
try:
tid = git.merge_objects(
self.objects,
self.objects[merge_base].tree,
self.objects[target].tree,
self.objects[sha].tree,
)
except Exception as e:
logging.exception("Merge Conflict")
return (409, {'message': 'Merge Conflict %r' % e})
c = Commit(tid, body['commit_message'], author=None, committer=None, parents=[target, sha])
self.objects[c.id] = c
self.refs[base] = c.id
return (201, c.to_json())
_handlers = [
('POST', r'git/refs', _create_ref),
('GET', r'git/refs/(?P<ref>.*)', _read_ref),
('PATCH', r'git/refs/(?P<ref>.*)', _write_ref),
# nb: there's a separate commits endpoint at /commits which includes repo-level metadata
('GET', r'git/commits/(?P<sha>[0-9A-Fa-f]{40})', _read_commit),
('POST', r'git/commits', _create_commit),
('GET', r'commits/(?P<ref>[^/]+)/status', _read_statuses),
('GET', r'issues/(?P<number>\d+)', _read_issue),
('GET', r'issues/(?P<number>\d+)/comments', _read_issue_comments),
('POST', r'issues/(?P<number>\d+)/comments', _create_issue_comment),
('POST', r'merges', _do_merge),
('GET', r'pulls/(?P<number>\d+)', _read_pr),
('PATCH', r'pulls/(?P<number>\d+)', _edit_pr),
('GET', r'pulls/(?P<number>\d+)/reviews', _read_pr_reviews),
('GET', r'pulls/(?P<number>\d+)/commits', _read_pr_commits),
('GET', r'issues/(?P<number>\d+)/labels', _get_labels),
('PUT', r'issues/(?P<number>\d+)/labels', _reset_labels),
]
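# Dispatch sketch (illustrative): the request path is matched against the
# patterns above in order, and named groups become keyword arguments of the
# handler, e.g.
#   GET pulls/42/commits        -> self._read_pr_commits(request, number='42')
#   PATCH git/refs/heads/master -> self._write_ref(request, ref='heads/master')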
class Issue(object):
def __init__(self, repo, title, body):
self.repo = repo
self._title = title
self._body = body
self.number = max(repo.issues or [0]) + 1
self._comments = []
self.labels = set()
repo.issues[self.number] = self
@property
def comments(self):
return [(c.user, c.body) for c in self._comments]
def post_comment(self, body, user):
c = Comment(user, body)
self._comments.append(c)
self.repo.notify('issue_comment', self, 'created', c)
return c.id
def edit_comment(self, cid, newbody, user):
c = next(c for c in self._comments if c.id == cid)
c.body = newbody
self.repo.notify('issue_comment', self, 'edited', c)
def delete_comment(self, cid, user):
c = next(c for c in self._comments if c.id == cid)
self._comments.remove(c)
self.repo.notify('issue_comment', self, 'deleted', c)
@property
def title(self):
return self._title
@title.setter
def title(self, value):
self._title = value
@property
def body(self):
return self._body
@body.setter
def body(self, value):
self._body = value
class Comment:
_cseq = itertools.count()
def __init__(self, user, body, id=None):
self.user = user
self.body = body
self.id = id or next(self._cseq)
class PR(Issue):
def __init__(self, repo, title, body, target, ctid, user, label):
super(PR, self).__init__(repo, title, body)
assert ctid in repo.objects
repo.refs['pull/%d' % self.number] = ctid
self.head = ctid
self._base = target
self.user = user
self.label = label
self.state = 'open'
repo.notify('pull_request', 'opened', self)
@Issue.title.setter
def title(self, value):
old = self.title
Issue.title.fset(self, value)
self.repo.notify('pull_request', 'edited', self, {
'title': {'from': old}
})
@Issue.body.setter
def body(self, value):
old = self.body
Issue.body.fset(self, value)
self.repo.notify('pull_request', 'edited', self, {
'body': {'from': old}
})
@property
def base(self):
return self._base
@base.setter
def base(self, value):
old, self._base = self._base, value
self.repo.notify('pull_request', 'edited', self, {
'base': {'ref': {'from': old}}
})
def push(self, sha):
self.head = sha
self.repo.notify('pull_request', 'synchronize', self)
def open(self):
assert self.state == 'closed'
self.state = 'open'
self.repo.notify('pull_request', 'reopened', self)
def close(self):
self.state = 'closed'
self.repo.notify('pull_request', 'closed', self)
@property
def commits(self):
store = self.repo.objects
target = self.repo.commit('heads/%s' % self.base).id
base = {h for h, _ in git.walk_ancestors(store, target, False)}
own = [
h for h, _ in git.walk_ancestors(store, self.head, False)
if h not in base
]
return list(map(self.repo.commit, reversed(own)))
def post_review(self, state, user, body):
self._comments.append(Comment(user, "REVIEW %s\n\n%s" % (state, body)))
self.repo.notify('pull_request_review', state, self, user, body)
FMT = '%Y-%m-%dT%H:%M:%SZ'
class Author(object):
__slots__ = ['name', 'email', 'date']
def __init__(self, name, email, date):
self.name = name
self.email = email
self.date = date or datetime.datetime.now().strftime(FMT)
@classmethod
def from_(cls, d):
if not d:
return None
return Author(**d)
def to_json(self):
return {
'name': self.name,
'email': self.email,
'date': self.date,
}
def __str__(self):
return '%s <%s> %d Z' % (
self.name,
self.email,
int(datetime.datetime.strptime(self.date, FMT).timestamp())
)
class Commit(object):
__slots__ = ['tree', 'message', 'author', 'committer', 'parents', 'statuses']
def __init__(self, tree, message, author, committer, parents):
self.tree = tree
self.message = message.strip()
self.author = Author.from_(author) or Author('', '', '')
self.committer = Author.from_(committer) or self.author
self.parents = parents
self.statuses = []
@property
def id(self):
return git.make_commit(self.tree, self.message, self.author, self.committer, parents=self.parents)[0]
def to_json(self):
return {
"sha": self.id,
"commit": {
"author": self.author.to_json(),
"committer": self.committer.to_json(),
"message": self.message,
"tree": {"sha": self.tree},
},
"parents": [{"sha": p} for p in self.parents]
}
def __str__(self):
parents = '\n'.join('parent {}'.format(p) for p in self.parents) + '\n'
return """commit {}
tree {}
{}author {}
committer {}
{}""".format(
self.id,
self.tree,
parents,
self.author,
self.committer,
self.message
)
class Client(werkzeug.test.Client):
def __init__(self, application, path):
self._webhook_path = path
self.secret = None
super(Client, self).__init__(application, werkzeug.wrappers.BaseResponse)
def _make_env(self, event_type, data):
headers = [('X-Github-Event', event_type)]
body = json.dumps(data).encode('utf-8')
if self.secret:
sig = hmac.new(self.secret.encode('ascii'), body, hashlib.sha1).hexdigest()
headers.append(('X-Hub-Signature', 'sha1=' + sig))
return werkzeug.test.EnvironBuilder(
path=self._webhook_path,
method='POST',
headers=headers,
content_type='application/json',
data=body,
)
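# The headers above follow github's webhook scheme: when a secret is set, the
# receiver is expected to recompute the HMAC-SHA1 of the raw body and compare
# it (constant-time) against the X-Hub-Signature header, e.g.
#   expected = 'sha1=' + hmac.new(secret, body, hashlib.sha1).hexdigest()
#   assert hmac.compare_digest(expected, request.headers['X-Hub-Signature'])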
def _repo(self, name):
return {
'name': name.split('/')[1],
'full_name': name,
}
def pull_request(self, action, pr, changes=None):
assert action in ('opened', 'reopened', 'closed', 'synchronize', 'edited')
return self.open(self._make_env(
'pull_request', {
'action': action,
'pull_request': self._pr(pr),
'repository': self._repo(pr.repo.name),
'sender': {'login': '<>'},
**({'changes': changes} if changes else {})
}
))
def pull_request_review(self, action, pr, user, body):
"""
:type action: 'APPROVE' | 'REQUEST_CHANGES' | 'COMMENT'
:type pr: PR
:type user: str
:type body: str
"""
assert action in ('APPROVE', 'REQUEST_CHANGES', 'COMMENT')
return self.open(self._make_env(
'pull_request_review', {
'action': 'submitted',
'review': {
'state': 'APPROVED' if action == 'APPROVE' else action,
'body': body,
'user': {'login': user},
},
'pull_request': self._pr(pr),
'repository': self._repo(pr.repo.name),
}
))
def status(self, repository, context, state, sha, kw):
assert state in ('success', 'failure', 'pending')
return self.open(self._make_env(
'status', {
'name': repository,
'context': context,
'state': state,
'sha': sha,
'repository': self._repo(repository),
'target_url': None,
'description': None,
**(kw or {})
}
))
def issue_comment(self, issue, action, comment):
assert action in ('created', 'edited', 'deleted')
contents = {
'action': action,
'issue': { 'number': issue.number },
'repository': self._repo(issue.repo.name),
'comment': { 'id': comment.id, 'body': comment.body, 'user': {'login': comment.user } },
}
if isinstance(issue, PR):
contents['issue']['pull_request'] = { 'url': 'fake' }
return self.open(self._make_env('issue_comment', contents))
def _pr(self, pr):
"""
:type pr: PR
"""
return {
'number': pr.number,
'head': {
'sha': pr.head,
'label': pr.label,
},
'base': {
'ref': pr.base,
'repo': self._repo(pr.repo.name),
},
'title': pr.title,
'body': pr.body,
'commits': len(pr.commits),
'user': {'login': pr.user},
}
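# Usage sketch (hypothetical wiring): the client wraps the odoo WSGI app, so
# events are delivered synchronously without any network, e.g.
#   client = Client(odoo.http.root, '/runbot_merge/hooks')
#   client.pull_request('opened', some_pr)
# posts the same JSON payload (signed if a secret is set) github would send.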


@ -1,126 +0,0 @@
import collections
import hashlib
def make_obj(t, contents):
assert t in ('blob', 'tree', 'commit')
obj = b'%s %d\0%s' % (t.encode('utf-8'), len(contents), contents)
return hashlib.sha1(obj).hexdigest(), obj
def make_blob(contents):
return make_obj('blob', contents)
def make_tree(store, objs):
""" objs should be a mapping or iterable of (name, object)
"""
if isinstance(objs, collections.Mapping):
objs = objs.items()
return make_obj('tree', b''.join(
b'%s %s\0%s' % (
b'040000' if isinstance(obj, collections.Mapping) else b'100644',
name.encode('utf-8'),
h.encode('utf-8'),
)
for name, h in sorted(objs)
for obj in [store[h]]
# TODO: check that obj is a blob or tree
))
def make_commit(tree, message, author, committer=None, parents=()):
contents = ['tree %s' % tree]
for parent in parents:
contents.append('parent %s' % parent)
contents.append('author %s' % author)
contents.append('committer %s' % (committer or author))
contents.append('')
contents.append(message)
return make_obj('commit', '\n'.join(contents).encode('utf-8'))
def walk_ancestors(store, commit, exclude_self=True):
"""
:param store: mapping of hashes to commit objects (w/ a parents attribute)
:param str commit: starting commit's hash
:param exclude_self: whether the starting commit should be returned as
part of the sequence
:rtype: Iterator[(str, int)]
"""
q = [(commit, 0)]
while q:
node, distance = q.pop()
q.extend((p, distance+1) for p in store[node].parents)
if not (distance == 0 and exclude_self):
yield (node, distance)
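# e.g. for a linear history A <- B <- C (letters standing in for hashes):
#   list(walk_ancestors(store, C)) == [(B, 1), (A, 2)]
#   list(walk_ancestors(store, C, exclude_self=False)) == [(C, 0), (B, 1), (A, 2)]
# (on merge commits the traversal order between branches is unspecified)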
def is_ancestor(store, candidate, of):
# could have candidate == of after all
return any(
current == candidate
for current, _ in walk_ancestors(store, of, exclude_self=False)
)
def merge_base(store, c1, c2):
""" Find LCA between two commits. Brute-force: get all ancestors of A,
all ancestors of B, intersect, and pick the one with the lowest distance
"""
a1 = walk_ancestors(store, c1, exclude_self=False)
# map of sha:distance
a2 = dict(walk_ancestors(store, c2, exclude_self=False))
# find lowest ancestor by distance(ancestor, c1) + distance(ancestor, c2)
_distance, lca = min(
(d1 + d2, a)
for a, d1 in a1
for d2 in [a2.get(a)]
if d2 is not None
)
return lca
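# e.g. with a fork point M (M <- A and M <- B), both walks reach M at
# distance 1, so merge_base(store, A, B) == M: any older common ancestor
# has a strictly larger combined distance.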
def merge_objects(store, b, o1, o2):
""" Merges trees and blobs.
Store = Mapping<Hash, (Blob | Tree)>
Blob = bytes
Tree = Mapping<Name, Hash>
"""
# FIXME: handle None input (similarly named entry added in two
# branches, or delete in one branch & change in other)
if not (b and o1 and o2):
raise ValueError("Don't know how to merge additions/removals yet")
b, o1, o2 = store[b], store[o1], store[o2]
if any(isinstance(o, bytes) for o in [b, o1, o2]):
raise TypeError("Don't know how to merge blobs")
entries = sorted(set(b).union(o1, o2))
t = {}
for entry in entries:
base = b.get(entry)
e1 = o1.get(entry)
e2 = o2.get(entry)
if e1 == e2:
merged = e1 # either no change or the same change on both sides
elif base == e1:
merged = e2 # e1 did not change, use e2
elif base == e2:
merged = e1 # e2 did not change, use e1
else:
merged = merge_objects(store, base, e1, e2)
# None => entry removed
if merged is not None:
t[entry] = merged
# FIXME: fix partial redundancy with make_tree
tid, _ = make_tree(store, t)
store[tid] = t
return tid
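# 3-way sketch: with base {'f': h0}, ours {'f': h1}, theirs {'f': h0}, only
# one side changed 'f' so h1 wins; if both sides changed it to different
# blobs, the recursive call lands on the blob case and raises TypeError
# (i.e. a merge conflict, surfaced as a 409 by Repo._do_merge).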
def read_object(store, tid):
# recursively reads tree of objects
o = store[tid]
if isinstance(o, bytes):
return o.decode()
return {
k: read_object(store, v)
for k, v in o.items()
}
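# Store sketch: blobs are bytes, trees are {name: hash} dicts, e.g.
#   store = {}
#   h, _ = make_blob(b'hello'); store[h] = b'hello'
#   t, _ = make_tree(store, {'greeting': h}); store[t] = {'greeting': h}
#   read_object(store, t) == {'greeting': 'hello'}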


@ -1,133 +0,0 @@
# -*- coding: utf-8 -*-
import inspect
import logging
import pytest
import werkzeug.test, werkzeug.wrappers
import odoo
import fake_github
@pytest.fixture(scope='session')
def remote_p():
return False
@pytest.fixture
def gh():
with fake_github.Github() as gh:
yield gh
@pytest.fixture
def db(dbcache):
return dbcache
@pytest.fixture(scope='session')
def registry(request):
""" Set up Odoo & yields a registry to the specified db
"""
db = request.config.getoption('--db')
addons = request.config.getoption('--addons-path')
odoo.tools.config.parse_config(['--addons-path', addons, '-d', db, '--db-filter', db])
try:
odoo.service.db._create_empty_database(db)
odoo.service.db._initialize_db(None, db, False, False, 'admin')
except odoo.service.db.DatabaseExists:
pass
#odoo.service.server.load_server_wide_modules()
#odoo.service.server.preload_registries([db])
with odoo.api.Environment.manage():
# ensure module is installed
r0 = odoo.registry(db)
with r0.cursor() as cr:
env = odoo.api.Environment(cr, 1, {})
[mod] = env['ir.module.module'].search([('name', '=', 'runbot_merge')])
mod.button_immediate_install()
from odoo.addons.runbot_merge.models import pull_requests
pull_requests.STAGING_SLEEP = 0
yield odoo.registry(db)
@pytest.fixture
def cr(registry):
# in v12, enter_test_mode flags an existing cursor while in v11 it sets one up
if inspect.signature(registry.enter_test_mode).parameters:
with registry.cursor() as cr:
registry.enter_test_mode(cr)
yield cr
registry.leave_test_mode()
cr.rollback()
else:
registry.enter_test_mode()
with registry.cursor() as cr:
yield cr
cr.rollback()
registry.leave_test_mode()
@pytest.fixture
def env(cr):
env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {})
ctx = env['res.users'].context_get()
yield env(context=ctx)
@pytest.fixture
def owner():
return 'user'
@pytest.fixture(autouse=True)
def users(env):
env['res.partner'].create({
'name': "Reviewer",
'github_login': 'reviewer',
'reviewer': True,
'email': "reviewer@example.com",
})
env['res.partner'].create({
'name': "Self Reviewer",
'github_login': 'self_reviewer',
'self_reviewer': True,
})
return {
'reviewer': 'reviewer',
'self_reviewer': 'self_reviewer',
'other': 'other',
'user': 'user',
}
@pytest.fixture
def project(env):
return env['runbot_merge.project'].create({
'name': 'odoo',
'github_token': 'okokok',
'github_prefix': 'hansen',
'branch_ids': [(0, 0, {'name': 'master'})],
'required_statuses': 'legal/cla,ci/runbot',
})
@pytest.fixture
def make_repo(gh, project):
def make_repo(name):
fullname = 'org/' + name
project.write({'repo_ids': [(0, 0, {'name': fullname})]})
return gh.repo(fullname, hooks=[
((odoo.http.root, '/runbot_merge/hooks'), [
'pull_request', 'issue_comment', 'status', 'pull_request_review'
])
])
return make_repo
@pytest.fixture
def page():
c = werkzeug.test.Client(odoo.http.root, werkzeug.wrappers.BaseResponse)
def get(url):
r = c.get(url)
assert r.status_code == 200
return r.data
return get
# TODO: project fixture
# TODO: repos (indirect/parameterize?) w/ WS hook
# + repo proxy object


@ -1,724 +0,0 @@
"""
Replaces relevant fixtures to allow running the test suite against github
actual (instead of a mocked version).
To enable this plugin, load it using ``-p runbot_merge.tests.remote``
.. WARNING:: this requires running ``python -mpytest`` from the root of the
runbot repository, running ``pytest`` directly will not pick it
up (as it does not set up ``sys.path``)
Configuration:
* an ``odoo`` binary in the path, which runs the relevant odoo; to ensure a
clean slate odoo is re-started and a new database is created before each
test
* pytest.ini (at the root of the runbot repo) with the following sections and
keys
``github``
- owner, the name of the account (personal or org) under which test repos
will be created & deleted
- token, either personal or oauth, must have the scopes ``public_repo``,
``delete_repo`` and ``admin:repo_hook``, if personal the owner must be
the corresponding user account, not an org
``role_reviewer``, ``role_self_reviewer`` and ``role_other``
- name (optional)
- token, a personal access token with the ``public_repo`` scope (otherwise
the API can't leave comments)
.. warning:: the accounts must *not* be flagged, or the webhooks on
commenting or creating reviews will not trigger, and the
tests will fail
* either ``ngrok`` or ``lt`` (localtunnel) available on the path. ngrok with
a configured account is recommended: ngrok is more reliable than localtunnel
but a free account is necessary to get a high-enough rate limiting for some
of the multi-repo tests to work
Finally the tests aren't 100% reliable as they rely on quite a bit of network
traffic; it's possible that the tests fail due to network issues rather than
logic errors.
"""
import base64
import collections
import itertools
import re
import socket
import subprocess
import time
import xmlrpc.client
import pytest
import requests
# Should be pytest_configure, but apparently once a plugin is registered
# its fixtures don't get unloaded even if it's unregistered, so prevent
# registering local entirely. This works because explicit plugins (-p)
# are loaded before conftest and conftest-specified plugins (officially:
# https://docs.pytest.org/en/latest/writing_plugins.html#plugin-discovery-order-at-tool-startup).
def pytest_addhooks(pluginmanager):
pluginmanager.set_blocked('local')
PORT = 8069
@pytest.fixture(scope='session')
def port():
return PORT
def wait_for_hook(n=1):
# TODO: find better way to wait for roundtrip of actions which can trigger webhooks
time.sleep(10 * n)
@pytest.fixture
def page():
s = requests.Session()
def get(url):
r = s.get('http://localhost:{}{}'.format(PORT, url))
r.raise_for_status()
return r.content
return get
def wait_for_server(db, timeout=120):
""" Polls for server to be response & have installed our module.
Raises socket.timeout on failure
"""
limit = time.time() + timeout
while True:
try:
uid = xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/common'.format(PORT))\
.authenticate(db, 'admin', 'admin', {})
xmlrpc.client.ServerProxy(
'http://localhost:{}/xmlrpc/2/object'.format(PORT)) \
.execute_kw(db, uid, 'admin', 'runbot_merge.batch', 'search',
[[]], {'limit': 1})
break
except ConnectionRefusedError:
if time.time() > limit:
raise socket.timeout()
@pytest.fixture(scope='session')
def remote_p():
return True
@pytest.fixture
def env(request):
"""
creates a db & an environment object as a proxy to xmlrpc calls
"""
db = request.config.getoption('--db')
p = subprocess.Popen([
'odoo', '--http-port', str(PORT),
'--addons-path', request.config.getoption('--addons-path'),
'-d', db, '-i', 'runbot_merge',
'--load', 'base,web,runbot_merge',
'--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
])
try:
wait_for_server(db)
yield Environment(PORT, db)
db_service = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/db'.format(PORT))
db_service.drop('admin', db)
finally:
p.terminate()
p.wait(timeout=30)
@pytest.fixture(autouse=True)
def users(users_):
return users_
@pytest.fixture
def project(env, config):
return env['runbot_merge.project'].create({
'name': 'odoo',
'github_token': config['github']['token'],
'github_prefix': 'hansen',
'branch_ids': [(0, 0, {'name': 'master'})],
'required_statuses': 'legal/cla,ci/runbot',
})
@pytest.fixture(scope='session')
def github(config):
s = requests.Session()
s.headers['Authorization'] = 'token {}'.format(config['github']['token'])
return s
@pytest.fixture
def owner(config):
return config['github']['owner']
@pytest.fixture
def make_repo(request, config, project, github, tunnel, users, owner):
# check whether "owner" is a user or an org, as repo-creation endpoint is
# different
q = github.get('https://api.github.com/users/{}'.format(owner))
q.raise_for_status()
if q.json().get('type') == 'Organization':
endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner)
else:
# if not creating repos under an org, ensure the token matches the owner
assert users['user'] == owner, "when testing against a user (rather than an organisation) the API token must be the user's"
endpoint = 'https://api.github.com/user/repos'
repos = []
def repomaker(name):
fullname = '{}/{}'.format(owner, name)
repo_url = 'https://api.github.com/repos/{}'.format(fullname)
if request.config.getoption('--no-delete'):
if github.head(repo_url).ok:
pytest.skip("Repository {} already exists".format(fullname))
else:
# just try to delete the repo, we don't really care
if github.delete(repo_url).ok:
# if we did delete a repo, wait a bit as gh might need to
# propagate the thing?
time.sleep(30)
# create repo
r = github.post(endpoint, json={
'name': name,
'has_issues': False,
'has_projects': False,
'has_wiki': False,
'auto_init': False,
# at least one merge method must be enabled :(
'allow_squash_merge': False,
# 'allow_merge_commit': False,
'allow_rebase_merge': False,
})
r.raise_for_status()
repos.append(fullname)
# unwatch repo
github.put('{}/subscription'.format(repo_url), json={
'subscribed': False,
'ignored': True,
})
# create webhook
github.post('{}/hooks'.format(repo_url), json={
'name': 'web',
'config': {
'url': '{}/runbot_merge/hooks'.format(tunnel),
'content_type': 'json',
'insecure_ssl': '1',
},
'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review']
})
project.write({'repo_ids': [(0, 0, {'name': fullname})]})
role_tokens = {
n[5:]: vals['token']
for n, vals in config.items()
if n.startswith('role_')
}
role_tokens['user'] = config['github']['token']
return Repo(github, fullname, role_tokens)
yield repomaker
if not request.config.getoption('--no-delete'):
for repo in reversed(repos):
github.delete('https://api.github.com/repos/{}'.format(repo)).raise_for_status()
class Environment:
def __init__(self, port, db):
self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {})
self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port))
self._db = db
def __call__(self, model, method, *args, **kwargs):
return self._object.execute_kw(
self._db, self._uid, 'admin',
model, method,
args, kwargs
)
def __getitem__(self, name):
return Model(self, name)
class Model:
__slots__ = ['_env', '_model', '_ids', '_fields']
def __init__(self, env, model, ids=(), fields=None):
object.__setattr__(self, '_env', env)
object.__setattr__(self, '_model', model)
object.__setattr__(self, '_ids', tuple(ids or ()))
object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation']))
@property
def ids(self):
return self._ids
def __bool__(self):
return bool(self._ids)
def __len__(self):
return len(self._ids)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
return self._model == other._model and self._ids == other._ids
def __repr__(self):
return "{}({})".format(self._model, ', '.join(str(id) for id in self._ids))
def exists(self):
ids = self._env(self._model, 'exists', self._ids)
return Model(self._env, self._model, ids)
def search(self, domain, **kw):
ids = self._env(self._model, 'search', domain, **kw)
return Model(self._env, self._model, ids)
def create(self, values):
return Model(self._env, self._model, [self._env(self._model, 'create', values)])
def write(self, values):
return self._env(self._model, 'write', self._ids, values)
def read(self, fields):
return self._env(self._model, 'read', self._ids, fields)
def unlink(self):
return self._env(self._model, 'unlink', self._ids)
def _check_progress(self):
assert self._model == 'runbot_merge.project'
self._run_cron('runbot_merge.merge_cron')
def _check_fetch(self):
assert self._model == 'runbot_merge.project'
self._run_cron('runbot_merge.fetch_prs_cron')
def _send_feedback(self):
assert self._model == 'runbot_merge.project'
self._run_cron('runbot_merge.feedback_cron')
def _check_linked_prs_statuses(self):
assert self._model == 'runbot_merge.pull_requests'
self._run_cron('runbot_merge.check_linked_prs_status')
def _notify(self):
assert self._model == 'runbot_merge.commit'
self._run_cron('runbot_merge.process_updated_commits')
def _run_cron(self, xid):
_, model, cron_id = self._env('ir.model.data', 'xmlid_lookup', xid)
assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model)
self._env('ir.cron', 'method_direct_trigger', [cron_id])
# sleep for some time as a lot of crap may have happened (?)
wait_for_hook()
def __getattr__(self, fieldname):
if not self._ids:
return False
assert len(self._ids) == 1
if fieldname == 'id':
return self._ids[0]
val = self.read([fieldname])[0][fieldname]
field_description = self._fields[fieldname]
if field_description['type'] in ('many2one', 'one2many', 'many2many'):
val = val or []
if field_description['type'] == 'many2one':
val = val[:1] # (id, name) => [id]
return Model(self._env, field_description['relation'], val)
return val
def __setattr__(self, fieldname, value):
assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many')
self._env(self._model, 'write', self._ids, {fieldname: value})
def __iter__(self):
return (
Model(self._env, self._model, [i], fields=self._fields)
for i in self._ids
)
def mapped(self, path):
field, *rest = path.split('.', 1)
descr = self._fields[field]
if descr['type'] in ('many2one', 'one2many', 'many2many'):
result = Model(self._env, descr['relation'])
for record in self:
result |= getattr(record, field)
return result.mapped(rest[0]) if rest else result
assert not rest
return [getattr(r, field) for r in self]
def __or__(self, other):
if not isinstance(other, Model) or self._model != other._model:
return NotImplemented
return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields)
def invalidate_cache(self, fnames=None, ids=None):
pass # not a concern when every access is an RPC call
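# Usage sketch (hypothetical records): the proxy makes xmlrpc feel like the
# regular ORM, e.g.
#   batch = env['runbot_merge.batch'].search([], limit=1)
#   batch.prs        # relational field -> another Model proxy
#   batch.ids        # raw database ids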
class Repo:
__slots__ = ['name', '_session', '_tokens']
def __init__(self, session, name, user_tokens):
self.name = name
self._session = session
self._tokens = user_tokens
def set_secret(self, secret):
r = self._session.get(
'https://api.github.com/repos/{}/hooks'.format(self.name))
response = r.json()
assert 200 <= r.status_code < 300, response
[hook] = response
r = self._session.patch('https://api.github.com/repos/{}/hooks/{}'.format(self.name, hook['id']), json={
'config': {**hook['config'], 'secret': secret},
})
assert 200 <= r.status_code < 300, r.json()
def get_ref(self, ref):
if re.match(r'[0-9a-f]{40}', ref):
return ref
assert ref.startswith('heads/')
r = self._session.get('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, ref))
response = r.json()
assert 200 <= r.status_code < 300, response
assert isinstance(response, dict), "{} doesn't exist (got {} refs)".format(ref, len(response))
assert response['object']['type'] == 'commit'
return response['object']['sha']
def make_ref(self, name, commit, force=False):
assert name.startswith('heads/')
r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={
'ref': 'refs/' + name,
'sha': commit,
})
if force and r.status_code == 422:
r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': True})
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def protect(self, branch):
r = self._session.put('https://api.github.com/repos/{}/branches/{}/protection'.format(self.name, branch), json={
'required_status_checks': None,
'enforce_admins': True,
'required_pull_request_reviews': None,
'restrictions': None,
})
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def update_ref(self, name, commit, force=False):
r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force})
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def make_commit(self, ref, message, author, committer=None, tree=None, wait=True):
assert tree, "not supporting changes/updates"
if not ref: # None / []
# apparently github refuses to create trees/commits in empty repos
# using the regular API...
[(path, contents)] = tree.items()
r = self._session.put('https://api.github.com/repos/{}/contents/{}'.format(self.name, path), json={
'path': path,
'message': message,
'content': base64.b64encode(contents.encode('utf-8')).decode('ascii'),
'branch': 'nootherwaytocreateaninitialcommitbutidontwantamasteryet%d' % next(ct)
})
assert 200 <= r.status_code < 300, r.json()
return r.json()['commit']['sha']
if isinstance(ref, list):
refs = ref
else:
refs = [ref]
parents = [self.get_ref(r) for r in refs]
r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={
'tree': [
{'path': k, 'mode': '100644', 'type': 'blob', 'content': v}
for k, v in tree.items()
]
})
assert 200 <= r.status_code < 300, r.json()
h = r.json()['sha']
data = {
'parents': parents,
'message': message,
'tree': h,
}
if author:
data['author'] = author
if committer:
data['committer'] = committer
r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data)
assert 200 <= r.status_code < 300, r.json()
commit_sha = r.json()['sha']
# if the first parent is an actual ref (rather than a hash) update it
if parents[0] != refs[0]:
self.update_ref(refs[0], commit_sha)
elif wait:
wait_for_hook()
return commit_sha
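# i.e. the standard git-data flow: create a tree, create a commit pointing at
# it, then move the branch ref; empty repos go through the contents API since
# github's git-data endpoints refuse to work there.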
def make_pr(self, title, body, target, ctid, user, label=None):
# github only allows PRs from actual branches, so create an actual branch
ref = label or "temp_trash_because_head_must_be_a_ref_%d" % next(ct)
self.make_ref('heads/' + ref, ctid)
r = self._session.post(
'https://api.github.com/repos/{}/pulls'.format(self.name),
json={'title': title, 'body': body, 'head': ref, 'base': target,},
headers={'Authorization': 'token {}'.format(self._tokens[user])}
)
assert 200 <= r.status_code < 300, r.json()
# wait extra for PRs creating many PRs and relying on their ordering
# (test_batching & test_batching_split)
# would be nice to make the tests more reliable but not quite sure
# how...
wait_for_hook(2)
return PR(self, 'heads/' + ref, r.json()['number'])
def post_status(self, ref, status, context='default', **kw):
assert status in ('error', 'failure', 'pending', 'success')
r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.get_ref(ref)), json={
'state': status,
'context': context,
**kw
})
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def commit(self, ref):
# apparently heads/<branch> ~ refs/heads/<branch> but are not
# necessarily up to date ??? unlike the git ref system where :ref
# starts at heads/
if ref.startswith('heads/'):
ref = 'refs/' + ref
r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref))
response = r.json()
assert 200 <= r.status_code < 300, response
c = response['commit']
return Commit(
id=response['sha'],
tree=c['tree']['sha'],
message=c['message'],
author=c['author'],
committer=c['committer'],
parents=[p['sha'] for p in response['parents']],
)
def read_tree(self, commit):
""" read tree object from commit
:param Commit commit:
:rtype: Dict[str, str]
"""
r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree))
assert 200 <= r.status_code < 300, r.json()
# read tree's blobs
tree = {}
for t in r.json()['tree']:
assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases"
r = self._session.get(t['url'])
assert 200 <= r.status_code < 300, r.json()
# assume all test content is textual
tree[t['path']] = base64.b64decode(r.json()['content']).decode()
return tree
def is_ancestor(self, sha, of):
return any(c['sha'] == sha for c in self.log(of))
def log(self, ref_or_sha):
for page in itertools.count(1):
r = self._session.get(
'https://api.github.com/repos/{}/commits'.format(self.name),
params={'sha': ref_or_sha, 'page': page}
)
assert 200 <= r.status_code < 300, r.json()
yield from r.json()
if not r.links.get('next'):
return
ct = itertools.count()
Commit = collections.namedtuple('Commit', 'id tree message author committer parents')
class LabelsProxy(collections.abc.MutableSet):
def __init__(self, pr):
self._pr = pr
@property
def _labels(self):
pr = self._pr
r = pr._session.get('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number))
assert r.ok, r.json()
return {label['name'] for label in r.json()}
def __repr__(self):
return '<LabelsProxy %r>' % self._labels
def __eq__(self, other):
if isinstance(other, collections.abc.Set):
return other == self._labels
return NotImplemented
def __contains__(self, label):
return label in self._labels
def __iter__(self):
return iter(self._labels)
def __len__(self):
return len(self._labels)
def add(self, label):
pr = self._pr
r = pr._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={
'labels': [label]
})
assert r.ok, r.json()
def discard(self, label):
pr = self._pr
r = pr._session.delete('https://api.github.com/repos/{}/issues/{}/labels/{}'.format(pr.repo.name, pr.number, label))
# discard should do nothing if the item didn't exist in the set
assert r.ok or r.status_code == 404, r.json()
def update(self, *others):
pr = self._pr
# because of course that one is not provided by MutableSet...
r = pr._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={
'labels': list(set(itertools.chain.from_iterable(others)))
})
assert r.ok, r.json()
class PR:
__slots__ = ['number', '_branch', 'repo', 'labels']
def __init__(self, repo, branch, number):
"""
:type repo: Repo
:type branch: str
:type number: int
"""
self.number = number
self._branch = branch
self.repo = repo
self.labels = LabelsProxy(self)
@property
def _session(self):
return self.repo._session
@property
def _pr(self):
r = self._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return r.json()
@property
def head(self):
return self._pr['head']['sha']
@property
def user(self):
return self._pr['user']['login']
@property
def state(self):
return self._pr['state']
@property
def comments(self):
r = self._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number))
assert 200 <= r.status_code < 300, r.json()
return [
(c['user']['login'], c['body'])
for c in r.json()
]
def _set_prop(self, prop, value):
r = self._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={
prop: value
})
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
@property
def title(self):
raise NotImplementedError()
title = title.setter(lambda self, v: self._set_prop('title', v))
@property
def base(self):
raise NotImplementedError()
base = base.setter(lambda self, v: self._set_prop('base', v))
def post_comment(self, body, user):
r = self._session.post(
'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number),
json={'body': body},
headers={'Authorization': 'token {}'.format(self.repo._tokens[user])}
)
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
return r.json()['id']
def edit_comment(self, cid, body, user):
r = self._session.patch(
'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid),
json={'body': body},
headers={'Authorization': 'token {}'.format(self.repo._tokens[user])}
)
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()
def delete_comment(self, cid, user):
r = self._session.delete(
'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid),
headers={'Authorization': 'token {}'.format(self.repo._tokens[user])}
)
assert r.status_code == 204, r.json()
wait_for_hook()
def open(self):
self._set_prop('state', 'open')
def close(self):
self._set_prop('state', 'closed')
def push(self, sha):
self.repo.update_ref(self._branch, sha, force=True)
def post_review(self, state, user, body):
r = self._session.post(
'https://api.github.com/repos/{}/pulls/{}/reviews'.format(self.repo.name, self.number),
json={'body': body, 'event': state,},
headers={'Authorization': 'token {}'.format(self.repo._tokens[user])}
)
assert 200 <= r.status_code < 300, r.json()
wait_for_hook()

File diff suppressed because it is too large


@ -9,23 +9,29 @@ import json
import pytest
from test_utils import re_matches, run_crons, get_partner
from test_utils import re_matches, get_partner
@pytest.fixture
def repo_a(make_repo):
return make_repo('a')
def repo_a(project, make_repo):
repo = make_repo('a')
project.write({'repo_ids': [(0, 0, {'name': repo.name})]})
return repo
@pytest.fixture
def repo_b(make_repo):
return make_repo('b')
def repo_b(project, make_repo):
repo = make_repo('b')
project.write({'repo_ids': [(0, 0, {'name': repo.name})]})
return repo
@pytest.fixture
def repo_c(make_repo):
return make_repo('c')
def repo_c(project, make_repo):
repo = make_repo('c')
project.write({'repo_ids': [(0, 0, {'name': repo.name})]})
return repo
def make_pr(repo, prefix, trees, *, target='master', user='user', label=None,
def make_pr(repo, prefix, trees, *, target='master', user,
statuses=(('ci/runbot', 'success'), ('legal/cla', 'success')),
reviewer='reviewer'):
reviewer):
"""
:type repo: fake_github.Repo
:type prefix: str
@ -37,14 +43,16 @@ def make_pr(repo, prefix, trees, *, target='master', user='user', label=None,
:type reviewer: str | None
:rtype: fake_github.PR
"""
base = repo.commit('heads/{}'.format(target))
tree = repo.read_tree(base)
c = base.id
for i, t in enumerate(trees):
tree.update(t)
c = repo.make_commit(c, 'commit_{}_{:02}'.format(prefix, i), None,
tree=dict(tree))
pr = repo.make_pr('title {}'.format(prefix), 'body {}'.format(prefix), target=target, ctid=c, user=user, label=label)
*_, c = repo.make_commits(
'heads/{}'.format(target),
*(
repo.Commit('commit_{}_{:02}'.format(prefix, i), tree=tree)
for i, tree in enumerate(trees)
),
ref='heads/{}'.format(prefix)
)
pr = repo.make_pr(title='title {}'.format(prefix), body='body {}'.format(prefix),
target=target, head=prefix, token=user)
for context, result in statuses:
repo.post_status(c, result, context)
if reviewer:
@ -62,35 +70,51 @@ def make_branch(repo, name, message, tree, protect=True):
repo.protect(name)
return c
def test_stage_one(env, project, repo_a, repo_b):
def test_stage_one(env, project, repo_a, repo_b, config):
""" First PR is non-matched from A => should not select PR from B
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
with repo_a:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(
repo_a, 'A', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'])
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-other-thing')
run_crons(env)
with repo_b:
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(
repo_b, 'B', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
assert to_pr(env, pr_a).state == 'ready'
assert to_pr(env, pr_a).staging_id
assert to_pr(env, pr_b).state == 'ready'
assert not to_pr(env, pr_b).staging_id
def test_stage_match(env, project, repo_a, repo_b):
def test_stage_match(env, project, repo_a, repo_b, config):
""" First PR is matched from A, => should select matched PR from B
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing')
run_crons(env)
with repo_a:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(
repo_a, 'do-a-thing', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
with repo_b:
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(repo_b, 'do-a-thing', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
pr_a = to_pr(env, pr_a)
pr_b = to_pr(env, pr_b)
@ -102,7 +126,7 @@ def test_stage_match(env, project, repo_a, repo_b):
assert pr_a.staging_id == pr_b.staging_id, \
"branch-matched PRs should be part of the same staging"
def test_unmatch_patch(env, project, repo_a, repo_b):
def test_unmatch_patch(env, project, repo_a, repo_b, config):
""" When editing files via the UI for a project you don't have write
access to, a branch called patch-XXX is automatically created in your
profile to hold the change.
@ -115,13 +139,21 @@ def test_unmatch_patch(env, project, repo_a, repo_b):
-> PRs with a branch name of patch-* should not be label-matched
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='patch-1')
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='patch-1')
run_crons(env)
with repo_a:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
pr_a = make_pr(
repo_a, 'patch-1', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
with repo_b:
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(
repo_b, 'patch-1', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
pr_a = to_pr(env, pr_a)
pr_b = to_pr(env, pr_b)
@ -130,20 +162,27 @@ def test_unmatch_patch(env, project, repo_a, repo_b):
assert pr_b.state == 'ready'
assert not pr_b.staging_id, 'patch-* PRs should not be branch-matched'
def test_sub_match(env, project, repo_a, repo_b, repo_c):
def test_sub_match(env, project, repo_a, repo_b, repo_c, config):
""" Branch-matching should work on a subset of repositories
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
# no pr here
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing')
make_branch(repo_c, 'master', 'initial', {'a': 'c_0'})
pr_c = make_pr(repo_c, 'C', [{'a': 'c_1'}], label='do-a-thing')
run_crons(env)
with repo_a: # no pr here
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
with repo_b:
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
pr_b = make_pr(
repo_b, 'do-a-thing', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
with repo_c:
make_branch(repo_c, 'master', 'initial', {'a': 'c_0'})
pr_c = make_pr(
repo_c, 'do-a-thing', [{'a': 'c_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
pr_b = to_pr(env, pr_b)
pr_c = to_pr(env, pr_c)
@ -167,28 +206,44 @@ def test_sub_match(env, project, repo_a, repo_b, repo_c):
repo_c.name + '^': c_staging.parents[0],
}
def test_merge_fail(env, project, repo_a, repo_b, users):
def test_merge_fail(env, project, repo_a, repo_b, users, config):
""" In a matched-branch scenario, if merging in one of the linked repos
fails it should revert the corresponding merges
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
# first set of matched PRs
pr1a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
pr1b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing')
# first set of matched PRs
pr1a = make_pr(
repo_a, 'do-a-thing', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
pr1b = make_pr(
repo_b, 'do-a-thing', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
# add a conflicting commit to B so the staging fails
repo_b.make_commit('heads/master', 'cn', None, tree={'a': 'cn'})
# add a conflicting commit to B so the staging fails
repo_b.make_commit('heads/master', 'cn', None, tree={'a': 'cn'})
# and a second set of PRs which should get staged while the first set
# fails
pr2a = make_pr(repo_a, 'A2', [{'b': 'ok'}], label='do-b-thing')
pr2b = make_pr(repo_b, 'B2', [{'b': 'ok'}], label='do-b-thing')
run_crons(env)
# and a second set of PRs which should get staged while the first set
# fails
pr2a = make_pr(
repo_a, 'do-b-thing', [{'b': 'ok'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
pr2b = make_pr(
repo_b, 'do-b-thing', [{'b': 'ok'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
s2 = to_pr(env, pr2a) | to_pr(env, pr2b)
st = env['runbot_merge.stagings'].search([])
@ -208,33 +263,43 @@ def test_merge_fail(env, project, repo_a, repo_b, users):
for c in repo_a.log('heads/staging.master')
] == [
re_matches('^force rebuild'),
'commit_A2_00\n\ncloses %s#2\n\nSigned-off-by: %s' % (repo_a.name, reviewer),
'commit_do-b-thing_00\n\ncloses %s#2\n\nSigned-off-by: %s' % (repo_a.name, reviewer),
'initial'
], "dummy commit + squash-merged PR commit + root commit"
def test_ff_fail(env, project, repo_a, repo_b):
def test_ff_fail(env, project, repo_a, repo_b, config):
""" In a matched-branch scenario, fast-forwarding one of the repos fails
the entire thing should be rolled back
"""
project.batch_limit = 1
root_a = make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing')
with repo_a, repo_b:
root_a = make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
make_pr(
repo_a, 'do-a-thing', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
run_crons(env)
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
make_pr(
repo_b, 'do-a-thing', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
# add second commit blocking FF
cn = repo_b.make_commit('heads/master', 'second', None, tree={'a': 'b_0', 'b': 'other'})
with repo_b:
cn = repo_b.make_commit('heads/master', 'second', None, tree={'a': 'b_0', 'b': 'other'})
assert repo_b.commit('heads/master').id == cn
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
env['runbot_merge.project']._check_progress()
with repo_a, repo_b:
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
env.run_crons('runbot_merge.merge_cron')
assert repo_b.commit('heads/master').id == cn,\
"B should still be at the conflicting commit"
assert repo_a.commit('heads/master').id == root_a,\
@ -246,23 +311,32 @@ def test_ff_fail(env, project, repo_a, repo_b):
assert len(st.batch_ids.prs) == 2
class TestCompanionsNotReady:
def test_one_pair(self, env, project, repo_a, repo_b, owner, users):
def test_one_pair(self, env, project, repo_a, repo_b, config, users):
""" If the companion of a ready branch-matched PR is not ready,
they should not get staged
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
# pr_a is born ready
p_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
# pr_a is born ready
p_a = make_pr(
repo_a, 'do-a-thing', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
p_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing', reviewer=None)
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
p_b = make_pr(
repo_b, 'do-a-thing', [{'a': 'b_1'}],
user=config['role_user']['token'],
reviewer=None,
)
pr_a = to_pr(env, p_a)
pr_b = to_pr(env, p_b)
assert pr_a.label == pr_b.label == '{}:do-a-thing'.format(owner)
assert pr_a.label == pr_b.label == '{}:do-a-thing'.format(config['github']['owner'])
run_crons(env)
env.run_crons()
assert pr_a.state == 'ready'
assert pr_b.state == 'validated'
@ -275,28 +349,41 @@ class TestCompanionsNotReady:
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)),
]
# ensure the message is only sent once per PR
env['runbot_merge.pull_requests']._check_linked_prs_statuses()
env.run_crons('runbot_merge.check_linked_prs_status')
assert p_a.comments == [
(users['reviewer'], 'hansen r+'),
(users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)),
]
assert p_b.comments == []
def test_two_of_three_unready(self, env, project, repo_a, repo_b, repo_c, owner, users):
def test_two_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config):
""" In a 3-batch, if two of the PRs are not ready both should be
linked by the first one
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'f': 'a0'})
pr_a = make_pr(repo_a, 'A', [{'f': 'a1'}], label='a-thing', reviewer=None)
with repo_a, repo_b, repo_c:
make_branch(repo_a, 'master', 'initial', {'f': 'a0'})
pr_a = make_pr(
repo_a, 'a-thing', [{'f': 'a1'}],
user=config['role_user']['token'],
reviewer=None,
)
make_branch(repo_b, 'master', 'initial', {'f': 'b0'})
pr_b = make_pr(repo_b, 'B', [{'f': 'b1'}], label='a-thing')
make_branch(repo_b, 'master', 'initial', {'f': 'b0'})
pr_b = make_pr(
repo_b, 'a-thing', [{'f': 'b1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
make_branch(repo_c, 'master', 'initial', {'f': 'c0'})
pr_c = make_pr(repo_c, 'C', [{'f': 'c1'}], label='a-thing', reviewer=None)
make_branch(repo_c, 'master', 'initial', {'f': 'c0'})
pr_c = make_pr(
repo_c, 'a-thing', [{'f': 'c1'}],
user=config['role_user']['token'],
reviewer=None,
)
env.run_crons()
run_crons(env)
assert pr_a.comments == []
assert pr_b.comments == [
(users['reviewer'], 'hansen r+'),
@ -307,21 +394,34 @@ class TestCompanionsNotReady:
]
assert pr_c.comments == []
def test_one_of_three_unready(self, env, project, repo_a, repo_b, repo_c, owner, users):
def test_one_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config):
""" In a 3-batch, if one PR is not ready it should be linked on the
other two
"""
project.batch_limit = 1
make_branch(repo_a, 'master', 'initial', {'f': 'a0'})
pr_a = make_pr(repo_a, 'A', [{'f': 'a1'}], label='a-thing', reviewer=None)
with repo_a, repo_b, repo_c:
make_branch(repo_a, 'master', 'initial', {'f': 'a0'})
pr_a = make_pr(
repo_a, 'a-thing', [{'f': 'a1'}],
user=config['role_user']['token'],
reviewer=None,
)
make_branch(repo_b, 'master', 'initial', {'f': 'b0'})
pr_b = make_pr(repo_b, 'B', [{'f': 'b1'}], label='a-thing')
make_branch(repo_b, 'master', 'initial', {'f': 'b0'})
pr_b = make_pr(
repo_b, 'a-thing', [{'f': 'b1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
make_branch(repo_c, 'master', 'initial', {'f': 'c0'})
pr_c = make_pr(repo_c, 'C', [{'f': 'c1'}], label='a-thing')
make_branch(repo_c, 'master', 'initial', {'f': 'c0'})
pr_c = make_pr(
repo_c, 'a-thing', [{'f': 'c1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
env.run_crons()
run_crons(env)
assert pr_a.comments == []
assert pr_b.comments == [
(users['reviewer'], 'hansen r+'),
@ -337,26 +437,32 @@ class TestCompanionsNotReady:
))
]
def test_other_failed(env, project, repo_a, repo_b, owner, users):
def test_other_failed(env, project, repo_a, repo_b, users, config):
""" In a non-matched-branch scenario, if the companion staging (copy of
targets) fails when built with the PR, it should provide a non-useless
message
"""
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
# pr_a is born ready
pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing')
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a': 'a_0'})
# pr_a is born ready
pr_a = make_pr(
repo_a, 'do-a-thing', [{'a': 'a_1'}],
user=config['role_user']['token'],
reviewer=config['role_reviewer']['token'],
)
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
make_branch(repo_b, 'master', 'initial', {'a': 'b_0'})
env.run_crons()
run_crons(env)
pr = to_pr(env, pr_a)
assert pr.staging_id
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot', target_url="http://example.org/a")
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot', target_url="http://example.org/b")
run_crons(env)
with repo_a, repo_b:
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot', target_url="http://example.org/a")
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot', target_url="http://example.org/b")
env.run_crons()
sth = repo_b.commit('heads/staging.master').id
assert not pr.staging_id
@ -367,22 +473,23 @@ def test_other_failed(env, project, repo_a, repo_b, owner, users):
]
class TestMultiBatches:
def test_batching(self, env, project, repo_a, repo_b):
def test_batching(self, env, project, repo_a, repo_b, config):
""" If multiple batches (label groups) are ready they should get batched
together (within the limits of the project's batch limit)
"""
project.batch_limit = 3
make_branch(repo_a, 'master', 'initial', {'a': 'a0'})
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [(
a and to_pr(env, make_pr(repo_a, 'A{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], label='batch{}'.format(i))),
b and to_pr(env, make_pr(repo_b, 'B{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], label='batch{}'.format(i)))
)
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a': 'a0'})
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
run_crons(env)
prs = [(
    a and to_pr(env, make_pr(
        repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}],
        user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])),
    b and to_pr(env, make_pr(
        repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}],
        user=config['role_user']['token'], reviewer=config['role_reviewer']['token']))
)
    for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
env.run_crons()
st = env['runbot_merge.stagings'].search([])
assert st
@@ -398,20 +505,20 @@ class TestMultiBatches:
assert not prs[3][1].staging_id
assert not prs[4][0].staging_id
def test_batching_split(self, env, repo_a, repo_b):
def test_batching_split(self, env, repo_a, repo_b, config):
""" If a staging fails, it should get split properly across repos
"""
make_branch(repo_a, 'master', 'initial', {'a': 'a0'})
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a': 'a0'})
make_branch(repo_b, 'master', 'initial', {'b': 'b0'})
prs = [(
a and to_pr(env, make_pr(repo_a, 'A{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], label='batch{}'.format(i))),
b and to_pr(env, make_pr(repo_b, 'B{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], label='batch{}'.format(i)))
)
for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
run_crons(env)
prs = [(
    a and to_pr(env, make_pr(
        repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}],
        user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])),
    b and to_pr(env, make_pr(
        repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}],
        user=config['role_user']['token'], reviewer=config['role_reviewer']['token']))
)
    for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)])
]
env.run_crons()
st0 = env['runbot_merge.stagings'].search([])
assert len(st0.batch_ids) == 5
@@ -419,10 +526,10 @@ class TestMultiBatches:
# mark b.staging as failed -> should create two splits with (0, 1)
# and (2, 3, 4) and stage the first one
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot')
run_crons(env)
with repo_b:
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot')
env.run_crons()
assert not st0.active
@@ -440,21 +547,21 @@ class TestMultiBatches:
assert sp.mapped('batch_ids.prs') == \
prs[2][0] | prs[2][1] | prs[3][0] | prs[3][1] | prs[4][0]
def test_urgent(env, repo_a, repo_b):
def test_urgent(env, repo_a, repo_b, config):
""" Either PR of a co-dependent pair being p=0 leads to the entire pair
being prioritized
"""
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
pr_a = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}], label='batch', reviewer=None, statuses=[])
pr_b = make_pr(repo_b, 'B', [{'b1': 'b'}, {'b2': 'b'}], label='batch', reviewer=None, statuses=[])
pr_c = make_pr(repo_a, 'C', [{'c1': 'c', 'c2': 'c'}])
pr_a = make_pr(repo_a, 'batch', [{'a1': 'a'}, {'a2': 'a'}],
               user=config['role_user']['token'], reviewer=None, statuses=[])
pr_b = make_pr(repo_b, 'batch', [{'b1': 'b'}, {'b2': 'b'}],
               user=config['role_user']['token'], reviewer=None, statuses=[])
pr_c = make_pr(repo_a, 'C', [{'c1': 'c', 'c2': 'c'}],
               user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
pr_a.post_comment('hansen rebase-merge', 'reviewer')
pr_b.post_comment('hansen rebase-merge p=0', 'reviewer')
run_crons(env)
pr_a.post_comment('hansen rebase-merge', config['role_reviewer']['token'])
pr_b.post_comment('hansen rebase-merge p=0', config['role_reviewer']['token'])
env.run_crons()
# should have batched pr_a and pr_b despite neither being reviewed or
# approved
p_a, p_b = to_pr(env, pr_a), to_pr(env, pr_b)
@@ -464,79 +571,81 @@ def test_urgent(env, repo_a, repo_b):
assert not p_c.staging_id
class TestBlocked:
def test_merge_method(self, env, repo_a):
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
def test_merge_method(self, env, repo_a, config):
with repo_a:
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
pr = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}])
run_crons(env)
pr = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}],
             user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
env.run_crons()
p = to_pr(env, pr)
assert p.state == 'ready'
assert p.blocked
pr.post_comment('hansen rebase-merge', 'reviewer')
with repo_a: pr.post_comment('hansen rebase-merge', config['role_reviewer']['token'])
assert not p.blocked
def test_linked_closed(self, env, repo_a, repo_b):
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
def test_linked_closed(self, env, repo_a, repo_b, config):
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
pr = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx')
b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx', statuses=[])
run_crons(env)
pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}],
             user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
b = make_pr(repo_b, 'xxx', [{'b1': 'b'}],
            user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],
            statuses=[])
env.run_crons()
p = to_pr(env, pr)
assert p.blocked
b.close()
with repo_b: b.close()
# FIXME: find a way for PR.blocked to depend on linked PR somehow so this isn't needed
p.invalidate_cache(['blocked'], [p.id])
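# (presumably needed because `blocked` is computed without declaring a
# dependency on the linked PR's state, so closing b does not trigger a
# recomputation by itself)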
assert not p.blocked
def test_linked_merged(self, env, repo_a, repo_b):
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
def test_linked_merged(self, env, repo_a, repo_b, config):
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx')
b = make_pr(repo_b, 'xxx', [{'b1': 'b'}],
            user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
run_crons(env) # stage b
env.run_crons() # stage b
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'success', 'ci/runbot')
run_crons(env) # merge b
with repo_a, repo_b:
repo_a.post_status('heads/staging.master', 'success', 'legal/cla')
repo_a.post_status('heads/staging.master', 'success', 'ci/runbot')
repo_b.post_status('heads/staging.master', 'success', 'legal/cla')
repo_b.post_status('heads/staging.master', 'success', 'ci/runbot')
env.run_crons() # merge b
assert to_pr(env, b).state == 'merged'
pr = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx')
run_crons(env) # stage the new PR
with repo_a:
pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}],
             user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
env.run_crons() # stage the new PR
p = to_pr(env, pr)
assert not p.blocked
def test_linked_unready(self, env, repo_a, repo_b):
def test_linked_unready(self, env, repo_a, repo_b, config):
""" Create a PR A linked to a non-ready PR B,
* A is blocked by default
* A is not blocked if A.p=0
* A is not blocked if B.p=0
"""
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
with repo_a, repo_b:
make_branch(repo_a, 'master', 'initial', {'a0': 'a'})
make_branch(repo_b, 'master', 'initial', {'b0': 'b'})
a = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx')
b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx', statuses=[])
run_crons(env)
a = make_pr(repo_a, 'xxx', [{'a1': 'a'}],
            user=config['role_user']['token'], reviewer=config['role_reviewer']['token'])
b = make_pr(repo_b, 'xxx', [{'b1': 'b'}],
            user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],
            statuses=[])
env.run_crons()
pr_a = to_pr(env, a)
assert pr_a.blocked
a.post_comment('hansen p=0', 'reviewer')
with repo_a: a.post_comment('hansen p=0', config['role_reviewer']['token'])
assert not pr_a.blocked
a.post_comment('hansen p=2', 'reviewer')
with repo_a: a.post_comment('hansen p=2', config['role_reviewer']['token'])
assert pr_a.blocked
b.post_comment('hansen p=0', 'reviewer')
with repo_b: b.post_comment('hansen p=0', config['role_reviewer']['token'])
assert not pr_a.blocked
View File
@@ -12,13 +12,6 @@ class re_matches:
def __repr__(self):
return '~' + self._r.pattern + '~'
def run_crons(env):
"Helper to run all crons (in a relevant order) except for the fetch PR one"
env['runbot_merge.commit']._notify()
env['runbot_merge.project']._check_progress()
env['runbot_merge.pull_requests']._check_linked_prs_statuses()
env['runbot_merge.project']._send_feedback()
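# run_crons() is dropped in favour of an env.run_crons() method on the
# environment fixture; assuming it triggers the same jobs, a minimal
# equivalent would be (a sketch, not the actual fixture implementation):
#
#   def run_crons(env):
#       env['runbot_merge.commit']._notify()
#       env['runbot_merge.project']._check_progress()
#       env['runbot_merge.pull_requests']._check_linked_prs_statuses()
#       env['runbot_merge.project']._send_feedback()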
def get_partner(env, gh_login):
return env['res.partner'].search([('github_login', '=', gh_login)])
@@ -30,5 +23,5 @@ def _simple_init(repo):
repo.make_ref('heads/master', m)
c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'})
prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user')
prx = repo.make_pr(title='title', body='body', target='master', head=c2)
return prx
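# make_pr is now called with explicit keywords: `head` replaces `ctid` and
# the author is derived from the token used rather than a `user` role name.
# A hypothetical explicit-token call, for illustration only (the `token`
# parameter name is assumed, not confirmed by this diff):
#
#   prx = repo.make_pr(title='title', body='body', target='master', head=c2,
#                      token=config['role_user']['token'])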