From d45394325277c8be7f98f1e139bb4d5c53861089 Mon Sep 17 00:00:00 2001
From: Xavier Morel
Date: Thu, 10 Oct 2019 09:22:12 +0200
Subject: [PATCH] [IMP] *: unify gh test API between runbot and fw-bot

The fw-bot testing API should improve the performance of mergebot tests
somewhat (less waiting around, for instance). The code has been updated
to the bare minimum (context-managing repos, change to PRs and replacing
role names with explicit token provisions) but extra facilities were
used to avoid changing *everything* e.g. make_commit (singular),
automatic generation of PR refs, ... The tests should eventually be
updated to remove these.

Also remove the local fake / mock. Being so much faster is a huge draw,
but I don't really want to spend more time updating it, especially when
fwbot doesn't get to take advantage. A local / lightweight fake github
(as an external service over http) might eventually be a good idea
though, and more applicable (including to third-parties).
---
 conftest.py                                |  920 +++++++-
 forwardport/tests/conftest.py              |  623 +-----
 runbot_merge/tests/conftest.py             |   40 +-
 runbot_merge/tests/fake_github/__init__.py |  878 --------
 runbot_merge/tests/fake_github/git.py      |  126 --
 runbot_merge/tests/local.py                |  133 --
 runbot_merge/tests/remote.py               |  724 -------
 runbot_merge/tests/test_basic.py           | 2209 +++++++++++---------
 runbot_merge/tests/test_multirepo.py       |  481 +++--
 runbot_merge/tests/test_utils.py           |    9 +-
 10 files changed, 2428 insertions(+), 3715 deletions(-)
 delete mode 100644 runbot_merge/tests/fake_github/__init__.py
 delete mode 100644 runbot_merge/tests/fake_github/git.py
 delete mode 100644 runbot_merge/tests/local.py

diff --git a/conftest.py b/conftest.py
index d6db1698..1b432a0c 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,9 +1,58 @@
 # -*- coding: utf-8 -*-
+"""
+Configuration:
+
+* an ``odoo`` binary in the path, which runs the relevant odoo; to ensure a
+  clean slate odoo is re-started and a new database is created before each
+  test (technically a "template" db is created first, then that DB is cloned
+  and the fresh clone is used for each test)
+
+* pytest.ini (at the root of the runbot repo or higher) with the following
+  sections and keys
+
+  ``github``
+  - owner, the name of the account (personal or org) under which test repos
+    will be created & deleted (note: some repos might be created under role
+    accounts as well)
+  - token, either personal or oauth, must have the scopes ``public_repo``,
+    ``delete_repo`` and ``admin:repo_hook``; if personal, the owner must be
+    the corresponding user account, not an org. ``user:email`` is also
+    needed for the forwardport / forwardbot tests
+
+  ``role_reviewer``, ``role_self_reviewer`` and ``role_other``
+  - name (optional, used as the partner name when creating it, otherwise the
+    github login is used)
+  - token, a personal access token with the ``public_repo`` scope (otherwise
+    the API can't leave comments), maybe eventually ``delete_repo`` (for
+    personal forks)
+
+  .. warning:: the accounts must *not* be flagged, or the webhooks on
+               commenting or creating reviews will not trigger, and the
+               tests will fail
+
+* either ``ngrok`` or ``lt`` (localtunnel) available on the path. ngrok with
+  a configured account is recommended: ngrok is more reliable than localtunnel
+  but a free account is necessary to get a rate limit high enough for some of
+  the multi-repo tests to work
+
+Finally, the tests aren't 100% reliable as they rely on quite a bit of network
+traffic; it's possible that tests fail due to network issues rather than
+logic errors.
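+
+For illustration, a pytest.ini along the lines below should be enough to run
+the suite; the owner, names and tokens shown are placeholders, not real
+values::
+
+    ; all values here are placeholders -- substitute your own accounts/tokens
+    [github]
+    owner = test-org
+    token = <token with public_repo, delete_repo, admin:repo_hook, user:email>
+
+    [role_reviewer]
+    name = Reviewer
+    token = <personal access token with public_repo>
+
+    [role_self_reviewer]
+    token = <personal access token with public_repo>
+
+    [role_other]
+    token = <personal access token with public_repo>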
+""" +import base64 +import collections import configparser +import copy +import itertools +import logging import re +import socket import subprocess +import sys import time import uuid +import xmlrpc.client +from contextlib import closing import psutil import pytest @@ -15,7 +64,7 @@ NGROK_CLI = [ def pytest_addoption(parser): parser.addoption('--addons-path') - parser.addoption('--db', help="DB to run the tests against", default=str(uuid.uuid4())) + parser.addoption('--db', help="DB to run the tests against", default='template_%s' % uuid.uuid4()) parser.addoption("--no-delete", action="store_true", help="Don't delete repo after a failed run") parser.addoption( @@ -39,10 +88,15 @@ def config(pytestconfig): """ conf = configparser.ConfigParser(interpolation=None) conf.read([pytestconfig.inifile]) - return { + cnf = { name: dict(s.items()) for name, s in conf.items() } + # special case user / owner / ... + cnf['role_user'] = { + 'token': conf['github']['token'] + } + return cnf @pytest.fixture(scope='session') def rolemap(config): @@ -62,15 +116,8 @@ def rolemap(config): rolemap[role] = data['user'] = r.json()['login'] return rolemap -# apparently conftests can override one another's fixtures but plugins can't -# override conftest fixtures (?) so if this is defined as "users" it replaces -# the one from runbot_merge/tests/local and everything breaks. -# -# Alternatively this could be special-cased using remote_p or something but -# that's even more gross. It might be possible to handle that via pytest's -# hooks as well but I didn't check @pytest.fixture -def users_(env, config, rolemap): +def users(env, config, rolemap): for role, login in rolemap.items(): if role in ('user', 'other'): continue @@ -102,40 +149,53 @@ def tunnel(pytestconfig, port): requests.get('http://localhost:4040/api') except requests.exceptions.ConnectionError: subprocess.Popen(NGROK_CLI, stdout=subprocess.DEVNULL) - time.sleep(1) + time.sleep(2) requests.post('http://localhost:4040/api/tunnels', json={ 'name': str(port), 'proto': 'http', - 'bind_tls': True, + 'bind_tls': True, # only https 'addr': addr, - 'inspect': False, - }) - time.sleep(5) + 'inspect': True, + }).raise_for_status() + tunnel = 'http://localhost:4040/api/tunnels/%s' % port - try: - r = requests.get('http://localhost:4040/api/tunnels') + for _ in range(10): + time.sleep(2) + r = requests.get(tunnel) + # not created yet, wait and retry + if r.status_code == 404: + continue + # check for weird responses r.raise_for_status() - yield next( - t['public_url'] - for t in r.json()['tunnels'] - if t['proto'] == 'https' - if t['config']['addr'].endswith(addr) - ) - finally: - requests.delete('http://localhost:4040/api/tunnels/%s' % port) - time.sleep(5) # apparently tearing down the tunnel can take some time - r = requests.get('http://localhost:4040/api/tunnels') - if r.ok and r.json()['tunnels']: + try: + yield r.json()['public_url'] + finally: + requests.delete('http://localhost:4040/api/tunnels/%s' % port) + for _ in range(10): + time.sleep(1) + r = requests.get(tunnel) + # check if deletion is done + if r.status_code == 404: + break + r.raise_for_status() + else: + raise TimeoutError("ngrok tunnel deletion failed") + + r = requests.get('http://localhost:4040/api/tunnels') + # there are still tunnels in the list -> bail + if r.ok and r.json()['tunnels']: + return + + # ngrok is broken or all tunnels have been shut down -> try to + # find and kill it (but only if it looks a lot like we started it) + for p in psutil.process_iter(): + if p.name() == 'ngrok' 
and p.cmdline() == NGROK_CLI: + p.terminate() + break return - - # ngrok is broken or all tunnels have been shut down -> try to - # find and kill it (but only if it looks a lot like we started it) - for p in psutil.process_iter(): - if p.name() == 'ngrok' and p.cmdline() == NGROK_CLI: - p.terminate() - break - + else: + raise TimeoutError("ngrok tunnel creation failed (?)") elif tunnel == 'localtunnel': p = subprocess.Popen(['lt', '-p', str(port)], stdout=subprocess.PIPE) try: @@ -163,7 +223,7 @@ def dbcache(request, module): '--stop-after-init' ], check=True) yield db - subprocess.run(['dropdb', db]) + subprocess.run(['dropdb', db], check=True) @pytest.fixture def db(request, dbcache): @@ -174,3 +234,789 @@ def db(request, dbcache): if not request.config.getoption('--no-delete'): subprocess.run(['dropdb', rundb], check=True) + +def wait_for_hook(n=1): + time.sleep(10 * n) + +def wait_for_server(db, port, proc, mod, timeout=120): + """ Polls for server to be response & have installed our module. + + Raises socket.timeout on failure + """ + limit = time.time() + timeout + while True: + if proc.poll() is not None: + raise Exception("Server unexpectedly closed") + + try: + uid = xmlrpc.client.ServerProxy( + 'http://localhost:{}/xmlrpc/2/common'.format(port))\ + .authenticate(db, 'admin', 'admin', {}) + mods = xmlrpc.client.ServerProxy( + 'http://localhost:{}/xmlrpc/2/object'.format(port))\ + .execute_kw( + db, uid, 'admin', 'ir.module.module', 'search_read', [ + [('name', '=', mod)], ['state'] + ]) + if mods and mods[0].get('state') == 'installed': + break + except ConnectionRefusedError: + if time.time() > limit: + raise socket.timeout() + +@pytest.fixture(scope='session') +def port(): + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + s.bind(('', 0)) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + return s.getsockname()[1] + +@pytest.fixture +def server(request, db, port, module): + p = subprocess.Popen([ + 'odoo', '--http-port', str(port), + '--addons-path', request.config.getoption('--addons-path'), + '-d', db, + '--max-cron-threads', '0', # disable cron threads (we're running crons by hand) + ]) + + try: + wait_for_server(db, port, p, module) + + yield p + finally: + p.terminate() + p.wait(timeout=30) + +@pytest.fixture +def env(port, server, db, default_crons): + yield Environment(port, db, default_crons) + +# users is just so I can avoid autouse on toplevel users fixture b/c it (seems +# to) break the existing local tests +@pytest.fixture +def make_repo(request, config, tunnel, users): + owner = config['github']['owner'] + github = requests.Session() + github.headers['Authorization'] = 'token %s' % config['github']['token'] + + # check whether "owner" is a user or an org, as repo-creation endpoint is + # different + q = github.get('https://api.github.com/users/{}'.format(owner)) + q.raise_for_status() + if q.json().get('type') == 'Organization': + endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner) + else: + endpoint = 'https://api.github.com/user/repos' + r = github.get('https://api.github.com/user') + r.raise_for_status() + assert r.json()['login'] == owner + + repos = [] + def repomaker(name): + fullname = '{}/{}'.format(owner, name) + repo_url = 'https://api.github.com/repos/{}'.format(fullname) + if request.config.getoption('--no-delete'): + if github.head(repo_url).ok: + pytest.skip("Repository {} already exists".format(fullname)) + else: + # just try to delete the repo, we don't really care + if github.delete(repo_url).ok: + # if we 
did delete a repo, wait a bit as gh might need to + # propagate the thing? + time.sleep(30) + + # create repo + r = github.post(endpoint, json={ + 'name': name, + 'has_issues': False, + 'has_projects': False, + 'has_wiki': False, + 'auto_init': False, + # at least one merge method must be enabled :( + 'allow_squash_merge': False, + # 'allow_merge_commit': False, + 'allow_rebase_merge': False, + }) + r.raise_for_status() + + # create webhook + github.post('{}/hooks'.format(repo_url), json={ + 'name': 'web', + 'config': { + 'url': '{}/runbot_merge/hooks'.format(tunnel), + 'content_type': 'json', + 'insecure_ssl': '1', + }, + 'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review'] + }) + + github.put('{}/contents/{}'.format(repo_url, 'a'), json={ + 'path': 'a', + 'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects', + 'content': base64.b64encode(b'whee').decode('ascii'), + 'branch': 'garbage_%s' % uuid.uuid4() + }).raise_for_status() + + return Repo(github, fullname, repos) + + yield repomaker + + if not request.config.getoption('--no-delete'): + for repo in reversed(repos): + repo.delete() + +Commit = collections.namedtuple('Commit', 'id tree message author committer parents') +class Repo: + def __init__(self, session, fullname, repos): + self._session = session + self.name = fullname + self._repos = repos + self.hook = False + repos.append(self) + + # unwatch repo + self.unsubscribe() + + def unsubscribe(self, token=None): + self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={ + 'subscribed': False, + 'ignored': True, + }) + + def _get_session(self, token): + s = self._session + if token: + s = requests.Session() + s.headers['Authorization'] = 'token %s' % token + return s + + def delete(self): + r = self._session.delete('https://api.github.com/repos/{}'.format(self.name)) + if r.status_code != 204: + logging.getLogger(__name__).warning("Unable to delete repository %s", self.name) + + def set_secret(self, secret): + assert self.hook + r = self._session.get( + 'https://api.github.com/repos/{}/hooks'.format(self.name)) + response = r.json() + assert 200 <= r.status_code < 300, response + [hook] = response + + r = self._session.patch('https://api.github.com/repos/{}/hooks/{}'.format(self.name, hook['id']), json={ + 'config': {**hook['config'], 'secret': secret}, + }) + assert 200 <= r.status_code < 300, r.json() + + def get_ref(self, ref): + return self.commit(ref).id + + def commit(self, ref): + if not re.match(r'[0-9a-f]{40}', ref): + if not ref.startswith(('heads/', 'refs/heads/')): + ref = 'refs/heads/' + ref + # apparently heads/ ~ refs/heads/ but are not + # necessarily up to date ??? 
unlike the git ref system where :ref + # starts at heads/ + if ref.startswith('heads/'): + ref = 'refs/' + ref + + r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref)) + response = r.json() + assert 200 <= r.status_code < 300, response + + return self._commit_from_gh(response) + + def _commit_from_gh(self, gh_commit): + c = gh_commit['commit'] + return Commit( + id=gh_commit['sha'], + tree=c['tree']['sha'], + message=c['message'], + author=c['author'], + committer=c['committer'], + parents=[p['sha'] for p in gh_commit['parents']], + ) + + def log(self, ref_or_sha): + for page in itertools.count(1): + r = self._session.get( + 'https://api.github.com/repos/{}/commits'.format(self.name), + params={'sha': ref_or_sha, 'page': page} + ) + assert 200 <= r.status_code < 300, r.json() + yield from map(self._commit_from_gh, r.json()) + if not r.links.get('next'): + return + + def read_tree(self, commit): + """ read tree object from commit + + :param Commit commit: + :rtype: Dict[str, str] + """ + r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree)) + assert 200 <= r.status_code < 300, r.json() + + # read tree's blobs + tree = {} + for t in r.json()['tree']: + assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases" + r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha'])) + assert 200 <= r.status_code < 300, r.json() + tree[t['path']] = base64.b64decode(r.json()['content']).decode() + + return tree + + def make_ref(self, name, commit, force=False): + assert self.hook + assert name.startswith('heads/') + r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={ + 'ref': 'refs/' + name, + 'sha': commit, + }) + if force and r.status_code == 422: + self.update_ref(name, commit, force=force) + return + assert 200 <= r.status_code < 300, r.json() + + def update_ref(self, name, commit, force=False): + assert self.hook + r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force}) + assert 200 <= r.status_code < 300, r.json() + + def protect(self, branch): + assert self.hook + r = self._session.put('https://api.github.com/repos/{}/branches/{}/protection'.format(self.name, branch), json={ + 'required_status_checks': None, + 'enforce_admins': True, + 'required_pull_request_reviews': None, + 'restrictions': None, + }) + assert 200 <= r.status_code < 300, r.json() + + # FIXME: remove this (runbot_merge should use make_commits directly) + def make_commit(self, ref, message, author, committer=None, tree=None, wait=True): + assert tree + if isinstance(ref, list): + assert all(re.match(r'[0-9a-f]{40}', r) for r in ref) + ancestor_id = ref + ref = None + else: + ancestor_id = self.get_ref(ref) if ref else None + # if ref is already a commit id, don't pass it in + if ancestor_id == ref: + ref = None + + [h] = self.make_commits( + ancestor_id, + MakeCommit(message, tree=tree, author=author, committer=committer, reset=True), + ref=ref + ) + return h + + def make_commits(self, root, *commits, ref=None): + assert self.hook + if isinstance(root, list): + parents = root + tree = None + elif root: + c = self.commit(root) + tree = c.tree + parents = [c.id] + else: + tree = None + parents = [] + + hashes = [] + for commit in commits: + if commit.reset: + tree = None + r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={ + 'tree': [ + 
{'path': k, 'mode': '100644', 'type': 'blob', 'content': v} + for k, v in commit.tree.items() + ], + 'base_tree': tree + }) + assert 200 <= r.status_code < 300, r.json() + tree = r.json()['sha'] + + data = { + 'parents': parents, + 'message': commit.message, + 'tree': tree, + } + if commit.author: + data['author'] = commit.author + if commit.committer: + data['committer'] = commit.committer + + r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data) + assert 200 <= r.status_code < 300, r.json() + + hashes.append(r.json()['sha']) + parents = [hashes[-1]] + + if ref: + self.make_ref(ref, hashes[-1], force=True) + + return hashes + + def fork(self, *, token=None): + s = self._get_session(token) + + r = s.post('https://api.github.com/repos/{}/forks'.format(self.name)) + assert 200 <= r.status_code < 300, r.json() + + repo_name = r.json()['full_name'] + repo_url = 'https://api.github.com/repos/' + repo_name + # poll for end of fork + limit = time.time() + 60 + while s.head(repo_url, timeout=5).status_code != 200: + if time.time() > limit: + raise TimeoutError("No response for repo %s over 60s" % repo_name) + time.sleep(1) + + return Repo(s, repo_name, self._repos) + + def get_pr(self, number): + # ensure PR exists before returning it + self._session.head('https://api.github.com/repos/{}/pulls/{}'.format( + self.name, + number, + )).raise_for_status() + return PR(self, number) + + def make_pr(self, *, title=None, body=None, target, head, token=None): + assert self.hook + self.hook = 2 + + if title is None: + assert ":" not in head, \ + "will not auto-infer titles for PRs in a remote repo" + c = self.commit(head) + parts = iter(c.message.split('\n\n', 1)) + title = next(parts) + body = next(parts, None) + + headers = {} + if token: + headers['Authorization'] = 'token {}'.format(token) + + # FIXME: change tests which pass a commit id to make_pr & remove this + if re.match(r'[0-9a-f]{40}', head): + ref = "temp_trash_because_head_must_be_a_ref_%d" % next(ct) + self.make_ref('heads/' + ref, head) + head = ref + + r = self._session.post( + 'https://api.github.com/repos/{}/pulls'.format(self.name), + json={ + 'title': title, + 'body': body, + 'head': head, + 'base': target, + }, + headers=headers, + ) + pr = r.json() + assert 200 <= r.status_code < 300, pr + + return PR(self, pr['number']) + + def post_status(self, ref, status, context='default', **kw): + assert self.hook + assert status in ('error', 'failure', 'pending', 'success') + r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={ + 'state': status, + 'context': context, + **kw + }) + assert 200 <= r.status_code < 300, r.json() + + def read_tree(self, commit): + """ read tree object from commit + + :param Commit commit: + :rtype: Dict[str, str] + """ + r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree)) + assert 200 <= r.status_code < 300, r.json() + + # read tree's blobs + tree = {} + for t in r.json()['tree']: + assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases" + r = self._session.get(t['url']) + assert 200 <= r.status_code < 300, r.json() + # assume all test content is textual + tree[t['path']] = base64.b64decode(r.json()['content']).decode() + + return tree + + def is_ancestor(self, sha, of): + return any(c['sha'] == sha for c in self.log(of)) + + def log(self, ref_or_sha): + for page in itertools.count(1): + r = self._session.get( + 
'https://api.github.com/repos/{}/commits'.format(self.name), + params={'sha': ref_or_sha, 'page': page} + ) + assert 200 <= r.status_code < 300, r.json() + yield from r.json() + if not r.links.get('next'): + return + + def __enter__(self): + self.hook = 1 + return self + def __exit__(self, *args): + wait_for_hook(self.hook) + self.hook = 0 + class Commit: + def __init__(self, message, *, author=None, committer=None, tree, reset=False): + self.id = None + self.message = message + self.author = author + self.committer = committer + self.tree = tree + self.reset = reset +MakeCommit = Repo.Commit +ct = itertools.count() +class PR: + def __init__(self, repo, number): + self.repo = repo + self.number = number + self.labels = LabelsProxy(self) + + @property + def _pr(self): + r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number)) + assert 200 <= r.status_code < 300, r.json() + return r.json() + + @property + def title(self): + raise NotImplementedError() + title = title.setter(lambda self, v: self._set_prop('title', v)) + + @property + def base(self): + raise NotImplementedError() + base = base.setter(lambda self, v: self._set_prop('base', v)) + + @property + def head(self): + return self._pr['head']['sha'] + + @property + def user(self): + return self._pr['user']['login'] + + @property + def state(self): + return self._pr['state'] + + @property + def comments(self): + r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number)) + assert 200 <= r.status_code < 300, r.json() + return [ + (c['user']['login'], c['body']) + for c in r.json() + ] + + @property + def ref(self): + return 'heads/' + self.branch[1] + + def post_comment(self, body, token=None): + assert self.repo.hook + headers = {} + if token: + headers['Authorization'] = 'token %s' % token + r = self.repo._session.post( + 'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number), + json={'body': body}, + headers=headers, + ) + assert 200 <= r.status_code < 300, r.json() + return r.json()['id'] + + def edit_comment(self, cid, body, token=None): + assert self.repo.hook + headers = {} + if token: + headers['Authorization'] = 'token %s' % token + r = self.repo._session.patch( + 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), + json={'body': body}, + headers=headers + ) + assert 200 <= r.status_code < 300, r.json() + wait_for_hook() + + def delete_comment(self, cid, token=None): + assert self.repo.hook + headers = {} + if token: + headers['Authorization'] = 'token %s' % token + r = self.repo._session.delete( + 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), + headers=headers + ) + assert r.status_code == 204, r.json() + + def _set_prop(self, prop, value): + assert self.repo.hook + r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={ + prop: value + }) + assert 200 <= r.status_code < 300, r.json() + + def open(self): + self._set_prop('state', 'open') + + def close(self): + self._set_prop('state', 'closed') + + @property + def branch(self): + r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format( + self.repo.name, + self.number, + )) + assert 200 <= r.status_code < 300, r.json() + info = r.json() + + repo = self.repo + reponame = info['head']['repo']['full_name'] + if reponame != self.repo.name: + # not sure deep copying the session object is safe / 
proper... + repo = Repo(copy.deepcopy(self.repo._session), reponame, []) + + return PRBranch(repo, info['head']['ref']) + + def post_review(self, state, body, token=None): + assert self.repo.hook + headers = {} + if token: + headers['Authorization'] = 'token %s' % token + r = self.repo._session.post( + 'https://api.github.com/repos/{}/pulls/{}/reviews'.format(self.repo.name, self.number), + json={'body': body, 'event': state,}, + headers=headers + ) + assert 200 <= r.status_code < 300, r.json() + +PRBranch = collections.namedtuple('PRBranch', 'repo branch') +class LabelsProxy(collections.abc.MutableSet): + def __init__(self, pr): + self._pr = pr + + @property + def _labels(self): + pr = self._pr + r = pr.repo._session.get('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number)) + assert r.ok, r.json() + return {label['name'] for label in r.json()} + + def __repr__(self): + return '' % self._labels + + def __eq__(self, other): + if isinstance(other, collections.abc.Set): + return other == self._labels + return NotImplemented + + def __contains__(self, label): + return label in self._labels + + def __iter__(self): + return iter(self._labels) + + def __len__(self): + return len(self._labels) + + def add(self, label): + pr = self._pr + assert pr.repo.hook + r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ + 'labels': [label] + }) + assert r.ok, r.json() + + def discard(self, label): + pr = self._pr + assert pr.repo.hook + r = pr.repo._session.delete('https://api.github.com/repos/{}/issues/{}/labels/{}'.format(pr.repo.name, pr.number, label)) + # discard should do nothing if the item didn't exist in the set + assert r.ok or r.status_code == 404, r.json() + + def update(self, *others): + pr = self._pr + assert pr.repo.hook + # because of course that one is not provided by MutableMapping... + r = pr.repo._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ + 'labels': list(set(itertools.chain.from_iterable(others))) + }) + assert r.ok, r.json() + +class Environment: + def __init__(self, port, db, default_crons=()): + self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {}) + self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port)) + self._db = db + self._default_crons = default_crons + + def __call__(self, model, method, *args, **kwargs): + return self._object.execute_kw( + self._db, self._uid, 'admin', + model, method, + args, kwargs + ) + + def __getitem__(self, name): + return Model(self, name) + + def run_crons(self, *xids, **kw): + crons = xids or self._default_crons + print('running crons', crons, file=sys.stderr) + for xid in crons: + print('\trunning cron', xid, '...', file=sys.stderr) + _, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid) + assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model) + self('ir.cron', 'method_direct_trigger', [cron_id], **kw) + print('\tdone', file=sys.stderr) + print('done', file=sys.stderr) + # sleep for some time as a lot of crap may have happened (?) 
+ wait_for_hook() + +class Model: + __slots__ = ['_env', '_model', '_ids', '_fields'] + def __init__(self, env, model, ids=(), fields=None): + object.__setattr__(self, '_env', env) + object.__setattr__(self, '_model', model) + object.__setattr__(self, '_ids', tuple(ids or ())) + + object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation'])) + + @property + def ids(self): + return self._ids + + def __bool__(self): + return bool(self._ids) + + def __len__(self): + return len(self._ids) + + def __eq__(self, other): + if not isinstance(other, Model): + return NotImplemented + return self._model == other._model and self._ids == other._ids + + def __repr__(self): + return "{}({})".format(self._model, ', '.join(str(id_) for id_ in self._ids)) + + def exists(self): + ids = self._env(self._model, 'exists', self._ids) + return Model(self._env, self._model, ids) + + def search(self, *args, **kwargs): + ids = self._env(self._model, 'search', *args, **kwargs) + return Model(self._env, self._model, ids) + + def create(self, values): + return Model(self._env, self._model, [self._env(self._model, 'create', values)]) + + def write(self, values): + return self._env(self._model, 'write', self._ids, values) + + def read(self, fields): + return self._env(self._model, 'read', self._ids, fields) + + def unlink(self): + return self._env(self._model, 'unlink', self._ids) + + def __getitem__(self, index): + if isinstance(index, str): + return getattr(self, index) + ids = self._ids[index] + if isinstance(ids, int): + ids = [ids] + + return Model(self._env, self._model, ids, fields=self._fields) + + def __getattr__(self, fieldname): + if not self._ids: + return False + + assert len(self._ids) == 1 + if fieldname == 'id': + return self._ids[0] + + val = self.read([fieldname])[0][fieldname] + field_description = self._fields[fieldname] + if field_description['type'] in ('many2one', 'one2many', 'many2many'): + val = val or [] + if field_description['type'] == 'many2one': + val = val[:1] # (id, name) => [id] + return Model(self._env, field_description['relation'], val) + + return val + + def __setattr__(self, fieldname, value): + assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many') + self._env(self._model, 'write', self._ids, {fieldname: value}) + + def __iter__(self): + return ( + Model(self._env, self._model, [i], fields=self._fields) + for i in self._ids + ) + + def mapped(self, path): + field, *rest = path.split('.', 1) + descr = self._fields[field] + if descr['type'] in ('many2one', 'one2many', 'many2many'): + result = Model(self._env, descr['relation']) + for record in self: + result |= getattr(record, field) + + return result.mapped(rest[0]) if rest else result + + assert not rest + return [getattr(r, field) for r in self] + + def filtered(self, fn): + result = Model(self._env, self._model, fields=self._fields) + for record in self: + if fn(record): + result |= record + return result + + def __sub__(self, other): + if not isinstance(other, Model) or self._model != other._model: + return NotImplemented + + return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields) + + def __or__(self, other): + if not isinstance(other, Model) or self._model != other._model: + return NotImplemented + + return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields) + __add__ = __or__ + + def __and__(self, other): + if not isinstance(other, Model) or self._model != 
other._model: + return NotImplemented + + return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields) + + def invalidate_cache(self, fnames=None, ids=None): + pass # not a concern when every access is an RPC call diff --git a/forwardport/tests/conftest.py b/forwardport/tests/conftest.py index a48760eb..efbeb2b6 100644 --- a/forwardport/tests/conftest.py +++ b/forwardport/tests/conftest.py @@ -1,62 +1,23 @@ # -*- coding: utf-8 -*- -import base64 -import collections -import copy -import itertools -import logging import pathlib -import socket -import time -import uuid -import xmlrpc.client -from contextlib import closing - -import pytest -import subprocess - import re import requests from shutil import rmtree +import pytest + from odoo.tools.appdirs import user_cache_dir -DEFAULT_CRONS = [ - 'runbot_merge.process_updated_commits', - 'runbot_merge.merge_cron', - 'forwardport.port_forward', - 'forwardport.updates', - 'runbot_merge.check_linked_prs_status', - 'runbot_merge.feedback_cron', -] - -def wait_for_hook(n=1): - time.sleep(10 * n) - -def wait_for_server(db, port, proc, mod, timeout=120): - """ Polls for server to be response & have installed our module. - - Raises socket.timeout on failure - """ - limit = time.time() + timeout - while True: - if proc.poll() is not None: - raise Exception("Server unexpectedly closed") - - try: - uid = xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/common'.format(port))\ - .authenticate(db, 'admin', 'admin', {}) - mods = xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/object'.format(port))\ - .execute_kw( - db, uid, 'admin', 'ir.module.module', 'search_read', [ - [('name', '=', mod)], ['state'] - ]) - if mods and mods[0].get('state') == 'installed': - break - except ConnectionRefusedError: - if time.time() > limit: - raise socket.timeout() +@pytest.fixture +def default_crons(): + return [ + 'runbot_merge.process_updated_commits', + 'runbot_merge.merge_cron', + 'forwardport.port_forward', + 'forwardport.updates', + 'runbot_merge.check_linked_prs_status', + 'runbot_merge.feedback_cron', + ] # public_repo — necessary to leave comments # admin:repo_hook — to set up hooks (duh) @@ -99,20 +60,6 @@ def _cleanup_cache(config, users): for login in users.values(): rmtree(cache_root / login, ignore_errors=True) -@pytest.fixture(autouse=True) -def users(users_): - return users_ - -@pytest.fixture -def project(env, config): - return env['runbot_merge.project'].create({ - 'name': 'odoo', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'fp_github_token': config['github']['token'], - 'required_statuses': 'legal/cla,ci/runbot', - }) - @pytest.fixture(scope='session') def module(): """ When a test function is (going to be) run, selects the containing @@ -120,549 +67,3 @@ def module(): """ # NOTE: no request.fspath (because no request.function) in session-scoped fixture so can't put module() at the toplevel return 'forwardport' - -@pytest.fixture(scope='session') -def port(): - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - s.bind(('', 0)) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - return s.getsockname()[1] - -@pytest.fixture -def server(request, db, port, module): - p = subprocess.Popen([ - 'odoo', '--http-port', str(port), - '--addons-path', request.config.getoption('--addons-path'), - '-d', db, - '--max-cron-threads', '0', # disable cron threads (we're running crons by hand) - ]) - - try: - wait_for_server(db, port, p, module) - 
- yield p - finally: - p.terminate() - p.wait(timeout=30) - -@pytest.fixture -def env(port, server, db): - yield Environment(port, db) - -# users is just so I can avoid autouse on toplevel users fixture b/c it (seems -# to) break the existing local tests -@pytest.fixture -def make_repo(request, config, tunnel, users): - owner = config['github']['owner'] - github = requests.Session() - github.headers['Authorization'] = 'token %s' % config['github']['token'] - - # check whether "owner" is a user or an org, as repo-creation endpoint is - # different - q = github.get('https://api.github.com/users/{}'.format(owner)) - q.raise_for_status() - if q.json().get('type') == 'Organization': - endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner) - else: - endpoint = 'https://api.github.com/user/repos' - r = github.get('https://api.github.com/user') - r.raise_for_status() - assert r.json()['login'] == owner - - repos = [] - def repomaker(name): - fullname = '{}/{}'.format(owner, name) - repo_url = 'https://api.github.com/repos/{}'.format(fullname) - if request.config.getoption('--no-delete'): - if github.head(repo_url).ok: - pytest.skip("Repository {} already exists".format(fullname)) - else: - # just try to delete the repo, we don't really care - if github.delete(repo_url).ok: - # if we did delete a repo, wait a bit as gh might need to - # propagate the thing? - time.sleep(30) - - # create repo - r = github.post(endpoint, json={ - 'name': name, - 'has_issues': False, - 'has_projects': False, - 'has_wiki': False, - 'auto_init': False, - # at least one merge method must be enabled :( - 'allow_squash_merge': False, - # 'allow_merge_commit': False, - 'allow_rebase_merge': False, - }) - r.raise_for_status() - - new_repo = Repo(github, fullname, repos) - # create webhook - github.post('{}/hooks'.format(repo_url), json={ - 'name': 'web', - 'config': { - 'url': '{}/runbot_merge/hooks'.format(tunnel), - 'content_type': 'json', - 'insecure_ssl': '1', - }, - 'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review'] - }) - - github.put('https://api.github.com/repos/{}/contents/{}'.format(fullname, 'a'), json={ - 'path': 'a', - 'message': 'github returns a 409 (Git Repository is Empty) if trying to create a tree in a repo with no objects', - 'content': base64.b64encode(b'whee').decode('ascii'), - 'branch': 'garbage_%s' % uuid.uuid4() - }).raise_for_status() - - return new_repo - - yield repomaker - - if not request.config.getoption('--no-delete'): - for repo in reversed(repos): - repo.delete() - -Commit = collections.namedtuple('Commit', 'id tree message author committer parents') -class Repo: - def __init__(self, session, fullname, repos): - self._session = session - self.name = fullname - self._repos = repos - self.hook = False - repos.append(self) - - # unwatch repo - self.unsubscribe() - - def unsubscribe(self, token=None): - self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={ - 'subscribed': False, - 'ignored': True, - }) - - def delete(self): - r = self._session.delete('https://api.github.com/repos/{}'.format(self.name)) - if r.status_code != 204: - logging.getLogger(__name__).warn("Unable to delete repository %s", self.name) - - def commit(self, ref): - if not re.match(r'[0-9a-f]{40}', ref): - if not ref.startswith(('heads/', 'refs/heads/')): - ref = 'refs/heads/' + ref - # apparently heads/ ~ refs/heads/ but are not - # necessarily up to date ??? 
unlike the git ref system where :ref - # starts at heads/ - if ref.startswith('heads/'): - ref = 'refs/' + ref - - r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref)) - response = r.json() - assert 200 <= r.status_code < 300, response - - return self._commit_from_gh(response) - - def _commit_from_gh(self, gh_commit): - c = gh_commit['commit'] - return Commit( - id=gh_commit['sha'], - tree=c['tree']['sha'], - message=c['message'], - author=c['author'], - committer=c['committer'], - parents=[p['sha'] for p in gh_commit['parents']], - ) - - def log(self, ref_or_sha): - for page in itertools.count(1): - r = self._session.get( - 'https://api.github.com/repos/{}/commits'.format(self.name), - params={'sha': ref_or_sha, 'page': page} - ) - assert 200 <= r.status_code < 300, r.json() - yield from map(self._commit_from_gh, r.json()) - if not r.links.get('next'): - return - - def read_tree(self, commit): - """ read tree object from commit - - :param Commit commit: - :rtype: Dict[str, str] - """ - r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree)) - assert 200 <= r.status_code < 300, r.json() - - # read tree's blobs - tree = {} - for t in r.json()['tree']: - assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases" - r = self._session.get('https://api.github.com/repos/{}/git/blobs/{}'.format(self.name, t['sha'])) - assert 200 <= r.status_code < 300, r.json() - tree[t['path']] = base64.b64decode(r.json()['content']).decode() - - return tree - - def make_ref(self, name, commit, force=False): - assert self.hook - assert name.startswith('heads/') - r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={ - 'ref': 'refs/' + name, - 'sha': commit, - }) - if force and r.status_code == 422: - self.update_ref(name, commit, force=True) - return - assert 200 <= r.status_code < 300, r.json() - - def update_ref(self, name, commit, force=False): - r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force}) - assert 200 <= r.status_code < 300, r.json() - - def make_commits(self, root, *commits, ref=None): - assert self.hook - if root: - c = self.commit(root) - tree = c.tree - parents = [c.id] - else: - tree = None - parents = [] - - hashes = [] - for commit in commits: - if commit.reset: - tree = None - r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={ - 'tree': [ - {'path': k, 'mode': '100644', 'type': 'blob', 'content': v} - for k, v in commit.tree.items() - ], - 'base_tree': tree - }) - assert 200 <= r.status_code < 300, r.json() - tree = r.json()['sha'] - - data = { - 'parents': parents, - 'message': commit.message, - 'tree': tree, - } - if commit.author: - data['author'] = commit.author - if commit.committer: - data['committer'] = commit.committer - - r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data) - assert 200 <= r.status_code < 300, r.json() - - hashes.append(r.json()['sha']) - parents = [hashes[-1]] - - if ref: - self.make_ref(ref, hashes[-1], force=True) - - return hashes - - def fork(self, *, token=None): - s = self._get_session(token) - - r = s.post('https://api.github.com/repos/{}/forks'.format(self.name)) - assert 200 <= r.status_code < 300, r.json() - - repo_name = r.json()['full_name'] - repo_url = 'https://api.github.com/repos/' + repo_name - # poll for end of fork - limit = time.time() + 60 - 
while s.head(repo_url, timeout=5).status_code != 200: - if time.time() > limit: - raise TimeoutError("No response for repo %s over 60s" % repo_name) - time.sleep(1) - - return Repo(s, repo_name, self._repos) - - def _get_session(self, token): - s = self._session - if token: - s = requests.Session() - s.headers['Authorization'] = 'token %s' % token - return s - - def get_pr(self, number): - # ensure PR exists before returning it - self._session.head('https://api.github.com/repos/{}/pulls/{}'.format( - self.name, - number, - )).raise_for_status() - return PR(self, number) - - def make_pr(self, *, title=None, body=None, target, head, token=None): - assert self.hook - self.hook = 2 - - if title is None: - assert ":" not in head, \ - "will not auto-infer titles for PRs in a remote repo" - c = self.commit(head) - parts = iter(c.message.split('\n\n', 1)) - title = next(parts) - body = next(parts, None) - - headers = {} - if token: - headers['Authorization'] = 'token {}'.format(token) - - r = self._session.post( - 'https://api.github.com/repos/{}/pulls'.format(self.name), - json={ - 'title': title, - 'body': body, - 'head': head, - 'base': target, - }, - headers=headers, - ) - pr = r.json() - assert 200 <= r.status_code < 300, pr - - return PR(self, pr['number']) - - def post_status(self, ref, status, context='default', **kw): - assert self.hook - assert status in ('error', 'failure', 'pending', 'success') - r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.commit(ref).id), json={ - 'state': status, - 'context': context, - **kw - }) - assert 200 <= r.status_code < 300, r.json() - - def __enter__(self): - self.hook = 1 - return self - def __exit__(self, *args): - wait_for_hook(self.hook) - self.hook = 0 - -class PR: - __slots__ = ['number', 'repo'] - - def __init__(self, repo, number): - self.repo = repo - self.number = number - - @property - def _pr(self): - r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number)) - assert 200 <= r.status_code < 300, r.json() - return r.json() - - @property - def head(self): - return self._pr['head']['sha'] - - @property - def comments(self): - r = self.repo._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number)) - assert 200 <= r.status_code < 300, r.json() - return [ - (c['user']['login'], c['body']) - for c in r.json() - ] - - def post_comment(self, body, token=None): - assert self.repo.hook - headers = {} - if token: - headers['Authorization'] = 'token %s' % token - r = self.repo._session.post( - 'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number), - json={'body': body}, - headers=headers, - ) - assert 200 <= r.status_code < 300, r.json() - return r.json()['id'] - - def _set_prop(self, prop, value): - assert self.repo.hook - r = self.repo._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={ - prop: value - }) - assert 200 <= r.status_code < 300, r.json() - - def open(self): - self._set_prop('state', 'open') - - def close(self): - self._set_prop('state', 'closed') - - @property - def branch(self): - r = self.repo._session.get('https://api.github.com/repos/{}/pulls/{}'.format( - self.repo.name, - self.number, - )) - assert 200 <= r.status_code < 300, r.json() - info = r.json() - - repo = self.repo - reponame = info['head']['repo']['full_name'] - if reponame != self.repo.name: - # not sure deep copying the session object is safe / proper... 
- repo = Repo(copy.deepcopy(self.repo._session), reponame, []) - - return PRBranch(repo, info['head']['ref']) -PRBranch = collections.namedtuple('PRBranch', 'repo branch') - -class Environment: - def __init__(self, port, db): - self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {}) - self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port)) - self._db = db - - def __call__(self, model, method, *args, **kwargs): - return self._object.execute_kw( - self._db, self._uid, 'admin', - model, method, - args, kwargs - ) - - def __getitem__(self, name): - return Model(self, name) - - def run_crons(self, *xids, **kw): - crons = xids or DEFAULT_CRONS - for xid in crons: - _, model, cron_id = self('ir.model.data', 'xmlid_lookup', xid) - assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model) - self('ir.cron', 'method_direct_trigger', [cron_id], **kw) - # sleep for some time as a lot of crap may have happened (?) - wait_for_hook() - -class Model: - __slots__ = ['_env', '_model', '_ids', '_fields'] - def __init__(self, env, model, ids=(), fields=None): - object.__setattr__(self, '_env', env) - object.__setattr__(self, '_model', model) - object.__setattr__(self, '_ids', tuple(ids or ())) - - object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation'])) - - @property - def ids(self): - return self._ids - - def __bool__(self): - return bool(self._ids) - - def __len__(self): - return len(self._ids) - - def __eq__(self, other): - if not isinstance(other, Model): - return NotImplemented - return self._model == other._model and self._ids == other._ids - - def __repr__(self): - return "{}({})".format(self._model, ', '.join(str(id) for id in self._ids)) - - def exists(self): - ids = self._env(self._model, 'exists', self._ids) - return Model(self._env, self._model, ids) - - def search(self, *args, **kwargs): - ids = self._env(self._model, 'search', *args, **kwargs) - return Model(self._env, self._model, ids) - - def create(self, values): - return Model(self._env, self._model, [self._env(self._model, 'create', values)]) - - def write(self, values): - return self._env(self._model, 'write', self._ids, values) - - def read(self, fields): - return self._env(self._model, 'read', self._ids, fields) - - def unlink(self): - return self._env(self._model, 'unlink', self._ids) - - def __getitem__(self, index): - if isinstance(index, str): - return getattr(self, index) - ids = self._ids[index] - if isinstance(ids, int): - ids = [ids] - - return Model(self._env, self._model, ids, fields=self._fields) - - def __getattr__(self, fieldname): - if not self._ids: - return False - - assert len(self._ids) == 1 - if fieldname == 'id': - return self._ids[0] - - val = self.read([fieldname])[0][fieldname] - field_description = self._fields[fieldname] - if field_description['type'] in ('many2one', 'one2many', 'many2many'): - val = val or [] - if field_description['type'] == 'many2one': - val = val[:1] # (id, name) => [id] - return Model(self._env, field_description['relation'], val) - - return val - - def __setattr__(self, fieldname, value): - assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many') - self._env(self._model, 'write', self._ids, {fieldname: value}) - - def __iter__(self): - return ( - Model(self._env, self._model, [i], fields=self._fields) - for i in self._ids - ) - - def mapped(self, path): - field, *rest = 
path.split('.', 1) - descr = self._fields[field] - if descr['type'] in ('many2one', 'one2many', 'many2many'): - result = Model(self._env, descr['relation']) - for record in self: - result |= getattr(record, field) - - return result.mapped(rest[0]) if rest else result - - assert not rest - return [getattr(r, field) for r in self] - - def filtered(self, fn): - result = Model(self._env, self._model, fields=self._fields) - for record in self: - if fn(record): - result |= record - return result - - def __sub__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ not in other._ids), fields=self._fields) - - def __or__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields) - __add__ = __or__ - - def __and__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, tuple(id_ for id_ in self._ids if id_ in other._ids), fields=self._fields) - - - def invalidate_cache(self, fnames=None, ids=None): - pass # not a concern when every access is an RPC call diff --git a/runbot_merge/tests/conftest.py b/runbot_merge/tests/conftest.py index b36d92ea..2a139056 100644 --- a/runbot_merge/tests/conftest.py +++ b/runbot_merge/tests/conftest.py @@ -1,8 +1,42 @@ -import uuid import pytest - -pytest_plugins = ["local"] +import requests @pytest.fixture(scope='session') def module(): return 'runbot_merge' + +@pytest.fixture +def page(port): + s = requests.Session() + def get(url): + r = s.get('http://localhost:{}{}'.format(port, url)) + r.raise_for_status() + return r.content + return get + +# env['runbot_merge.project']._check_fetch() +# runbot_merge.fetch_prs_cron +@pytest.fixture +def default_crons(): + return [ + # env['runbot_merge.project']._check_fetch() + # 'runbot_merge.fetch_prs_cron', + # env['runbot_merge.commit']._notify() + 'runbot_merge.process_updated_commits', + # env['runbot_merge.project']._check_progress() + 'runbot_merge.merge_cron', + # env['runbot_merge.pull_requests']._check_linked_prs_statuses() + 'runbot_merge.check_linked_prs_status', + # env['runbot_merge.project']._send_feedback() + 'runbot_merge.feedback_cron', + ] + +@pytest.fixture +def project(env, config): + return env['runbot_merge.project'].create({ + 'name': 'odoo', + 'github_token': config['github']['token'], + 'github_prefix': 'hansen', + 'branch_ids': [(0, 0, {'name': 'master'})], + 'required_statuses': 'legal/cla,ci/runbot', + }) \ No newline at end of file diff --git a/runbot_merge/tests/fake_github/__init__.py b/runbot_merge/tests/fake_github/__init__.py deleted file mode 100644 index cba86221..00000000 --- a/runbot_merge/tests/fake_github/__init__.py +++ /dev/null @@ -1,878 +0,0 @@ -import collections -import datetime -import hashlib -import hmac -import io -import itertools -import json -import logging -import re - -import responses -import werkzeug.urls -import werkzeug.test -import werkzeug.wrappers -from werkzeug.urls import url_parse, url_encode - -from . 
import git - -REPOS_API_PATTERN = re.compile( - r'https://api.github.com/repos/(?P\w+/\w+)/(?P.+)' -) -USERS_API_PATTERN = re.compile( - r"https://api.github.com/users/(?P\w+)" -) - -class APIResponse(responses.BaseResponse): - def __init__(self, sim, url): - super(APIResponse, self).__init__( - method=None, - url=url - ) - self.sim = sim - self.content_type = 'application/json' - self.stream = False - - def matches(self, request): - return self._url_matches(self.url, request.url, self.match_querystring) - - def get_response(self, request): - m = self.url.match(request.url) - - r = self.dispatch(request, m) - if isinstance(r, responses.HTTPResponse): - return r - - (status, r) = r - headers = self.get_headers() - if r is None: - body = io.BytesIO(b'') - headers['Content-Type'] = 'text/plain' - else: - body = io.BytesIO(json.dumps(r).encode('utf-8')) - - return responses.HTTPResponse( - status=status, - reason=r.get('message') if isinstance(r, dict) else "bollocks", - body=body, - headers=headers, - preload_content=False, ) - -class ReposAPIResponse(APIResponse): - def __init__(self, sim): - super().__init__(sim, REPOS_API_PATTERN) - - def dispatch(self, request, match): - return self.sim.repos[match.group('repo')].api(match.group('path'), request) - -class UsersAPIResponse(APIResponse): - def __init__(self, sim): - super().__init__(sim, url=USERS_API_PATTERN) - - def dispatch(self, request, match): - return self.sim._read_user(request, match.group('user')) - - -class Github(object): - """ Github simulator - - When enabled (by context-managing): - - * intercepts all ``requests`` calls & replies to api.github.com - * sends relevant hooks (registered per-repo as pairs of WSGI app and URL) - * stores repo content - """ - def __init__(self): - # {repo: {name, issues, objects, refs, hooks}} - self.repos = {} - - def repo(self, name, hooks=()): - r = self.repos[name] = Repo(name) - for hook, events in hooks: - r.hook(hook, events) - return self.repos[name] - - def __enter__(self): - # otherwise swallows errors from within the test - self._requests = responses.RequestsMock(assert_all_requests_are_fired=False).__enter__() - self._requests.add(ReposAPIResponse(self)) - self._requests.add(UsersAPIResponse(self)) - return self - - def __exit__(self, *args): - return self._requests.__exit__(*args) - - def _read_user(self, _, user): - return (200, { - 'id': id(user), - 'type': 'User', - 'login': user, - 'name': user.capitalize(), - }) - - -class Repo(object): - def __init__(self, name): - self.name = name - self.issues = {} - #: we're cheating, instead of storing serialised in-memory - #: objects we're storing the Python stuff directly, Commit - #: objects for commits, {str: hash} for trees and bytes for - #: blobs. 
We're still indirecting via hashes and storing a - #: h:o map because going through the API probably requires it - self.objects = {} - # branches: refs/heads/* - # PRs: refs/pull/* - self.refs = {} - # {event: (wsgi_app, url)} - self.hooks = collections.defaultdict(list) - self.protected = set() - - def hook(self, hook, events): - for event in events: - self.hooks[event].append(Client(*hook)) - - def notify(self, event_type, *payload): - for client in self.hooks.get(event_type, []): - getattr(client, event_type)(*payload) - - def set_secret(self, secret): - for clients in self.hooks.values(): - for client in clients: - client.secret = secret - - def issue(self, number): - return self.issues[number] - - def make_issue(self, title, body): - return Issue(self, title, body) - - def make_pr(self, title, body, target, ctid, user, label=None): - assert 'heads/%s' % target in self.refs - return PR(self, title, body, target, ctid, user=user, label='{}:{}'.format(user, label or target)) - - def get_ref(self, ref): - if re.match(r'[0-9a-f]{40}', ref): - return ref - - sha = self.refs.get(ref) - assert sha, "no ref %s" % ref - return sha - - def make_ref(self, name, commit, force=False): - assert isinstance(self.objects[commit], Commit) - if not force and name in self.refs: - raise ValueError("ref %s already exists" % name) - self.refs[name] = commit - - def protect(self, branch): - ref = 'heads/%s' % branch - assert ref in self.refs - self.protected.add(ref) - - def update_ref(self, name, commit, force=False): - current = self.refs.get(name) - assert current is not None - - assert name not in self.protected and force or git.is_ancestor( - self.objects, current, commit) - - self.make_ref(name, commit, force=True) - - def commit(self, ref): - sha = self.refs.get(ref) or ref - commit = self.objects[sha] - assert isinstance(commit, Commit) - return commit - - def log(self, ref): - commits = [self.commit(ref)] - while commits: - c = commits.pop(0) - commits.extend(self.commit(r) for r in c.parents) - yield c.to_json() - - def post_status(self, ref, state, context='default', **kw): - assert state in ('error', 'failure', 'pending', 'success') - c = self.commit(ref) - c.statuses.append({'state': state, 'context': context, **kw}) - self.notify('status', self.name, context, state, c.id, kw) - - def make_commit(self, ref, message, author, committer=None, tree=None, wait=True): - assert tree, "a commit must provide either a full tree" - - refs = ref or [] - if not isinstance(refs, list): - refs = [ref] - - pids = [ - ref if re.match(r'[0-9a-f]{40}', ref) else self.refs[ref] - for ref in refs - ] - - if type(tree) is type(u''): - assert isinstance(self.objects.get(tree), dict) - tid = tree - else: - tid = self._save_tree(tree) - - c = Commit(tid, message, author, committer or author, parents=pids) - self.objects[c.id] = c - if refs and refs[0] != pids[0]: - self.refs[refs[0]] = c.id - return c.id - - def _save_tree(self, t): - """ t: Dict String (String | Tree) - """ - t = {name: self._make_obj(obj) for name, obj in t.items()} - h, _ = git.make_tree( - self.objects, - t - ) - self.objects[h] = t - return h - - def _make_obj(self, o): - if type(o) is type(u''): - o = o.encode('utf-8') - - if type(o) is bytes: - h, b = git.make_blob(o) - self.objects[h] = o - return h - return self._save_tree(o) - - def api(self, path, request): - # a better version would be some sort of longest-match? 
- for method, pattern, handler in sorted(self._handlers, key=lambda t: -len(t[1])): - if method and request.method != method: - continue - # FIXME: remove qs from path & ensure path is entirely matched, maybe finally use proper routing? - m = re.match(pattern, path) - if m: - return handler(self, request, **m.groupdict()) - return (404, {'message': "No match for {} {}".format(request.method, path)}) - - def read_tree(self, commit): - return git.read_object(self.objects, commit.tree) - - def is_ancestor(self, sha, of): - assert not git.is_ancestor(self.objects, sha, of=of) - - def _read_ref(self, _, ref): - obj = self.refs.get(ref) - if obj is None: - return (404, None) - return (200, { - "ref": "refs/%s" % ref, - "object": { - "type": "commit", - "sha": obj, - } - }) - def _create_ref(self, r): - body = json.loads(r.body) - ref = body['ref'] - # ref must start with refs/ and contain at least two slashes - if not (ref.startswith('refs/') and ref.count('/') >= 2): - return (400, None) - ref = ref[5:] - # if ref already exists conflict? - if ref in self.refs: - return (409, None) - - sha = body['sha'] - obj = self.objects.get(sha) - # if sha is not in the repo or not a commit, 404 - if not isinstance(obj, Commit): - return (404, None) - - self.make_ref(ref, sha) - - return (201, { - "ref": "refs/%s" % ref, - "object": { - "type": "commit", - "sha": sha, - } - }) - - def _write_ref(self, r, ref): - current = self.refs.get(ref) - if current is None: - return (404, None) - body = json.loads(r.body) - sha = body['sha'] - if sha not in self.objects: - return (404, None) - - try: - self.update_ref(ref, sha, body.get('force') or False) - except AssertionError: - return (400, None) - - return (200, { - "ref": "refs/%s" % ref, - "object": { - "type": "commit", - "sha": sha, - } - }) - - def _create_commit(self, r): - body = json.loads(r.body) - author = body.get('author') - try: - sha = self.make_commit( - ref=body.get('parents'), - message=body['message'], - author=author, - committer=body.get('committer'), - tree=body['tree'] - ) - except (KeyError, AssertionError): - # either couldn't find the parent or couldn't find the tree - return (404, None) - - return (201, self._read_commit(r, sha)[1]) - - def _read_commit(self, _, sha): - c = self.objects.get(sha) - if not isinstance(c, Commit): - return (404, None) - return (200, { - "sha": sha, - "author": c.author.to_json(), - "committer": c.committer.to_json(), - "message": c.message, - "tree": {"sha": c.tree}, - "parents": [{"sha": p} for p in c.parents], - }) - - def _read_statuses(self, _, ref): - try: - c = self.commit(ref) - except KeyError: - return (404, None) - - return (200, { - 'sha': c.id, - 'total_count': len(c.statuses), - # TODO: combined? 
- 'statuses': [ - {'description': None, 'target_url': None, **st} - for st in reversed(c.statuses)] - }) - - def _read_issue(self, r, number): - try: - issue = self.issues[int(number)] - except KeyError: - return (404, None) - attr = {'pull_request': True} if isinstance(issue, PR) else {} - return (200, {'number': issue.number, **attr}) - - def _read_issue_comments(self, r, number): - try: - issue = self.issues[int(number)] - except KeyError: - return (404, None) - return (200, [{ - 'user': {'login': author}, - 'body': body, - } for author, body in issue.comments - if not body.startswith('REVIEW') - ]) - - def _create_issue_comment(self, r, number): - try: - issue = self.issues[int(number)] - except KeyError: - return (404, None) - try: - body = json.loads(r.body)['body'] - except KeyError: - return (400, None) - - issue.post_comment(body, "user") - return (201, { - 'id': 0, - 'body': body, - 'user': { 'login': "user" }, - }) - - def _read_pr(self, r, number): - try: - pr = self.issues[int(number)] - except KeyError: - return (404, None) - # FIXME: dedup with Client - return (200, { - 'number': pr.number, - 'head': { - 'sha': pr.head, - 'label': pr.label, - }, - 'base': { - 'ref': pr.base, - 'repo': { - 'name': self.name.split('/')[1], - 'full_name': self.name, - }, - }, - 'title': pr.title, - 'body': pr.body, - 'commits': len(pr.commits), - 'user': {'login': pr.user}, - }) - - def _edit_pr(self, r, number): - try: - pr = self.issues[int(number)] - except KeyError: - return (404, None) - - body = json.loads(r.body) - if not body.keys() & {'title', 'body', 'state', 'base'}: - # FIXME: return PR content - return (200, {}) - assert body.get('state') in ('open', 'closed', None) - - pr.state = body.get('state') or pr.state - if body.get('title'): - pr.title = body.get('title') - if body.get('body'): - pr.body = body.get('body') - if body.get('base'): - pr.base = body.get('base') - - if body.get('state') == 'open': - self.notify('pull_request', 'reopened', pr) - elif body.get('state') == 'closed': - self.notify('pull_request', 'closed', pr) - - return (200, {}) - - def _read_pr_reviews(self, _, number): - pr = self.issues.get(int(number)) - if not isinstance(pr, PR): - return (404, None) - - return (200, [{ - 'user': {'login': author}, - 'state': r.group(1), - 'body': r.group(2), - } - for author, body in pr.comments - for r in [re.match(r'REVIEW (\w+)\n\n(.*)', body)] - if r - ]) - - def _read_pr_commits(self, r, number): - pr = self.issues.get(int(number)) - if not isinstance(pr, PR): - return (404, None) - - url = url_parse(r.url) - qs = url.decode_query() - # github pages are 1-indexeds - page = int(qs.get('page') or 1) - 1 - per_page = int(qs.get('per_page') or 100) - - offset = page * per_page - limit = page + 1 * per_page - headers = {'Content-Type': 'application/json'} - if len(pr.commits) > limit: - nextlink = url.replace(query=url_encode(dict(qs, page=page+1))) - headers['Link'] = '<%s>; rel="next"' % str(nextlink) - - commits = [ - c.to_json() - for c in sorted( - pr.commits, - key=lambda c: (c.author.date, c.committer.date) - )[offset:limit] - ] - body = io.BytesIO(json.dumps(commits).encode('utf-8')) - - return responses.HTTPResponse( - status=200, reason="OK", - headers=headers, - body=body, preload_content=False, - ) - - def _get_labels(self, r, number): - try: - pr = self.issues[int(number)] - except KeyError: - return (404, None) - - return (200, [{'name': label} for label in pr.labels]) - - def _reset_labels(self, r, number): - try: - pr = self.issues[int(number)] - except 
KeyError: - return (404, None) - - pr.labels = set(json.loads(r.body)['labels']) - - return (200, {}) - - def _do_merge(self, r): - body = json.loads(r.body) # {base, head, commit_message} - if not body.get('commit_message'): - return (400, {'message': "Merges require a commit message"}) - base = 'heads/%s' % body['base'] - target = self.refs.get(base) - if not target: - return (404, {'message': "Base does not exist"}) - # head can be either a branch or a sha - sha = self.refs.get('heads/%s' % body['head']) or body['head'] - if sha not in self.objects: - return (404, {'message': "Head does not exist"}) - - if git.is_ancestor(self.objects, sha, of=target): - return (204, None) - - # merging according to read-tree: - # get common ancestor (base) of commits - try: - merge_base = git.merge_base(self.objects, target, sha) - except Exception: - return (400, {'message': "No common ancestor between %(base)s and %(head)s" % body}) - try: - tid = git.merge_objects( - self.objects, - self.objects[merge_base].tree, - self.objects[target].tree, - self.objects[sha].tree, - ) - except Exception as e: - logging.exception("Merge Conflict") - return (409, {'message': 'Merge Conflict %r' % e}) - - c = Commit(tid, body['commit_message'], author=None, committer=None, parents=[target, sha]) - self.objects[c.id] = c - self.refs[base] = c.id - - return (201, c.to_json()) - - _handlers = [ - ('POST', r'git/refs', _create_ref), - ('GET', r'git/refs/(?P.*)', _read_ref), - ('PATCH', r'git/refs/(?P.*)', _write_ref), - - # nb: there's a different commits at /commits with repo-level metadata - ('GET', r'git/commits/(?P[0-9A-Fa-f]{40})', _read_commit), - ('POST', r'git/commits', _create_commit), - ('GET', r'commits/(?P[^/]+)/status', _read_statuses), - - ('GET', r'issues/(?P\d+)', _read_issue), - ('GET', r'issues/(?P\d+)/comments', _read_issue_comments), - ('POST', r'issues/(?P\d+)/comments', _create_issue_comment), - - ('POST', r'merges', _do_merge), - - ('GET', r'pulls/(?P\d+)', _read_pr), - ('PATCH', r'pulls/(?P\d+)', _edit_pr), - ('GET', r'pulls/(?P\d+)/reviews', _read_pr_reviews), - ('GET', r'pulls/(?P\d+)/commits', _read_pr_commits), - - ('GET', r'issues/(?P\d+)/labels', _get_labels), - ('PUT', r'issues/(?P\d+)/labels', _reset_labels), - ] - -class Issue(object): - def __init__(self, repo, title, body): - self.repo = repo - self._title = title - self._body = body - self.number = max(repo.issues or [0]) + 1 - self._comments = [] - self.labels = set() - repo.issues[self.number] = self - - @property - def comments(self): - return [(c.user, c.body) for c in self._comments] - - def post_comment(self, body, user): - c = Comment(user, body) - self._comments.append(c) - self.repo.notify('issue_comment', self, 'created', c) - return c.id - - def edit_comment(self, cid, newbody, user): - c = next(c for c in self._comments if c.id == cid) - c.body = newbody - self.repo.notify('issue_comment', self, 'edited', c) - - def delete_comment(self, cid, user): - c = next(c for c in self._comments if c.id == cid) - self._comments.remove(c) - self.repo.notify('issue_comment', self, 'deleted', c) - - @property - def title(self): - return self._title - @title.setter - def title(self, value): - self._title = value - - @property - def body(self): - return self._body - @body.setter - def body(self, value): - self._body = value -class Comment: - _cseq = itertools.count() - def __init__(self, user, body, id=None): - self.user = user - self.body = body - self.id = id or next(self._cseq) - -class PR(Issue): - def __init__(self, repo, title, 
body, target, ctid, user, label): - super(PR, self).__init__(repo, title, body) - assert ctid in repo.objects - repo.refs['pull/%d' % self.number] = ctid - self.head = ctid - self._base = target - self.user = user - self.label = label - self.state = 'open' - - repo.notify('pull_request', 'opened', self) - - @Issue.title.setter - def title(self, value): - old = self.title - Issue.title.fset(self, value) - self.repo.notify('pull_request', 'edited', self, { - 'title': {'from': old} - }) - @Issue.body.setter - def body(self, value): - old = self.body - Issue.body.fset(self, value) - self.repo.notify('pull_request', 'edited', self, { - 'body': {'from': old} - }) - @property - def base(self): - return self._base - @base.setter - def base(self, value): - old, self._base = self._base, value - self.repo.notify('pull_request', 'edited', self, { - 'base': {'ref': {'from': old}} - }) - - def push(self, sha): - self.head = sha - self.repo.notify('pull_request', 'synchronize', self) - - def open(self): - assert self.state == 'closed' - self.state = 'open' - self.repo.notify('pull_request', 'reopened', self) - - def close(self): - self.state = 'closed' - self.repo.notify('pull_request', 'closed', self) - - @property - def commits(self): - store = self.repo.objects - target = self.repo.commit('heads/%s' % self.base).id - - base = {h for h, _ in git.walk_ancestors(store, target, False)} - own = [ - h for h, _ in git.walk_ancestors(store, self.head, False) - if h not in base - ] - return list(map(self.repo.commit, reversed(own))) - - def post_review(self, state, user, body): - self.comments.append((user, "REVIEW %s\n\n%s " % (state, body))) - self.repo.notify('pull_request_review', state, self, user, body) - -FMT = '%Y-%m-%dT%H:%M:%SZ' -class Author(object): - __slots__ = ['name', 'email', 'date'] - - def __init__(self, name, email, date): - self.name = name - self.email = email - self.date = date or datetime.datetime.now().strftime(FMT) - - @classmethod - def from_(cls, d): - if not d: - return None - return Author(**d) - - def to_json(self): - return { - 'name': self.name, - 'email': self.email, - 'date': self.date, - } - - def __str__(self): - return '%s <%s> %d Z' % ( - self.name, - self.email, - int(datetime.datetime.strptime(self.date, FMT).timestamp()) - ) - -class Commit(object): - __slots__ = ['tree', 'message', 'author', 'committer', 'parents', 'statuses'] - def __init__(self, tree, message, author, committer, parents): - self.tree = tree - self.message = message.strip() - self.author = Author.from_(author) or Author('', '', '') - self.committer = Author.from_(committer) or self.author - self.parents = parents - self.statuses = [] - - @property - def id(self): - return git.make_commit(self.tree, self.message, self.author, self.committer, parents=self.parents)[0] - - def to_json(self): - return { - "sha": self.id, - "commit": { - "author": self.author.to_json(), - "committer": self.committer.to_json(), - "message": self.message, - "tree": {"sha": self.tree}, - }, - "parents": [{"sha": p} for p in self.parents] - } - - def __str__(self): - parents = '\n'.join('parent {}'.format(p) for p in self.parents) + '\n' - return """commit {} -tree {} -{}author {} -committer {} - -{}""".format( - self.id, - self.tree, - parents, - self.author, - self.committer, - self.message -) - -class Client(werkzeug.test.Client): - def __init__(self, application, path): - self._webhook_path = path - self.secret = None - super(Client, self).__init__(application, werkzeug.wrappers.BaseResponse) - - def _make_env(self, 
event_type, data): - headers = [('X-Github-Event', event_type)] - body = json.dumps(data).encode('utf-8') - if self.secret: - sig = hmac.new(self.secret.encode('ascii'), body, hashlib.sha1).hexdigest() - headers.append(('X-Hub-Signature', 'sha1=' + sig)) - - return werkzeug.test.EnvironBuilder( - path=self._webhook_path, - method='POST', - headers=headers, - content_type='application/json', - data=body, - ) - def _repo(self, name): - return { - 'name': name.split('/')[1], - 'full_name': name, - } - - def pull_request(self, action, pr, changes=None): - assert action in ('opened', 'reopened', 'closed', 'synchronize', 'edited') - return self.open(self._make_env( - 'pull_request', { - 'action': action, - 'pull_request': self._pr(pr), - 'repository': self._repo(pr.repo.name), - 'sender': {'login': '<>'}, - **({'changes': changes} if changes else {}) - } - )) - - def pull_request_review(self, action, pr, user, body): - """ - :type action: 'APPROVE' | 'REQUEST_CHANGES' | 'COMMENT' - :type pr: PR - :type user: str - :type body: str - """ - assert action in ('APPROVE', 'REQUEST_CHANGES', 'COMMENT') - return self.open(self._make_env( - 'pull_request_review', { - 'action': 'submitted', - 'review': { - 'state': 'APPROVED' if action == 'APPROVE' else action, - 'body': body, - 'user': {'login': user}, - }, - 'pull_request': self._pr(pr), - 'repository': self._repo(pr.repo.name), - } - )) - - def status(self, repository, context, state, sha, kw): - assert state in ('success', 'failure', 'pending') - return self.open(self._make_env( - 'status', { - 'name': repository, - 'context': context, - 'state': state, - 'sha': sha, - 'repository': self._repo(repository), - 'target_url': None, - 'description': None, - **(kw or {}) - } - )) - - def issue_comment(self, issue, action, comment): - assert action in ('created', 'edited', 'deleted') - contents = { - 'action': action, - 'issue': { 'number': issue.number }, - 'repository': self._repo(issue.repo.name), - 'comment': { 'id': comment.id, 'body': comment.body, 'user': {'login': comment.user } }, - } - if isinstance(issue, PR): - contents['issue']['pull_request'] = { 'url': 'fake' } - return self.open(self._make_env('issue_comment', contents)) - - def _pr(self, pr): - """ - :type pr: PR - """ - return { - 'number': pr.number, - 'head': { - 'sha': pr.head, - 'label': pr.label, - }, - 'base': { - 'ref': pr.base, - 'repo': self._repo(pr.repo.name), - }, - 'title': pr.title, - 'body': pr.body, - 'commits': len(pr.commits), - 'user': {'login': pr.user}, - } diff --git a/runbot_merge/tests/fake_github/git.py b/runbot_merge/tests/fake_github/git.py deleted file mode 100644 index 585f739b..00000000 --- a/runbot_merge/tests/fake_github/git.py +++ /dev/null @@ -1,126 +0,0 @@ -import collections -import hashlib - -def make_obj(t, contents): - assert t in ('blob', 'tree', 'commit') - obj = b'%s %d\0%s' % (t.encode('utf-8'), len(contents), contents) - return hashlib.sha1(obj).hexdigest(), obj - -def make_blob(contents): - return make_obj('blob', contents) - -def make_tree(store, objs): - """ objs should be a mapping or iterable of (name, object) - """ - if isinstance(objs, collections.Mapping): - objs = objs.items() - - return make_obj('tree', b''.join( - b'%s %s\0%s' % ( - b'040000' if isinstance(obj, collections.Mapping) else b'100644', - name.encode('utf-8'), - h.encode('utf-8'), - ) - for name, h in sorted(objs) - for obj in [store[h]] - # TODO: check that obj is a blob or tree - )) - -def make_commit(tree, message, author, committer=None, parents=()): - contents = ['tree 
%s' % tree] - for parent in parents: - contents.append('parent %s' % parent) - contents.append('author %s' % author) - contents.append('committer %s' % committer or author) - contents.append('') - contents.append(message) - - return make_obj('commit', '\n'.join(contents).encode('utf-8')) - -def walk_ancestors(store, commit, exclude_self=True): - """ - :param store: mapping of hashes to commit objects (w/ a parents attribute) - :param str commit: starting commit's hash - :param exclude_self: whether the starting commit shoudl be returned as - part of the sequence - :rtype: Iterator[(str, int)] - """ - q = [(commit, 0)] - while q: - node, distance = q.pop() - q.extend((p, distance+1) for p in store[node].parents) - if not (distance == 0 and exclude_self): - yield (node, distance) - -def is_ancestor(store, candidate, of): - # could have candidate == of after all - return any( - current == candidate - for current, _ in walk_ancestors(store, of, exclude_self=False) - ) - - -def merge_base(store, c1, c2): - """ Find LCA between two commits. Brute-force: get all ancestors of A, - all ancestors of B, intersect, and pick the one with the lowest distance - """ - a1 = walk_ancestors(store, c1, exclude_self=False) - # map of sha:distance - a2 = dict(walk_ancestors(store, c2, exclude_self=False)) - # find lowest ancestor by distance(ancestor, c1) + distance(ancestor, c2) - _distance, lca = min( - (d1 + d2, a) - for a, d1 in a1 - for d2 in [a2.get(a)] - if d2 is not None - ) - return lca - -def merge_objects(store, b, o1, o2): - """ Merges trees and blobs. - - Store = Mapping - Blob = bytes - Tree = Mapping - """ - # FIXME: handle None input (similarly named entry added in two - # branches, or delete in one branch & change in other) - if not (b and o1 or o2): - raise ValueError("Don't know how to merge additions/removals yet") - b, o1, o2 = store[b], store[o1], store[o2] - if any(isinstance(o, bytes) for o in [b, o1, o2]): - raise TypeError("Don't know how to merge blobs") - - entries = sorted(set(b).union(o1, o2)) - - t = {} - for entry in entries: - base = b.get(entry) - e1 = o1.get(entry) - e2 = o2.get(entry) - if e1 == e2: - merged = e1 # either no change or same change on both side - elif base == e1: - merged = e2 # e1 did not change, use e2 - elif base == e2: - merged = e1 # e2 did not change, use e1 - else: - merged = merge_objects(store, base, e1, e2) - # None => entry removed - if merged is not None: - t[entry] = merged - - # FIXME: fix partial redundancy with make_tree - tid, _ = make_tree(store, t) - store[tid] = t - return tid - -def read_object(store, tid): - # recursively reads tree of objects - o = store[tid] - if isinstance(o, bytes): - return o.decode() - return { - k: read_object(store, v) - for k, v in o.items() - } diff --git a/runbot_merge/tests/local.py b/runbot_merge/tests/local.py deleted file mode 100644 index 4bd88b52..00000000 --- a/runbot_merge/tests/local.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- -import inspect -import logging - -import pytest -import werkzeug.test, werkzeug.wrappers - -import odoo - -import fake_github - -@pytest.fixture(scope='session') -def remote_p(): - return False - -@pytest.fixture -def gh(): - with fake_github.Github() as gh: - yield gh - -@pytest.fixture -def db(dbcache): - return dbcache - -@pytest.fixture(scope='session') -def registry(request): - """ Set up Odoo & yields a registry to the specified db - """ - db = request.config.getoption('--db') - addons = request.config.getoption('--addons-path') - 
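# An illustrative sketch of the object hashing implemented by the
# fake_github.git helpers above; git_object_id is a hypothetical helper that
# mirrors make_obj(): a git object id is the SHA-1 of b"<type> <size>\0<payload>".
# The empty blob therefore always hashes to
# e69de29bb2d1d6434b8b29ae775ad8c2e48c5391, the blob id shown in this patch's
# index line for the emptied runbot_merge/tests/remote.py.
import hashlib

def git_object_id(obj_type, payload):
    # header is "<type> <size>\0", followed by the raw payload
    header = b'%s %d\0' % (obj_type.encode('utf-8'), len(payload))
    return hashlib.sha1(header + payload).hexdigest()

assert git_object_id('blob', b'') == 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'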
odoo.tools.config.parse_config(['--addons-path', addons, '-d', db, '--db-filter', db]) - try: - odoo.service.db._create_empty_database(db) - odoo.service.db._initialize_db(None, db, False, False, 'admin') - except odoo.service.db.DatabaseExists: - pass - - #odoo.service.server.load_server_wide_modules() - #odoo.service.server.preload_registries([db]) - - with odoo.api.Environment.manage(): - # ensure module is installed - r0 = odoo.registry(db) - with r0.cursor() as cr: - env = odoo.api.Environment(cr, 1, {}) - [mod] = env['ir.module.module'].search([('name', '=', 'runbot_merge')]) - mod.button_immediate_install() - - from odoo.addons.runbot_merge.models import pull_requests - pull_requests.STAGING_SLEEP = 0 - yield odoo.registry(db) - -@pytest.fixture -def cr(registry): - # in v12, enter_test_mode flags an existing cursor while in v11 it sets one up - if inspect.signature(registry.enter_test_mode).parameters: - with registry.cursor() as cr: - registry.enter_test_mode(cr) - yield cr - registry.leave_test_mode() - cr.rollback() - else: - registry.enter_test_mode() - with registry.cursor() as cr: - yield cr - cr.rollback() - registry.leave_test_mode() - -@pytest.fixture -def env(cr): - env = odoo.api.Environment(cr, odoo.SUPERUSER_ID, {}) - ctx = env['res.users'].context_get() - yield env(context=ctx) - -@pytest.fixture -def owner(): - return 'user' - -@pytest.fixture(autouse=True) -def users(env): - env['res.partner'].create({ - 'name': "Reviewer", - 'github_login': 'reviewer', - 'reviewer': True, - 'email': "reviewer@example.com", - }) - env['res.partner'].create({ - 'name': "Self Reviewer", - 'github_login': 'self_reviewer', - 'self_reviewer': True, - }) - - return { - 'reviewer': 'reviewer', - 'self_reviewer': 'self_reviewer', - 'other': 'other', - 'user': 'user', - } - -@pytest.fixture -def project(env): - return env['runbot_merge.project'].create({ - 'name': 'odoo', - 'github_token': 'okokok', - 'github_prefix': 'hansen', - 'branch_ids': [(0, 0, {'name': 'master'})], - 'required_statuses': 'legal/cla,ci/runbot', - }) - -@pytest.fixture -def make_repo(gh, project): - def make_repo(name): - fullname = 'org/' + name - project.write({'repo_ids': [(0, 0, {'name': fullname})]}) - return gh.repo(fullname, hooks=[ - ((odoo.http.root, '/runbot_merge/hooks'), [ - 'pull_request', 'issue_comment', 'status', 'pull_request_review' - ]) - ]) - return make_repo - -@pytest.fixture -def page(): - c = werkzeug.test.Client(odoo.http.root, werkzeug.wrappers.BaseResponse) - def get(url): - r = c.get(url) - assert r.status_code == 200 - return r.data - return get - -# TODO: project fixture -# TODO: repos (indirect/parameterize?) w/ WS hook -# + repo proxy object diff --git a/runbot_merge/tests/remote.py b/runbot_merge/tests/remote.py index c137debd..e69de29b 100644 --- a/runbot_merge/tests/remote.py +++ b/runbot_merge/tests/remote.py @@ -1,724 +0,0 @@ -""" -Replaces relevant fixtures to allow running the test suite against github -actual (instead of a mocked version). - -To enable this plugin, load it using ``-p runbot_merge.tests.remote`` - -.. 
WARNING:: this requires running ``python -mpytest`` from the root of the - runbot repository, running ``pytest`` directly will not pick it - up (as it does not setup ``sys.path``) - -Configuration: - -* an ``odoo`` binary in the path, which runs the relevant odoo; to ensure a - clean slate odoo is re-started and a new database is created before each - test - -* pytest.ini (at the root of the runbot repo) with the following sections and - keys - - ``github`` - - owner, the name of the account (personal or org) under which test repos - will be created & deleted - - token, either personal or oauth, must have the scopes ``public_repo``, - ``delete_repo`` and ``admin:repo_hook``, if personal the owner must be - the corresponding user account, not an org - - ``role_reviewer``, ``role_self_reviewer`` and ``role_other`` - - name (optional) - - token, a personal access token with the ``public_repo`` scope (otherwise - the API can't leave comments) - - .. warning:: the accounts must *not* be flagged, or the webhooks on - commenting or creating reviews will not trigger, and the - tests will fail - -* either ``ngrok`` or ``lt`` (localtunnel) available on the path. ngrok with - a configured account is recommended: ngrok is more reliable than localtunnel - but a free account is necessary to get a high-enough rate limiting for some - of the multi-repo tests to work - -Finally the tests aren't 100% reliable as they rely on quite a bit of network -traffic, it's possible that the tests fail due to network issues rather than -logic errors. -""" -import base64 -import collections -import itertools -import re -import socket -import subprocess -import time -import xmlrpc.client - -import pytest -import requests - -# Should be pytest_configure, but apparently once a plugin is registered -# its fixtures don't get unloaded even if it's unregistered, so prevent -# registering local entirely. This works because explicit plugins (-p) -# are loaded before conftest and conftest-specified plugins (officially: -# https://docs.pytest.org/en/latest/writing_plugins.html#plugin-discovery-order-at-tool-startup). - -def pytest_addhooks(pluginmanager): - pluginmanager.set_blocked('local') - -PORT=8069 - -@pytest.fixture(scope='session') -def port(): - return PORT - -def wait_for_hook(n=1): - # TODO: find better way to wait for roundtrip of actions which can trigger webhooks - time.sleep(10 * n) - -@pytest.fixture -def page(): - s = requests.Session() - def get(url): - r = s.get('http://localhost:{}{}'.format(PORT, url)) - r.raise_for_status() - return r.content - return get - -def wait_for_server(db, timeout=120): - """ Polls for server to be response & have installed our module. 
- - Raises socket.timeout on failure - """ - limit = time.time() + timeout - while True: - try: - uid = xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/common'.format(PORT))\ - .authenticate(db, 'admin', 'admin', {}) - xmlrpc.client.ServerProxy( - 'http://localhost:{}/xmlrpc/2/object'.format(PORT)) \ - .execute_kw(db, uid, 'admin', 'runbot_merge.batch', 'search', - [[]], {'limit': 1}) - break - except ConnectionRefusedError: - if time.time() > limit: - raise socket.timeout() - -@pytest.fixture(scope='session') -def remote_p(): - return True - -@pytest.fixture -def env(request): - """ - creates a db & an environment object as a proxy to xmlrpc calls - """ - db = request.config.getoption('--db') - p = subprocess.Popen([ - 'odoo', '--http-port', str(PORT), - '--addons-path', request.config.getoption('--addons-path'), - '-d', db, '-i', 'runbot_merge', - '--load', 'base,web,runbot_merge', - '--max-cron-threads', '0', # disable cron threads (we're running crons by hand) - ]) - - try: - wait_for_server(db) - - yield Environment(PORT, db) - - db_service = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/db'.format(PORT)) - db_service.drop('admin', db) - finally: - p.terminate() - p.wait(timeout=30) - -@pytest.fixture(autouse=True) -def users(users_): - return users_ - -@pytest.fixture -def project(env, config): - return env['runbot_merge.project'].create({ - 'name': 'odoo', - 'github_token': config['github']['token'], - 'github_prefix': 'hansen', - 'branch_ids': [(0, 0, {'name': 'master'})], - 'required_statuses': 'legal/cla,ci/runbot', - }) - -@pytest.fixture(scope='session') -def github(config): - s = requests.Session() - s.headers['Authorization'] = 'token {}'.format(config['github']['token']) - return s - -@pytest.fixture -def owner(config): - return config['github']['owner'] - -@pytest.fixture -def make_repo(request, config, project, github, tunnel, users, owner): - # check whether "owner" is a user or an org, as repo-creation endpoint is - # different - q = github.get('https://api.github.com/users/{}'.format(owner)) - q.raise_for_status() - if q.json().get('type') == 'Organization': - endpoint = 'https://api.github.com/orgs/{}/repos'.format(owner) - else: - # if not creating repos under an org, ensure the token matches the owner - assert users['user'] == owner, "when testing against a user (rather than an organisation) the API token must be the user's" - endpoint = 'https://api.github.com/user/repos' - - repos = [] - def repomaker(name): - fullname = '{}/{}'.format(owner, name) - repo_url = 'https://api.github.com/repos/{}'.format(fullname) - if request.config.getoption('--no-delete'): - if github.head(repo_url).ok: - pytest.skip("Repository {} already exists".format(fullname)) - else: - # just try to delete the repo, we don't really care - if github.delete(repo_url).ok: - # if we did delete a repo, wait a bit as gh might need to - # propagate the thing? 
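# A minimal sketch of the XML-RPC round-trip that wait_for_server() above and
# the Environment proxy below are built on; the port, database name and
# credentials are placeholders for the values taken from the test setup.
import xmlrpc.client

db = 'template_db'  # placeholder
common = xmlrpc.client.ServerProxy('http://localhost:8069/xmlrpc/2/common')
uid = common.authenticate(db, 'admin', 'admin', {})
models = xmlrpc.client.ServerProxy('http://localhost:8069/xmlrpc/2/object')
# any ORM model/method can be reached through execute_kw(db, uid, password, ...)
pr_ids = models.execute_kw(
    db, uid, 'admin',
    'runbot_merge.pull_requests', 'search',
    [[('number', '=', 1)]], {'limit': 1},
)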
- time.sleep(30) - - # create repo - r = github.post(endpoint, json={ - 'name': name, - 'has_issues': False, - 'has_projects': False, - 'has_wiki': False, - 'auto_init': False, - # at least one merge method must be enabled :( - 'allow_squash_merge': False, - # 'allow_merge_commit': False, - 'allow_rebase_merge': False, - }) - r.raise_for_status() - repos.append(fullname) - # unwatch repo - github.put('{}/subscription'.format(repo_url), json={ - 'subscribed': False, - 'ignored': True, - }) - # create webhook - github.post('{}/hooks'.format(repo_url), json={ - 'name': 'web', - 'config': { - 'url': '{}/runbot_merge/hooks'.format(tunnel), - 'content_type': 'json', - 'insecure_ssl': '1', - }, - 'events': ['pull_request', 'issue_comment', 'status', 'pull_request_review'] - }) - project.write({'repo_ids': [(0, 0, {'name': fullname})]}) - - role_tokens = { - n[5:]: vals['token'] - for n, vals in config.items() - if n.startswith('role_') - } - role_tokens['user'] = config['github']['token'] - - return Repo(github, fullname, role_tokens) - - yield repomaker - - if not request.config.getoption('--no-delete'): - for repo in reversed(repos): - github.delete('https://api.github.com/repos/{}'.format(repo)).raise_for_status() - -class Environment: - def __init__(self, port, db): - self._uid = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/common'.format(port)).authenticate(db, 'admin', 'admin', {}) - self._object = xmlrpc.client.ServerProxy('http://localhost:{}/xmlrpc/2/object'.format(port)) - self._db = db - - def __call__(self, model, method, *args, **kwargs): - return self._object.execute_kw( - self._db, self._uid, 'admin', - model, method, - args, kwargs - ) - - def __getitem__(self, name): - return Model(self, name) - -class Model: - __slots__ = ['_env', '_model', '_ids', '_fields'] - def __init__(self, env, model, ids=(), fields=None): - object.__setattr__(self, '_env', env) - object.__setattr__(self, '_model', model) - object.__setattr__(self, '_ids', tuple(ids or ())) - - object.__setattr__(self, '_fields', fields or self._env(self._model, 'fields_get', attributes=['type', 'relation'])) - - @property - def ids(self): - return self._ids - - def __bool__(self): - return bool(self._ids) - - def __len__(self): - return len(self._ids) - - def __eq__(self, other): - if not isinstance(other, Model): - return NotImplemented - return self._model == other._model and self._ids == other._ids - - def __repr__(self): - return "{}({})".format(self._model, ', '.join(str(id) for id in self._ids)) - - def exists(self): - ids = self._env(self._model, 'exists', self._ids) - return Model(self._env, self._model, ids) - - def search(self, domain, **kw): - ids = self._env(self._model, 'search', domain, **kw) - return Model(self._env, self._model, ids) - - def create(self, values): - return Model(self._env, self._model, [self._env(self._model, 'create', values)]) - - def write(self, values): - return self._env(self._model, 'write', self._ids, values) - - def read(self, fields): - return self._env(self._model, 'read', self._ids, fields) - - def unlink(self): - return self._env(self._model, 'unlink', self._ids) - - def _check_progress(self): - assert self._model == 'runbot_merge.project' - self._run_cron('runbot_merge.merge_cron') - - def _check_fetch(self): - assert self._model == 'runbot_merge.project' - self._run_cron('runbot_merge.fetch_prs_cron') - - def _send_feedback(self): - assert self._model == 'runbot_merge.project' - self._run_cron('runbot_merge.feedback_cron') - - def _check_linked_prs_statuses(self): 
- assert self._model == 'runbot_merge.pull_requests' - self._run_cron('runbot_merge.check_linked_prs_status') - - def _notify(self): - assert self._model == 'runbot_merge.commit' - self._run_cron('runbot_merge.process_updated_commits') - - def _run_cron(self, xid): - _, model, cron_id = self._env('ir.model.data', 'xmlid_lookup', xid) - assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model) - self._env('ir.cron', 'method_direct_trigger', [cron_id]) - # sleep for some time as a lot of crap may have happened (?) - wait_for_hook() - - def __getattr__(self, fieldname): - if not self._ids: - return False - - assert len(self._ids) == 1 - if fieldname == 'id': - return self._ids[0] - - val = self.read([fieldname])[0][fieldname] - field_description = self._fields[fieldname] - if field_description['type'] in ('many2one', 'one2many', 'many2many'): - val = val or [] - if field_description['type'] == 'many2one': - val = val[:1] # (id, name) => [id] - return Model(self._env, field_description['relation'], val) - - return val - - def __setattr__(self, fieldname, value): - assert self._fields[fieldname]['type'] not in ('many2one', 'one2many', 'many2many') - self._env(self._model, 'write', self._ids, {fieldname: value}) - - def __iter__(self): - return ( - Model(self._env, self._model, [i], fields=self._fields) - for i in self._ids - ) - - def mapped(self, path): - field, *rest = path.split('.', 1) - descr = self._fields[field] - if descr['type'] in ('many2one', 'one2many', 'many2many'): - result = Model(self._env, descr['relation']) - for record in self: - result |= getattr(record, field) - - return result.mapped(rest[0]) if rest else result - - assert not rest - return [getattr(r, field) for r in self] - - def __or__(self, other): - if not isinstance(other, Model) or self._model != other._model: - return NotImplemented - - return Model(self._env, self._model, {*self._ids, *other._ids}, fields=self._fields) - - def invalidate_cache(self, fnames=None, ids=None): - pass # not a concern when every access is an RPC call - -class Repo: - __slots__ = ['name', '_session', '_tokens'] - def __init__(self, session, name, user_tokens): - self.name = name - self._session = session - self._tokens = user_tokens - - def set_secret(self, secret): - r = self._session.get( - 'https://api.github.com/repos/{}/hooks'.format(self.name)) - response = r.json() - assert 200 <= r.status_code < 300, response - [hook] = response - - r = self._session.patch('https://api.github.com/repos/{}/hooks/{}'.format(self.name, hook['id']), json={ - 'config': {**hook['config'], 'secret': secret}, - }) - assert 200 <= r.status_code < 300, r.json() - - def get_ref(self, ref): - if re.match(r'[0-9a-f]{40}', ref): - return ref - - assert ref.startswith('heads/') - r = self._session.get('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, ref)) - response = r.json() - - assert 200 <= r.status_code < 300, response - assert isinstance(response, dict), "{} doesn't exist (got {} refs)".format(ref, len(response)) - assert response['object']['type'] == 'commit' - - return response['object']['sha'] - - def make_ref(self, name, commit, force=False): - assert name.startswith('heads/') - r = self._session.post('https://api.github.com/repos/{}/git/refs'.format(self.name), json={ - 'ref': 'refs/' + name, - 'sha': commit, - }) - if force and r.status_code == 422: - r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': True}) - assert 200 <= r.status_code < 
300, r.json() - wait_for_hook() - - def protect(self, branch): - r = self._session.put('https://api.github.com/repos/{}/branches/{}/protection'.format(self.name, branch), json={ - 'required_status_checks': None, - 'enforce_admins': True, - 'required_pull_request_reviews': None, - 'restrictions': None, - }) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - def update_ref(self, name, commit, force=False): - r = self._session.patch('https://api.github.com/repos/{}/git/refs/{}'.format(self.name, name), json={'sha': commit, 'force': force}) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - def make_commit(self, ref, message, author, committer=None, tree=None, wait=True): - assert tree, "not supporting changes/updates" - - if not ref: # None / [] - # apparently github refuses to create trees/commits in empty repos - # using the regular API... - [(path, contents)] = tree.items() - r = self._session.put('https://api.github.com/repos/{}/contents/{}'.format(self.name, path), json={ - 'path': path, - 'message': message, - 'content': base64.b64encode(contents.encode('utf-8')).decode('ascii'), - 'branch': 'nootherwaytocreateaninitialcommitbutidontwantamasteryet%d' % next(ct) - }) - assert 200 <= r.status_code < 300, r.json() - return r.json()['commit']['sha'] - - if isinstance(ref, list): - refs = ref - else: - refs = [ref] - parents = [self.get_ref(r) for r in refs] - - r = self._session.post('https://api.github.com/repos/{}/git/trees'.format(self.name), json={ - 'tree': [ - {'path': k, 'mode': '100644', 'type': 'blob', 'content': v} - for k, v in tree.items() - ] - }) - assert 200 <= r.status_code < 300, r.json() - h = r.json()['sha'] - - data = { - 'parents': parents, - 'message': message, - 'tree': h, - } - if author: - data['author'] = author - if committer: - data['committer'] = committer - - r = self._session.post('https://api.github.com/repos/{}/git/commits'.format(self.name), json=data) - assert 200 <= r.status_code < 300, r.json() - - commit_sha = r.json()['sha'] - - # if the first parent is an actual ref (rather than a hash) update it - if parents[0] != refs[0]: - self.update_ref(refs[0], commit_sha) - elif wait: - wait_for_hook() - return commit_sha - - def make_pr(self, title, body, target, ctid, user, label=None): - # github only allows PRs from actual branches, so create an actual branch - ref = label or "temp_trash_because_head_must_be_a_ref_%d" % next(ct) - self.make_ref('heads/' + ref, ctid) - - r = self._session.post( - 'https://api.github.com/repos/{}/pulls'.format(self.name), - json={'title': title, 'body': body, 'head': ref, 'base': target,}, - headers={'Authorization': 'token {}'.format(self._tokens[user])} - ) - assert 200 <= r.status_code < 300, r.json() - # wait extra for PRs creating many PRs and relying on their ordering - # (test_batching & test_batching_split) - # would be nice to make the tests more reliable but not quite sure - # how... - wait_for_hook(2) - return PR(self, 'heads/' + ref, r.json()['number']) - - def post_status(self, ref, status, context='default', **kw): - assert status in ('error', 'failure', 'pending', 'success') - r = self._session.post('https://api.github.com/repos/{}/statuses/{}'.format(self.name, self.get_ref(ref)), json={ - 'state': status, - 'context': context, - **kw - }) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - def commit(self, ref): - # apparently heads/ ~ refs/heads/ but are not - # necessarily up to date ??? 
unlike the git ref system where :ref - # starts at heads/ - if ref.startswith('heads/'): - ref = 'refs/' + ref - - r = self._session.get('https://api.github.com/repos/{}/commits/{}'.format(self.name, ref)) - response = r.json() - assert 200 <= r.status_code < 300, response - - c = response['commit'] - return Commit( - id=response['sha'], - tree=c['tree']['sha'], - message=c['message'], - author=c['author'], - committer=c['committer'], - parents=[p['sha'] for p in response['parents']], - ) - - def read_tree(self, commit): - """ read tree object from commit - - :param Commit commit: - :rtype: Dict[str, str] - """ - r = self._session.get('https://api.github.com/repos/{}/git/trees/{}'.format(self.name, commit.tree)) - assert 200 <= r.status_code < 300, r.json() - - # read tree's blobs - tree = {} - for t in r.json()['tree']: - assert t['type'] == 'blob', "we're *not* doing recursive trees in test cases" - r = self._session.get(t['url']) - assert 200 <= r.status_code < 300, r.json() - # assume all test content is textual - tree[t['path']] = base64.b64decode(r.json()['content']).decode() - - return tree - - def is_ancestor(self, sha, of): - return any(c['sha'] == sha for c in self.log(of)) - - def log(self, ref_or_sha): - for page in itertools.count(1): - r = self._session.get( - 'https://api.github.com/repos/{}/commits'.format(self.name), - params={'sha': ref_or_sha, 'page': page} - ) - assert 200 <= r.status_code < 300, r.json() - yield from r.json() - if not r.links.get('next'): - return - -ct = itertools.count() - -Commit = collections.namedtuple('Commit', 'id tree message author committer parents') - -from odoo.tools.func import lazy_property -class LabelsProxy(collections.abc.MutableSet): - def __init__(self, pr): - self._pr = pr - - @property - def _labels(self): - pr = self._pr - r = pr._session.get('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number)) - assert r.ok, r.json() - return {label['name'] for label in r.json()} - - def __repr__(self): - return '' % self._labels - - def __eq__(self, other): - if isinstance(other, collections.abc.Set): - return other == self._labels - return NotImplemented - - def __contains__(self, label): - return label in self._labels - - def __iter__(self): - return iter(self._labels) - - def __len__(self): - return len(self._labels) - - def add(self, label): - pr = self._pr - r = pr._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ - 'labels': [label] - }) - assert r.ok, r.json() - - def discard(self, label): - pr = self._pr - r = pr._session.delete('https://api.github.com/repos/{}/issues/{}/labels/{}'.format(pr.repo.name, pr.number, label)) - # discard should do nothing if the item didn't exist in the set - assert r.ok or r.status_code == 404, r.json() - - def update(self, *others): - pr = self._pr - # because of course that one is not provided by MutableMapping... 
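# Minimal standalone illustration (hypothetical TinySet class) of why
# LabelsProxy only needs the five MutableSet abstract methods:
# collections.abc.MutableSet derives remove(), clear(), |=, -= and friends
# from __contains__/__iter__/__len__/add/discard, but it does not provide
# update(), hence the explicit implementation below.
import collections.abc

class TinySet(collections.abc.MutableSet):
    def __init__(self, items=()):
        self._items = set(items)
    def __contains__(self, item):
        return item in self._items
    def __iter__(self):
        return iter(self._items)
    def __len__(self):
        return len(self._items)
    def add(self, item):
        self._items.add(item)
    def discard(self, item):
        self._items.discard(item)

s = TinySet({'seen 🙂'})
s |= {'CI 🤖'}       # in-place union comes from the MutableSet mixins
s.remove('seen 🙂')  # so does remove()
assert not hasattr(collections.abc.MutableSet, 'update')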
- r = pr._session.post('https://api.github.com/repos/{}/issues/{}/labels'.format(pr.repo.name, pr.number), json={ - 'labels': list(set(itertools.chain.from_iterable(others))) - }) - assert r.ok, r.json() - -class PR: - __slots__ = ['number', '_branch', 'repo', 'labels'] - def __init__(self, repo, branch, number): - """ - :type repo: Repo - :type branch: str - :type number: int - """ - self.number = number - self._branch = branch - self.repo = repo - self.labels = LabelsProxy(self) - - @property - def _session(self): - return self.repo._session - - @property - def _pr(self): - r = self._session.get('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number)) - assert 200 <= r.status_code < 300, r.json() - return r.json() - - @property - def head(self): - return self._pr['head']['sha'] - - @property - def user(self): - return self._pr['user']['login'] - - @property - def state(self): - return self._pr['state'] - - @property - def comments(self): - r = self._session.get('https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number)) - assert 200 <= r.status_code < 300, r.json() - return [ - (c['user']['login'], c['body']) - for c in r.json() - ] - - def _set_prop(self, prop, value): - r = self._session.patch('https://api.github.com/repos/{}/pulls/{}'.format(self.repo.name, self.number), json={ - prop: value - }) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - @property - def title(self): - raise NotImplementedError() - title = title.setter(lambda self, v: self._set_prop('title', v)) - - @property - def base(self): - raise NotImplementedError() - base = base.setter(lambda self, v: self._set_prop('base', v)) - - def post_comment(self, body, user): - r = self._session.post( - 'https://api.github.com/repos/{}/issues/{}/comments'.format(self.repo.name, self.number), - json={'body': body}, - headers={'Authorization': 'token {}'.format(self.repo._tokens[user])} - ) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - return r.json()['id'] - - def edit_comment(self, cid, body, user): - r = self._session.patch( - 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), - json={'body': body}, - headers={'Authorization': 'token {}'.format(self.repo._tokens[user])} - ) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() - - def delete_comment(self, cid, user): - r = self._session.delete( - 'https://api.github.com/repos/{}/issues/comments/{}'.format(self.repo.name, cid), - headers={'Authorization': 'token {}'.format(self.repo._tokens[user])} - ) - assert r.status_code == 204, r.json() - wait_for_hook() - - def open(self): - self._set_prop('state', 'open') - - def close(self): - self._set_prop('state', 'closed') - - def push(self, sha): - self.repo.update_ref(self._branch, sha, force=True) - - def post_review(self, state, user, body): - r = self._session.post( - 'https://api.github.com/repos/{}/pulls/{}/reviews'.format(self.repo.name, self.number), - json={'body': body, 'event': state,}, - headers={'Authorization': 'token {}'.format(self.repo._tokens[user])} - ) - assert 200 <= r.status_code < 300, r.json() - wait_for_hook() diff --git a/runbot_merge/tests/test_basic.py b/runbot_merge/tests/test_basic.py index 6e327325..6ea08a0b 100644 --- a/runbot_merge/tests/test_basic.py +++ b/runbot_merge/tests/test_basic.py @@ -10,62 +10,69 @@ from lxml import html import odoo -from test_utils import re_matches, run_crons, get_partner, _simple_init +from test_utils import re_matches, get_partner, 
_simple_init @pytest.fixture -def repo(make_repo): - return make_repo('repo') +def repo(project, make_repo): + r = make_repo('repo') + project.write({'repo_ids': [(0, 0, {'name': r.name})]}) + return r -def test_trivial_flow(env, repo, page, users): +def test_trivial_flow(env, repo, page, users, config): # create base branch - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - # create PR with 2 commits - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) - pr1 = repo.make_pr("gibberish", "blahblah", target='master', ctid=c1, user='user') + # create PR with 2 commits + c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) + pr1 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c1,) [pr] = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr1.number), ]) assert pr.state == 'opened' - run_crons(env) + env.run_crons() assert pr1.labels == {'seen 🙂'} # nothing happened - repo.post_status(c1, 'success', 'legal/cla') + with repo: + repo.post_status(c1, 'success', 'legal/cla') # rewrite status payload in old-style to ensure it does not break c = env['runbot_merge.commit'].search([('sha', '=', c1)]) c.statuses = json.dumps({k: v['state'] for k, v in json.loads(c.statuses).items()}) - repo.post_status(c1, 'success', 'ci/runbot') + with repo: + repo.post_status(c1, 'success', 'ci/runbot') - run_crons(env) + env.run_crons() assert pr.state == 'validated' assert pr1.labels == {'seen 🙂', 'CI 🤖'} - pr1.post_comment('hansen r+ rebase-merge', 'reviewer') + with repo: + pr1.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) assert pr.state == 'ready' # can't check labels here as running the cron will stage it - run_crons(env) + env.run_crons() assert pr.staging_id assert pr1.labels == {'seen 🙂', 'CI 🤖', 'r+ 👌', 'merging 👷'} - # get head of staging branch - staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'ci/runbot', target_url='http://foo.com/pog') - repo.post_status(staging_head.id, 'success', 'legal/cla') - # the should not block the merge because it's not part of the requirements - repo.post_status(staging_head.id, 'failure', 'ci/lint', target_url='http://ignored.com/whocares') + with repo: + # get head of staging branch + staging_head = repo.commit('heads/staging.master') + repo.post_status(staging_head.id, 'success', 'ci/runbot', target_url='http://foo.com/pog') + repo.post_status(staging_head.id, 'success', 'legal/cla') + # the should not block the merge because it's not part of the requirements + repo.post_status(staging_head.id, 'failure', 'ci/lint', target_url='http://ignored.com/whocares') # need to store this because after the crons have run the staging will # have succeeded and been disabled st = pr.staging_id - run_crons(env) + env.run_crons() assert set(tuple(t) for t in st.statuses) == { (repo.name, 'legal/cla', 'success', ''), @@ -99,46 +106,48 @@ def test_trivial_flow(env, repo, page, users): .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) class TestCommitMessage: - def 
test_commit_simple(self, env, repo, users): + def test_commit_simple(self, env, repo, users, config): """ verify 'closes ...' is correctly added in the commit message """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.message == "simple commit message\n\ncloses {repo.name}#1"\ "\n\nSigned-off-by: {reviewer.formatted_email}"\ .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - def test_commit_existing(self, env, repo, users): + def test_commit_existing(self, env, repo, users, config): """ verify do not duplicate 'closes' instruction """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes #1', None, tree={'f': 'm2'}) + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, 'simple commit message that closes #1', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') # closes #1 is already present, should not modify message @@ -146,23 +155,24 @@ class TestCommitMessage: "\n\nSigned-off-by: {reviewer.formatted_email}"\ .format(reviewer=get_partner(env, users['reviewer'])) - def test_commit_other(self, env, repo, users): + def test_commit_other(self, env, repo, users, config): """ verify do not duplicate 'closes' instruction """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes 
odoo/enterprise#1', None, tree={'f': 'm2'}) + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, 'simple commit message that closes odoo/enterprise#1', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') # closes on another repositoy, should modify the commit message @@ -170,23 +180,24 @@ class TestCommitMessage: "\n\nSigned-off-by: {reviewer.formatted_email}"\ .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - def test_commit_wrong_number(self, env, repo, users): + def test_commit_wrong_number(self, env, repo, users, config): """ verify do not match on a wrong number """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message that closes #11', None, tree={'f': 'm2'}) + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, 'simple commit message that closes #11', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') # closes on another repositoy, should modify the commit message @@ -194,55 +205,57 @@ class TestCommitMessage: "\n\nSigned-off-by: {reviewer.formatted_email}"\ .format(repo=repo, reviewer=get_partner(env, users['reviewer'])) - def test_commit_delegate(self, env, repo, users): + def test_commit_delegate(self, env, repo, users, config): """ verify 'signed-off-by ...' 
is correctly added in the commit message for delegated review """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, 'simple commit message', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen delegate=%s' % users['other'], "reviewer") - prx.post_comment('hansen r+', user='other') + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen delegate=%s' % users['other'], config["role_reviewer"]["token"]) + prx.post_comment('hansen r+', config['role_other']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.message == "simple commit message\n\ncloses {repo.name}#1"\ "\n\nSigned-off-by: {reviewer.formatted_email}"\ .format(repo=repo, reviewer=get_partner(env, users['other'])) - def test_commit_coauthored(self, env, repo, users): + def test_commit_coauthored(self, env, repo, users, config): """ verify 'closes ...' and 'Signed-off-by' are added before co-authored-by tags. 
Also checks that all co-authored-by are moved at the end of the message """ - c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) - repo.make_ref('heads/master', c1) - c2 = repo.make_commit(c1, '''simple commit message + with repo: + c1 = repo.make_commit(None, 'first!', None, tree={'f': 'm1'}) + repo.make_ref('heads/master', c1) + c2 = repo.make_commit(c1, '''simple commit message Co-authored-by: Bob Fixes a thing''', None, tree={'f': 'm2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) - - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.message == """simple commit message @@ -263,11 +276,12 @@ class TestWebhookSecurity: """ project.secret = "a secret" - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr("gibberish", "blahblah", target='master', ctid=c0, user='user') + c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) assert not env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -275,14 +289,15 @@ class TestWebhookSecurity: ]) def test_wrong_secret(self, env, project, repo): - repo.set_secret("wrong secret") project.secret = "a secret" + with repo: + repo.set_secret("wrong secret") - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr("gibberish", "blahblah", target='master', ctid=c0, user='user') + c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) assert not env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -290,47 +305,50 @@ class TestWebhookSecurity: ]) def test_correct_secret(self, env, project, repo): - repo.set_secret("a secret") project.secret = "a secret" + with repo: + repo.set_secret("a secret") - m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + m = repo.make_commit(None, "initial", None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr0 = repo.make_pr("gibberish", 
"blahblah", target='master', ctid=c0, user='user') + c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + pr0 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr0.number), ]) -def test_staging_conflict(env, repo): - # create base branch - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) +def test_staging_conflict(env, repo, config): + with repo: + # create base branch + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - # create PR - c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) - pr1 = repo.make_pr("gibberish", "blahblah", target='master', ctid=c1, user='user') - repo.post_status(c1, 'success', 'legal/cla') - repo.post_status(c1, 'success', 'ci/runbot') - pr1.post_comment("hansen r+ rebase-merge", "reviewer") - run_crons(env) + # create PR + c0 = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + c1 = repo.make_commit(c0, 'add file', None, tree={'a': 'some other content', 'b': 'a second file'}) + pr1 = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c1) + repo.post_status(c1, 'success', 'legal/cla') + repo.post_status(c1, 'success', 'ci/runbot') + pr1.post_comment("hansen r+ rebase-merge", config['role_reviewer']['token']) + env.run_crons() pr1 = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', 1) ]) assert pr1.staging_id - # create second PR and make ready for staging - c2 = repo.make_commit(m, 'other', None, tree={'a': 'some content', 'c': 'ccc'}) - c3 = repo.make_commit(c2, 'other', None, tree={'a': 'some content', 'c': 'ccc', 'd': 'ddd'}) - pr2 = repo.make_pr('gibberish', 'blahblah', target='master', ctid=c3, user='user') - repo.post_status(c3, 'success', 'legal/cla') - repo.post_status(c3, 'success', 'ci/runbot') - pr2.post_comment('hansen r+ rebase-merge', "reviewer") - run_crons(env) + with repo: + # create second PR and make ready for staging + c2 = repo.make_commit(m, 'other', None, tree={'a': 'some content', 'c': 'ccc'}) + c3 = repo.make_commit(c2, 'other', None, tree={'a': 'some content', 'c': 'ccc', 'd': 'ddd'}) + pr2 = repo.make_pr(title='gibberish', body='blahblah', target='master', head=c3) + repo.post_status(c3, 'success', 'legal/cla') + repo.post_status(c3, 'success', 'ci/runbot') + pr2.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() p_2 = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr2.number) @@ -339,43 +357,47 @@ def test_staging_conflict(env, repo): assert pr2.labels == {'seen 🙂', 'CI 🤖', 'r+ 👌'} staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'ci/runbot') - repo.post_status(staging_head.id, 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status(staging_head.id, 'success', 'ci/runbot') + repo.post_status(staging_head.id, 'success', 'legal/cla') + env.run_crons() assert pr1.state == 'merged' assert p_2.staging_id staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'ci/runbot') - repo.post_status(staging_head.id, 'success', 'legal/cla') 
- run_crons(env) + with repo: + repo.post_status(staging_head.id, 'success', 'ci/runbot') + repo.post_status(staging_head.id, 'success', 'legal/cla') + env.run_crons() assert p_2.state == 'merged' -def test_staging_concurrent(env, repo): +def test_staging_concurrent(env, repo, config): """ test staging to different targets, should be picked up together """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/1.0', m) - repo.make_ref('heads/2.0', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/1.0', m) + repo.make_ref('heads/2.0', m) env['runbot_merge.project'].search([]).write({ 'branch_ids': [(0, 0, {'name': '1.0'}), (0, 0, {'name': '2.0'})], }) - c10 = repo.make_commit(m, 'AAA', None, tree={'m': 'm', 'a': 'a'}) - c11 = repo.make_commit(c10, 'BBB', None, tree={'m': 'm', 'a': 'a', 'b': 'b'}) - pr1 = repo.make_pr('t1', 'b1', target='1.0', ctid=c11, user='user') - repo.post_status(pr1.head, 'success', 'ci/runbot') - repo.post_status(pr1.head, 'success', 'legal/cla') - pr1.post_comment('hansen r+ rebase-merge', "reviewer") + with repo: + c10 = repo.make_commit(m, 'AAA', None, tree={'m': 'm', 'a': 'a'}) + c11 = repo.make_commit(c10, 'BBB', None, tree={'m': 'm', 'a': 'a', 'b': 'b'}) + pr1 = repo.make_pr(title='t1', body='b1', target='1.0', head=c11) + repo.post_status(pr1.head, 'success', 'ci/runbot') + repo.post_status(pr1.head, 'success', 'legal/cla') + pr1.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - c20 = repo.make_commit(m, 'CCC', None, tree={'m': 'm', 'c': 'c'}) - c21 = repo.make_commit(c20, 'DDD', None, tree={'m': 'm', 'c': 'c', 'd': 'd'}) - pr2 = repo.make_pr('t2', 'b2', target='2.0', ctid=c21, user='user') - repo.post_status(pr2.head, 'success', 'ci/runbot') - repo.post_status(pr2.head, 'success', 'legal/cla') - pr2.post_comment('hansen r+ rebase-merge', "reviewer") + c20 = repo.make_commit(m, 'CCC', None, tree={'m': 'm', 'c': 'c'}) + c21 = repo.make_commit(c20, 'DDD', None, tree={'m': 'm', 'c': 'c', 'd': 'd'}) + pr2 = repo.make_pr(title='t2', body='b2', target='2.0', head=c21) + repo.post_status(pr2.head, 'success', 'ci/runbot') + repo.post_status(pr2.head, 'success', 'legal/cla') + pr2.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) pr1 = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr1.number) @@ -387,21 +409,22 @@ def test_staging_concurrent(env, repo): ]) assert pr2.staging_id -def test_staging_merge_fail(env, repo, users): +def test_staging_merge_fail(env, repo, users, config): """ # of staging failure (no CI) before mark & notify? 
""" - m1 = repo.make_commit(None, 'initial', None, tree={'f': 'm1'}) - m2 = repo.make_commit(m1, 'second', None, tree={'f': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m1 = repo.make_commit(None, 'initial', None, tree={'f': 'm1'}) + m2 = repo.make_commit(m1, 'second', None, tree={'f': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m1, 'other second', None, tree={'f': 'c1'}) - c2 = repo.make_commit(c1, 'third', None, tree={'f': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ rebase-merge', "reviewer") + c1 = repo.make_commit(m1, 'other second', None, tree={'f': 'c1'}) + c2 = repo.make_commit(c1, 'third', None, tree={'f': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) pr1 = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -414,20 +437,21 @@ def test_staging_merge_fail(env, repo, users): (users['user'], re_matches('^Unable to stage PR')), ] -def test_staging_ci_timeout(env, repo, users): +def test_staging_ci_timeout(env, repo, config): """If a staging timeouts (~ delay since staged greater than configured)... requeue? """ - m = repo.make_commit(None, 'initial', None, tree={'f': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'f': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'f': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'f': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ rebase-merge', "reviewer") - run_crons(env) + c1 = repo.make_commit(m, 'first', None, tree={'f': 'c1'}) + c2 = repo.make_commit(c1, 'second', None, tree={'f': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() pr1 = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -437,49 +461,53 @@ def test_staging_ci_timeout(env, repo, users): timeout = env['runbot_merge.project'].search([]).ci_timeout pr1.staging_id.staged_at = odoo.fields.Datetime.to_string(datetime.datetime.now() - datetime.timedelta(minutes=2*timeout)) - env['runbot_merge.project']._check_progress() + env.run_crons('runbot_merge.merge_cron') assert pr1.state == 'error', "timeout should fail the PR" -def test_timeout_bump_on_pending(env, repo): - m = repo.make_commit(None, 'initial', None, tree={'f': '0'}) - repo.make_ref('heads/master', m) +def test_timeout_bump_on_pending(env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'f': '0'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'c', None, tree={'f': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 
'success', 'legal/cla') - prx.post_comment('hansen r+', 'reviewer') - run_crons(env) + c = repo.make_commit(m, 'c', None, tree={'f': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() st = env['runbot_merge.stagings'].search([]) old_timeout = odoo.fields.Datetime.to_string(datetime.datetime.now() - datetime.timedelta(days=15)) st.timeout_limit = old_timeout - repo.post_status(repo.commit('heads/staging.master').id, 'pending', 'ci/runbot') - env['runbot_merge.commit']._notify() + with repo: + repo.post_status(repo.commit('heads/staging.master').id, 'pending', 'ci/runbot') + env.run_crons('runbot_merge.process_updated_commits') assert st.timeout_limit > old_timeout -def test_staging_ci_failure_single(env, repo, users): +def test_staging_ci_failure_single(env, repo, users, config): """ on failure of single-PR staging, mark & notify failure """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ rebase-merge', "reviewer") - run_crons(env) + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).staging_id staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') # stable genius - run_crons(env) + with repo: + repo.post_status(staging_head.id, 'success', 'legal/cla') + repo.post_status(staging_head.id, 'failure', 'ci/runbot') # stable genius + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -491,31 +519,34 @@ def test_staging_ci_failure_single(env, repo, users): (users['user'], 'Staging failed: ci/runbot') ] -def test_ff_failure(env, repo, users): +def test_ff_failure(env, repo, config): """ target updated while the PR is being staged => redo staging """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-merge', "reviewer") - run_crons(env) + c1 = 
repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).staging_id - m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'}) + with repo: + m2 = repo.make_commit('heads/master', 'cockblock', None, tree={'m': 'm', 'm2': 'm2'}) assert repo.commit('heads/master').id == m2 # report staging success & run cron to merge staging = repo.commit('heads/staging.master') - repo.post_status(staging.id, 'success', 'legal/cla') - repo.post_status(staging.id, 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(staging.id, 'success', 'legal/cla') + repo.post_status(staging.id, 'success', 'ci/runbot') + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -524,32 +555,36 @@ def test_ff_failure(env, repo, users): assert repo.commit('heads/staging.master').id != staging.id,\ "PR should be staged to a new commit" -def test_ff_failure_batch(env, repo, users): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) +def test_ff_failure_batch(env, repo, users, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - a1 = repo.make_commit(m, 'a1', None, tree={'m': 'm', 'a': '1'}) - a2 = repo.make_commit(a1, 'a2', None, tree={'m': 'm', 'a': '2'}) - A = repo.make_pr('A', None, target='master', ctid=a2, user='user', label='A') - repo.post_status(A.head, 'success', 'legal/cla') - repo.post_status(A.head, 'success', 'ci/runbot') - A.post_comment('hansen r+ rebase-merge', "reviewer") + a1 = repo.make_commit(m, 'a1', None, tree={'m': 'm', 'a': '1'}) + a2 = repo.make_commit(a1, 'a2', None, tree={'m': 'm', 'a': '2'}) + repo.make_ref('heads/A', a2) + A = repo.make_pr(title='A', body=None, target='master', head='A') + repo.post_status(A.head, 'success', 'legal/cla') + repo.post_status(A.head, 'success', 'ci/runbot') + A.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - b1 = repo.make_commit(m, 'b1', None, tree={'m': 'm', 'b': '1'}) - b2 = repo.make_commit(b1, 'b2', None, tree={'m': 'm', 'b': '2'}) - B = repo.make_pr('B', None, target='master', ctid=b2, user='user', label='B') - repo.post_status(B.head, 'success', 'legal/cla') - repo.post_status(B.head, 'success', 'ci/runbot') - B.post_comment('hansen r+ rebase-merge', "reviewer") + b1 = repo.make_commit(m, 'b1', None, tree={'m': 'm', 'b': '1'}) + b2 = repo.make_commit(b1, 'b2', None, tree={'m': 'm', 'b': '2'}) + repo.make_ref('heads/B', b2) + B = repo.make_pr(title='B', body=None, target='master', head='B') + repo.post_status(B.head, 'success', 'legal/cla') + repo.post_status(B.head, 'success', 'ci/runbot') + B.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) - c1 = repo.make_commit(m, 'c1', None, tree={'m': 'm', 'c': '1'}) - c2 = repo.make_commit(c1, 'c2', None, tree={'m': 'm', 'c': '2'}) - C = repo.make_pr('C', None, target='master', ctid=c2, user='user', label='C') - repo.post_status(C.head, 'success', 'legal/cla') - repo.post_status(C.head, 'success', 'ci/runbot') - 
C.post_comment('hansen r+ rebase-merge', "reviewer") + c1 = repo.make_commit(m, 'c1', None, tree={'m': 'm', 'c': '1'}) + c2 = repo.make_commit(c1, 'c2', None, tree={'m': 'm', 'c': '2'}) + repo.make_ref('heads/C', c2) + C = repo.make_pr(title='C', body=None, target='master', head='C') + repo.post_status(C.head, 'success', 'legal/cla') + repo.post_status(C.head, 'success', 'ci/runbot') + C.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) messages = [ c['commit']['message'] for c in repo.log('heads/staging.master') @@ -559,21 +594,24 @@ def test_ff_failure_batch(env, repo, users): assert 'c2' in messages # block FF - m2 = repo.make_commit('heads/master', 'NO!', None, tree={'m': 'm2'}) + with repo: + m2 = repo.make_commit('heads/master', 'NO!', None, tree={'m': 'm2'}) old_staging = repo.commit('heads/staging.master') # confirm staging - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'legal/cla') + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + env.run_crons() new_staging = repo.commit('heads/staging.master') assert new_staging.id != old_staging.id # confirm again - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'legal/cla') + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + env.run_crons() messages = { c['commit']['message'] for c in repo.log('heads/master') @@ -599,30 +637,31 @@ class TestPREdition: 'project_id': env['runbot_merge.project'].search([]).id, }) - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - repo.make_ref('heads/1.0', m) - repo.make_ref('heads/2.0', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) + repo.make_ref('heads/1.0', m) + repo.make_ref('heads/2.0', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) assert pr.message == 'title\n\nbody' - prx.title = "title 2" + with repo: prx.title = "title 2" assert pr.message == 'title 2\n\nbody' - prx.base = '1.0' + with repo: prx.base = '1.0' assert pr.target == branch_1 - prx.base = '2.0' + with repo: prx.base = '2.0' assert not pr.exists() - run_crons(env) + env.run_crons() assert prx.labels == set() - prx.base = '1.0' + with repo: prx.base = '1.0' assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -637,26 +676,29 @@ class TestPREdition: }) master = env['runbot_merge.branch'].search([('name', '=', 'master')]) - # master is 1 commit in advance of 1.0 - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm2'}) - repo.make_ref('heads/master', m2) - repo.make_ref('heads/1.0', m) + with repo: + # master is 1 
commit in advance of 1.0 + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm2'}) + repo.make_ref('heads/master', m2) + repo.make_ref('heads/1.0', m) - # the PR builds on master, but is errorneously targeted to 1.0 - c = repo.make_commit(m2, 'first', None, tree={'m': 'm3'}) - prx = repo.make_pr('title', 'body', target='1.0', ctid=c, user='user') + # the PR builds on master, but is errorneously targeted to 1.0 + c = repo.make_commit(m2, 'first', None, tree={'m': 'm3'}) + prx = repo.make_pr(title='title', body='body', target='1.0', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) assert not pr.squash - prx.base = 'master' + with repo: + prx.base = 'master' assert pr.target == master assert pr.squash - prx.base = '1.0' + with repo: + prx.base = '1.0' assert pr.target == branch_1 assert not pr.squash @@ -672,16 +714,17 @@ class TestPREdition: 'project_id': env['runbot_merge.project'].search([]).id, }) - c0 = repo.make_commit(None, '0', None, tree={'a': '0'}) - repo.make_ref('heads/1.0', c0) - c1 = repo.make_commit(c0, '1', None, tree={'a': '1'}) - repo.make_ref('heads/2.0', c1) - c2 = repo.make_commit(c1, '2', None, tree={'a': '2'}) - repo.make_ref('heads/master', c2) + with repo: + c0 = repo.make_commit(None, '0', None, tree={'a': '0'}) + repo.make_ref('heads/1.0', c0) + c1 = repo.make_commit(c0, '1', None, tree={'a': '1'}) + repo.make_ref('heads/2.0', c1) + c2 = repo.make_commit(c1, '2', None, tree={'a': '2'}) + repo.make_ref('heads/master', c2) - # create PR on 1.0 - c = repo.make_commit(c0, 'c', None, tree={'a': '0', 'b': '0'}) - prx = repo.make_pr('t', 'b', target='1.0', ctid=c, user='user') + # create PR on 1.0 + c = repo.make_commit(c0, 'c', None, tree={'a': '0', 'b': '0'}) + prx = repo.make_pr(title='t', body='b', target='1.0', head=c) # there should only be a single PR in the system at this point [pr] = env['runbot_merge.pull_requests'].search([]) assert pr.target == branch_1 @@ -689,9 +732,10 @@ class TestPREdition: # branch 1 is EOL, disable it branch_1.active = False - # we forgot we had active PRs for it, and we may want to merge them - # still, retarget them! - prx.base = '2.0' + with repo: + # we forgot we had active PRs for it, and we may want to merge them + # still, retarget them! 
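# PR metadata edits are webhook-driven as well, so under the new API even a
# single attribute write such as changing the PR's base goes through
# ``with repo:``; the mergebot only sees the change once the pull-request
# "edited" event has been processed. A small sketch of the retargeting step,
# assuming ``prx`` is the PR handle created above (the helper name is
# illustrative only):
def retarget_sketch(env, repo, prx):
    with repo:
        prx.base = '2.0'  # move the PR off the deactivated 1.0 branch
    # the single runbot_merge record should now point at the new target
    [pr] = env['runbot_merge.pull_requests'].search([])
    assert pr.target.name == '2.0'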
+            prx.base = '2.0'
 
         # check that we still only have one PR in the system
         [pr_] = env['runbot_merge.pull_requests'].search([])
@@ -704,55 +748,60 @@ def test_edit_staged(env, repo):
     """ What should happen when editing the PR/metadata (not pushing) of a
     staged PR
     """
-def test_close_staged(env, repo):
+def test_close_staged(env, repo, config):
     """
     When closing a staged PR, cancel the staging
     """
-    m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
-    repo.make_ref('heads/master', m)
+    with repo:
+        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
+        repo.make_ref('heads/master', m)
 
-    c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'})
-    prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user')
-    repo.post_status(prx.head, 'success', 'legal/cla')
-    repo.post_status(prx.head, 'success', 'ci/runbot')
-    prx.post_comment('hansen r+', user='reviewer')
+        c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'})
+        prx = repo.make_pr(title='title', body='body', target='master', head=c)
+        repo.post_status(prx.head, 'success', 'legal/cla')
+        repo.post_status(prx.head, 'success', 'ci/runbot')
+        prx.post_comment('hansen r+', config['role_reviewer']['token'])
     pr = env['runbot_merge.pull_requests'].search([
         ('repository.name', '=', repo.name),
        ('number', '=', prx.number),
     ])
-    run_crons(env)
+    env.run_crons()
    assert pr.state == 'ready'
    assert pr.staging_id
 
-    prx.close()
-    run_crons(env)
+    with repo:
+        prx.close()
+    env.run_crons()
    assert not pr.staging_id
    assert not env['runbot_merge.stagings'].search([])
    assert pr.state == 'closed'
    assert prx.labels == {'seen 🙂', 'closed 💔'}
 
-def test_forward_port(env, repo):
-    m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
-    repo.make_ref('heads/master', m)
+def test_forward_port(env, repo, config):
+    with repo:
+        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
+        repo.make_ref('heads/master', m)
 
-    head = m
-    for i in range(110):
-        head = repo.make_commit(head, 'c_%03d' % i, None, tree={'m': 'm', 'f': str(i)}, wait=False)
-    # for remote since we're not waiting in commit creation
-    time.sleep(10)
-    pr = repo.make_pr('PR', None, target='master', ctid=head, user='user')
-    repo.post_status(pr.head, 'success', 'legal/cla')
-    repo.post_status(pr.head, 'success', 'ci/runbot')
-    pr.post_comment('hansen r+ merge', "reviewer")
-    run_crons(env)
+        head = m
+        for i in range(110):
+            head = repo.make_commit(head, 'c_%03d' % i, None, tree={'m': 'm', 'f': str(i)})
+    # not sure why we wanted to wait here
+
+    with repo:
+        pr = repo.make_pr(title='PR', body=None, target='master', head=head)
+        repo.post_status(pr.head, 'success', 'legal/cla')
+        repo.post_status(pr.head, 'success', 'ci/runbot')
+        pr.post_comment('hansen r+ merge', config['role_reviewer']['token'])
+    env.run_crons()
 
     st = repo.commit('heads/staging.master')
    assert st.message.startswith('force rebuild')
 
-    repo.post_status(st.id, 'success', 'legal/cla')
-    repo.post_status(st.id, 'success', 'ci/runbot')
-    run_crons(env)
+    with repo:
+        repo.post_status(st.id, 'success', 'legal/cla')
+        repo.post_status(st.id, 'success', 'ci/runbot')
+    env.run_crons()
 
     h = repo.commit('heads/master')
    assert set(st.parents) == {h.id}
@@ -760,7 +809,8 @@ def test_forward_port(env, repo):
     commits = {c['sha'] for c in repo.log('heads/master')}
    assert len(commits) == 112
 
-def test_rebase_failure(env, repo, users, remote_p):
+@pytest.mark.skip("Needs to find a way to make set_ref fail on *second* call.")
+def test_rebase_failure(env, repo, users, config):
    """ It looks like gh.rebase() can fail in the
final ref-setting after the merging & commits creation has been performed. At this point, the staging will fail (yay) but the target branch (tmp) would not get reset, @@ -781,24 +831,23 @@ def test_rebase_failure(env, repo, users, remote_p): but only the first time, and not the set_ref in try_staging itself, and that call is performed *in a subprocess* when running tests. """ - # FIXME: remote mode - if remote_p: - pytest.skip("Needs to find a way to make set_ref fail on *second* call in remote mode.") + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + commit_a = repo.make_commit(m, 'A', None, tree={'m': 'm', 'a': 'a'}) + repo.make_ref('heads/a', commit_a) + pr_a = repo.make_pr(title='A', body=None, target='master', head='a') + repo.post_status(pr_a.head, 'success', 'ci/runbot') + repo.post_status(pr_a.head, 'success', 'legal/cla') + pr_a.post_comment('hansen r+', config['role_reviewer']['token']) - commit_a = repo.make_commit(m, 'A', None, tree={'m': 'm', 'a': 'a'}) - pr_a = repo.make_pr('A', None, target='master', ctid=commit_a, user='user', label='a') - repo.post_status(pr_a.head, 'success', 'ci/runbot') - repo.post_status(pr_a.head, 'success', 'legal/cla') - pr_a.post_comment('hansen r+', 'reviewer') - - commit_b = repo.make_commit(m, 'B', None, tree={'m': 'm', 'b': 'b'}) - pr_b = repo.make_pr('B', None, target='master', ctid=commit_b, user='user', label='b') - repo.post_status(pr_b.head, 'success', 'ci/runbot') - repo.post_status(pr_b.head, 'success', 'legal/cla') - pr_b.post_comment('hansen r+', 'reviewer') + commit_b = repo.make_commit(m, 'B', None, tree={'m': 'm', 'b': 'b'}) + repo.make_ref('heads/b', commit_b) + pr_b = repo.make_pr(title='B', body=None, target='master', head='b') + repo.post_status(pr_b.head, 'success', 'ci/runbot') + repo.post_status(pr_b.head, 'success', 'legal/cla') + pr_b.post_comment('hansen r+', config['role_reviewer']['token']) from odoo.addons.runbot_merge.github import GH original = GH.set_ref @@ -825,19 +874,21 @@ def test_rebase_failure(env, repo, users, remote_p): 'b': 'b', } -def test_ci_failure_after_review(env, repo, users): +def test_ci_failure_after_review(env, repo, users, config): """ If a PR is r+'d but the CI ends up failing afterwards, ping the user so they're aware. This is useful for the more "fire and forget" approach especially small / simple PRs where you assume they're going to pass and just r+ immediately. 
""" - prx = _simple_init(repo) - prx.post_comment('hansen r+', "reviewer") - run_crons(env) + with repo: + prx = _simple_init(repo) + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - repo.post_status(prx.head, 'failure', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status(prx.head, 'failure', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + env.run_crons() assert prx.comments == [ (users['reviewer'], 'hansen r+'), @@ -849,13 +900,14 @@ def test_reopen_state(env, repo): already a CI+ stored (as the CI might never trigger unless explicitly re-requested) """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - repo.post_status(c, 'success', 'legal/cla') - repo.post_status(c, 'success', 'ci/runbot') - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + repo.post_status(c, 'success', 'legal/cla') + repo.post_status(c, 'success', 'ci/runbot') + prx = repo.make_pr(title='title', body='body', target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -864,25 +916,25 @@ def test_reopen_state(env, repo): assert pr.state == 'validated', \ "if a PR is created on a CI'd commit, it should be validated immediately" - prx.close() + with repo: prx.close() assert pr.state == 'closed' - prx.open() + with repo: prx.open() assert pr.state == 'validated', \ "if a PR is reopened and had a CI'd head, it should be validated immediately" -def test_no_required_statuses(env, repo): +def test_no_required_statuses(env, repo, config): """ check that mergebot can work on a repo with no CI at all """ env['runbot_merge.project'].search([]).required_statuses = '' - m = repo.make_commit(None, 'initial', None, tree={'0': '0'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'0': '0'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'0': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - prx.post_comment('hansen r+', 'reviewer') - - run_crons(env) + c = repo.make_commit(m, 'first', None, tree={'0': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -891,12 +943,12 @@ def test_no_required_statuses(env, repo): class TestRetry: @pytest.mark.xfail(reason="This may not be a good idea as it could lead to tons of rebuild spam") - def test_auto_retry_push(self, env, repo): + def test_auto_retry_push(self, env, repo, config): prx = _simple_init(repo) repo.post_status(prx.head, 'success', 'ci/runbot') repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+', "reviewer") - run_crons(env) + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -905,78 +957,84 @@ class TestRetry: staging_head = repo.commit('heads/staging.master') repo.post_status(staging_head.id, 'success', 'legal/cla') 
repo.post_status(staging_head.id, 'failure', 'ci/runbot') - run_crons(env) + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) assert pr.state == 'error' - prx.push(repo.make_commit(prx.head, 'third', None, tree={'m': 'c3'})) + repo.update_ref(prx.ref, repo.make_commit(prx.head, 'third', None, tree={'m': 'c3'}), force=True) assert pr.state == 'approved' env['runbot_merge.project']._check_progress() assert pr.state == 'approved' repo.post_status(prx.head, 'success', 'ci/runbot') repo.post_status(prx.head, 'success', 'legal/cla') - run_crons(env) + env.run_crons() assert pr.state == 'ready' staging_head2 = repo.commit('heads/staging.master') assert staging_head2 != staging_head repo.post_status(staging_head2.id, 'success', 'legal/cla') repo.post_status(staging_head2.id, 'success', 'ci/runbot') - run_crons(env) + env.run_crons() assert pr.state == 'merged' @pytest.mark.parametrize('retrier', ['user', 'other', 'reviewer']) - def test_retry_comment(self, env, repo, retrier, users): + def test_retry_comment(self, env, repo, retrier, users, config): """ An accepted but failed PR should be re-tried when the author or a reviewer asks for it """ - prx = _simple_init(repo) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], "reviewer") - run_crons(env) + with repo: + prx = _simple_init(repo) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], + config["role_reviewer"]['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).staging_id staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(staging_head.id, 'success', 'legal/cla') + repo.post_status(staging_head.id, 'failure', 'ci/runbot') + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'error' - prx.post_comment('hansen retry', retrier) + with repo: + prx.post_comment('hansen retry', config['role_' + retrier]['token']) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'ready' - env['runbot_merge.project']._check_progress() + env.run_crons('runbot_merge.merge_cron') staging_head2 = repo.commit('heads/staging.master') assert staging_head2 != staging_head - repo.post_status(staging_head2.id, 'success', 'legal/cla') - repo.post_status(staging_head2.id, 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(staging_head2.id, 'success', 'legal/cla') + repo.post_status(staging_head2.id, 'success', 'ci/runbot') + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'merged' - def test_retry_ignored(self, env, repo, users): + def test_retry_ignored(self, env, repo, users, config): """ Check feedback in case of ignored retry command on a non-error PR. 
""" - prx = _simple_init(repo) - prx.post_comment('hansen r+', 'reviewer') - prx.post_comment('hansen retry', 'reviewer') + with repo: + prx = _simple_init(repo) + prx.post_comment('hansen r+', config['role_reviewer']['token']) + prx.post_comment('hansen retry', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) assert prx.comments == [ (users['reviewer'], 'hansen r+'), (users['reviewer'], 'hansen retry'), @@ -984,33 +1042,40 @@ class TestRetry: ] @pytest.mark.parametrize('disabler', ['user', 'other', 'reviewer']) - def test_retry_disable(self, env, repo, disabler, users): - prx = _simple_init(repo) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], "reviewer") - run_crons(env) + def test_retry_disable(self, env, repo, disabler, users, config): + with repo: + prx = _simple_init(repo) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + prx.post_comment('hansen r+ delegate=%s rebase-merge' % users['other'], + config["role_reviewer"]['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).staging_id staging_head = repo.commit('heads/staging.master') - repo.post_status(staging_head.id, 'success', 'legal/cla') - repo.post_status(staging_head.id, 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(staging_head.id, 'success', 'legal/cla') + repo.post_status(staging_head.id, 'failure', 'ci/runbot') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) assert pr.state == 'error' - prx.post_comment('hansen r-', user=disabler) + with repo: + prx.post_comment('hansen r-', config['role_' + disabler]['token']) assert pr.state == 'validated' - prx.push(repo.make_commit(prx.head, 'third', None, tree={'m': 'c3'})) - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - run_crons(env) + with repo: + repo.make_commit(prx.ref, 'third', None, tree={'m': 'c3'}) + # just in case, apparently in some case the first post_status uses the old head... 
+ with repo: + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + env.run_crons() assert pr.state == 'validated' class TestMergeMethod: @@ -1018,24 +1083,25 @@ class TestMergeMethod: if event['pull_request']['commits'] == 1, "squash" (/rebase); otherwise regular merge """ - def test_pr_single_commit(self, repo, env): + def test_pr_single_commit(self, repo, env, config): """ If single commit, default to rebase & FF """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', "reviewer") + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).squash - run_crons(env) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1053,9 +1119,10 @@ class TestMergeMethod: assert actual.parents == [m2],\ "dummy commit aside, the previous master's tip should be the sole parent of the staging commit" - repo.post_status(staging.id, 'success', 'legal/cla') - repo.post_status(staging.id, 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(staging.id, 'success', 'legal/cla') + repo.post_status(staging.id, 'success', 'ci/runbot') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1072,19 +1139,21 @@ class TestMergeMethod: If a PR starts with 1 commit and a second commit is added, the PR should be unflagged as squash """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) assert pr.squash, "a PR with a single commit should be squashed" - prx.push(repo.make_commit(c1, 'second2', None, tree={'m': 'c2'})) + with repo: + repo.make_commit(prx.ref, 'second2', None, tree={'m': 'c2'}) assert not pr.squash, "a PR with a single commit should not be squashed" def test_pr_reset_to_single_commit(self, repo, env): @@ -1092,39 +1161,46 @@ class TestMergeMethod: If a PR starts at >1 
commits and is reset back to 1, the PR should be re-flagged as squash """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - c2 = repo.make_commit(c1, 'second2', None, tree={'m': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + c2 = repo.make_commit(c1, 'second2', None, tree={'m': 'c2'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c2) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) assert not pr.squash, "a PR with a single commit should not be squashed" - prx.push(repo.make_commit(m, 'fixup', None, tree={'m': 'c2'})) + with repo: + repo.update_ref( + prx.ref, + repo.make_commit(m, 'fixup', None, tree={'m': 'c2'}), + force=True + ) assert pr.squash, "a PR with a single commit should be squashed" - def test_pr_no_method(self, repo, env, users): + def test_pr_no_method(self, repo, env, users, config): """ a multi-repo PR should not be staged by default, should also get feedback indicating a merge method is necessary """ - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) + with repo: + m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) + m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) + m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) + repo.make_ref('heads/master', m2) - b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) - b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=b1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', "reviewer") + b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) + b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=b1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) assert not env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), @@ -1140,35 +1216,39 @@ class TestMergeMethod: """), ] - def test_pr_method_no_review(self, repo, env, users): + def test_pr_method_no_review(self, repo, env, users, config): """ Configuring the method should be idependent from the review """ - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) + with repo: + m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) + m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) + m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) + repo.make_ref('heads/master', m2) - b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': 
'0'}) - b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=b1, user='user') + b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) + b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=b1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') + with repo: + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen rebase-merge', "reviewer") + prx.post_comment('hansen rebase-merge', config['role_reviewer']['token']) assert pr.merge_method == 'rebase-merge' - run_crons(env) + env.run_crons() - prx.post_comment('hansen merge', "reviewer") + with repo: + prx.post_comment('hansen merge', config['role_reviewer']['token']) assert pr.merge_method == 'merge' - run_crons(env) + env.run_crons() - prx.post_comment('hansen rebase-ff', "reviewer") + with repo: + prx.post_comment('hansen rebase-ff', config['role_reviewer']['token']) assert pr.merge_method == 'rebase-ff' - run_crons(env) + env.run_crons() assert prx.comments == [ (users['reviewer'], 'hansen rebase-merge'), @@ -1179,7 +1259,7 @@ class TestMergeMethod: (users['user'], "Merge method set to rebase and fast-forward"), ] - def test_pr_rebase_merge(self, repo, env, users): + def test_pr_rebase_merge(self, repo, env, users, config): """ test result on rebase-merge left: PR @@ -1206,25 +1286,25 @@ class TestMergeMethod: +----------+ merge | +-------+ """ - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) + with repo: + m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) + m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) + m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) + repo.make_ref('heads/master', m2) - # test commit ordering issue while at it: github sorts commits on - # author.date instead of doing so topologically which is absolutely - # not what we want - committer = {'name': 'a', 'email': 'a', 'date': '2018-10-08T11:48:43Z'} - author0 = {'name': 'a', 'email': 'a', 'date': '2018-10-01T14:58:38Z'} - author1 = {'name': 'a', 'email': 'a', 'date': '2015-10-01T14:58:38Z'} - b0 = repo.make_commit(m1, 'B0', author=author0, committer=committer, tree={'m': '1', 'b': '0'}) - b1 = repo.make_commit(b0, 'B1', author=author1, committer=committer, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=b1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-merge', "reviewer") - - run_crons(env) + # test commit ordering issue while at it: github sorts commits on + # author.date instead of doing so topologically which is absolutely + # not what we want + committer = {'name': 'a', 'email': 'a', 'date': '2018-10-08T11:48:43Z'} + author0 = {'name': 'a', 'email': 'a', 'date': '2018-10-01T14:58:38Z'} + author1 = {'name': 'a', 'email': 'a', 'date': '2015-10-01T14:58:38Z'} + b0 = repo.make_commit(m1, 'B0', author=author0, committer=committer, tree={'m': '1', 'b': '0'}) + b1 = repo.make_commit(b0, 'B1', author=author1, committer=committer, tree={'m': '1', 
'b': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=b1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ rebase-merge', config['role_reviewer']['token']) + env.run_crons() # create a dag (msg:str, parents:set) from the log staging = log_to_node(repo.log('heads/staging.master')) @@ -1239,9 +1319,10 @@ class TestMergeMethod: expected = (re_matches('^force rebuild'), frozenset([merge_head])) assert staging == expected - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'legal/cla') + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -1264,7 +1345,7 @@ class TestMergeMethod: } assert r0.parents == [m2] - def test_pr_rebase_ff(self, repo, env, users): + def test_pr_rebase_ff(self, repo, env, users, config): """ test result on rebase-merge left: PR @@ -1288,19 +1369,19 @@ class TestMergeMethod: PR | B1 | | B1 | +------+ +--^---+ """ - m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) - m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) - m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) - repo.make_ref('heads/master', m2) + with repo: + m0 = repo.make_commit(None, 'M0', None, tree={'m': '0'}) + m1 = repo.make_commit(m0, 'M1', None, tree={'m': '1'}) + m2 = repo.make_commit(m1, 'M2', None, tree={'m': '2'}) + repo.make_ref('heads/master', m2) - b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) - b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=b1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ rebase-ff', "reviewer") - - run_crons(env) + b0 = repo.make_commit(m1, 'B0', None, tree={'m': '1', 'b': '0'}) + b1 = repo.make_commit(b0, 'B1', None, tree={'m': '1', 'b': '1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=b1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token']) + env.run_crons() # create a dag (msg:str, parents:set) from the log staging = log_to_node(repo.log('heads/staging.master')) @@ -1312,9 +1393,10 @@ class TestMergeMethod: expected = node(re_matches('^force rebuild'), nb1) assert staging == expected - repo.post_status('heads/staging.master', 'success', 'legal/cla') - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'legal/cla') + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -1342,7 +1424,7 @@ class TestMergeMethod: def test_pr_contains_merges(self, repo, env): pass - def test_pr_force_merge_single_commit(self, repo, env, users): + def test_pr_force_merge_single_commit(self, repo, env, users, config): """ should be possible to flag a PR as regular-merged, regardless of its commits count @@ -1353,21 +1435,24 @@ class TestMergeMethod: C0 + | gib-+ """ - m = repo.make_commit(None, "M", None, tree={'a': 'a'}) - 
repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, "M", None, tree={'a': 'a'}) + repo.make_ref('heads/master', m) - c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) - prx = repo.make_pr("gibberish", "blahblah", target='master', ctid=c0, user='user') - env['runbot_merge.project']._check_progress() + c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) + prx = repo.make_pr(title="gibberish", body="blahblah", target='master', head=c0) + env.run_crons('runbot_merge.merge_cron') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', 'reviewer') - run_crons(env) + with repo: + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) + env.run_crons() - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.parents == [m, prx.head], \ @@ -1388,25 +1473,28 @@ class TestMergeMethod: '': master.id } - def test_unrebase_emptymessage(self, repo, env, users): + def test_unrebase_emptymessage(self, repo, env, users, config): """ When merging between master branches (e.g. forward port), the PR may have only a title """ - m = repo.make_commit(None, "M", None, tree={'a': 'a'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, "M", None, tree={'a': 'a'}) + repo.make_ref('heads/master', m) - c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) - prx = repo.make_pr("gibberish", None, target='master', ctid=c0, user='user') - env['runbot_merge.project']._check_progress() + c0 = repo.make_commit(m, 'C0', None, tree={'a': 'b'}) + prx = repo.make_pr(title="gibberish", body=None, target='master', head=c0) + env.run_crons('runbot_merge.merge_cron') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', 'reviewer') - run_crons(env) + with repo: + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) + env.run_crons() - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.parents == [m, prx.head], \ @@ -1419,7 +1507,7 @@ class TestMergeMethod: '\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), m, c0) assert log_to_node(repo.log('heads/master')), expected - def test_pr_mergehead(self, repo, env): + def test_pr_mergehead(self, repo, env, config): """ if the head of the PR is a merge commit and one of the parents is in the target, replicate the merge commit instead of merging @@ -1431,23 +1519,26 @@ class TestMergeMethod: C1 [label = "\\N / MERGE"] """ - m1 = repo.make_commit(None, "M1", None, tree={'a': '0'}) - m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) - repo.make_ref('heads/master', m2) + with repo: + m1 = 
repo.make_commit(None, "M1", None, tree={'a': '0'}) + m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) + repo.make_ref('heads/master', m2) - c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) - c1 = repo.make_commit([c0, m2], 'C1', None, tree={'a': '1', 'b': '2'}) - prx = repo.make_pr("T", "TT", target='master', ctid=c1, user='user') - run_crons(env) + c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) + c1 = repo.make_commit([c0, m2], 'C1', None, tree={'a': '1', 'b': '2'}) + prx = repo.make_pr(title="T", body="TT", target='master', head=c1) + env.run_crons() - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', 'reviewer') - run_crons(env) + with repo: + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) + env.run_crons() - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.parents == [m2, c0] @@ -1455,7 +1546,7 @@ class TestMergeMethod: expected = node('C1', node('C0', m1), node('M2', m1)) assert log_to_node(repo.log('heads/master')), expected - def test_pr_mergehead_nonmember(self, repo, env, users): + def test_pr_mergehead_nonmember(self, repo, env, users, config): """ if the head of the PR is a merge commit but none of the parents is in the target, merge normally @@ -1469,25 +1560,28 @@ class TestMergeMethod: MERGE -> M2 MERGE -> C1 """ - m1 = repo.make_commit(None, "M1", None, tree={'a': '0'}) - m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) - repo.make_ref('heads/master', m2) + with repo: + m1 = repo.make_commit(None, "M1", None, tree={'a': '0'}) + m2 = repo.make_commit(m1, "M2", None, tree={'a': '1'}) + repo.make_ref('heads/master', m2) - b0 = repo.make_commit(m1, 'B0', None, tree={'a': '0', 'bb': 'bb'}) + b0 = repo.make_commit(m1, 'B0', None, tree={'a': '0', 'bb': 'bb'}) - c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) - c1 = repo.make_commit([c0, b0], 'C1', None, tree={'a': '0', 'b': '2', 'bb': 'bb'}) - prx = repo.make_pr("T", "TT", target='master', ctid=c1, user='user') - run_crons(env) + c0 = repo.make_commit(m1, 'C0', None, tree={'a': '0', 'b': '2'}) + c1 = repo.make_commit([c0, b0], 'C1', None, tree={'a': '0', 'b': '2', 'bb': 'bb'}) + prx = repo.make_pr(title="T", body="TT", target='master', head=c1) + env.run_crons() - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ merge', 'reviewer') - run_crons(env) + with repo: + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+ merge', config['role_reviewer']['token']) + env.run_crons() - repo.post_status('heads/staging.master', 'success', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'success', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() master = repo.commit('heads/master') assert master.parents == [m2, c1] @@ -1503,23 +1597,23 @@ 
class TestMergeMethod: assert log_to_node(repo.log('heads/master')), expected @pytest.mark.xfail(reason="removed support for squash+ command") - def test_force_squash_merge(self, repo, env): + def test_force_squash_merge(self, repo, env, config): m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) repo.make_ref('heads/master', m2) c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user') + prx = repo.make_pr(title='title', body='body', target='master', head=c2) repo.post_status(prx.head, 'success', 'legal/cla') repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ squash+', "reviewer") + prx.post_comment('hansen r+ squash+', config['role_reviewer']['token']) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).squash - run_crons(env) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1536,7 +1630,7 @@ class TestMergeMethod: repo.post_status(staging.id, 'success', 'legal/cla') repo.post_status(staging.id, 'success', 'ci/runbot') - run_crons(env) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1544,22 +1638,22 @@ class TestMergeMethod: assert prx.state == 'closed' @pytest.mark.xfail(reason="removed support for squash- command") - def test_disable_squash_merge(self, repo, env): + def test_disable_squash_merge(self, repo, env, config): m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) repo.make_ref('heads/master', m2) c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + prx = repo.make_pr(title='title', body='body', target='master', head=c1) repo.post_status(prx.head, 'success', 'legal/cla') repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+ squash-', "reviewer") + prx.post_comment('hansen r+ squash-', config['role_reviewer']['token']) assert not env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).squash - run_crons(env) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1574,7 +1668,7 @@ class TestMergeMethod: repo.post_status(staging.id, 'success', 'legal/cla') repo.post_status(staging.id, 'success', 'ci/runbot') - run_crons(env) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) @@ -1586,53 +1680,60 @@ class TestPRUpdate(object): can have additional effect (see individual tests) """ def test_update_opened(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) pr = 
env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) assert pr.head == c # alter & push force PR entirely - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 def test_reopen_update(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) - prx.close() + with repo: + prx.close() assert pr.state == 'closed' assert pr.head == c - prx.open() + with repo: + prx.open() assert pr.state == 'opened' - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 def test_update_validated(self, env, repo): """ Should reset to opened """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - run_crons(env) + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), @@ -1640,18 +1741,20 @@ class TestPRUpdate(object): assert pr.head == c assert pr.state == 'validated' - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'opened' - def test_update_approved(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + def test_update_approved(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - prx.post_comment('hansen r+', user='reviewer') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + prx.post_comment('hansen r+', config['role_reviewer']['token']) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), @@ -1659,23 +1762,25 @@ class TestPRUpdate(object): assert pr.head == c assert pr.state == 'approved' - c2 = 
repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'opened' - def test_update_ready(self, env, repo): + def test_update_ready(self, env, repo, config): """ Should reset to opened """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', user='reviewer') - run_crons(env) + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), @@ -1683,57 +1788,62 @@ class TestPRUpdate(object): assert pr.head == c assert pr.state == 'ready' - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'opened' - def test_update_staged(self, env, repo): + def test_update_staged(self, env, repo, config): """ Should cancel the staging & reset PR to opened """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', user='reviewer') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_reviewer']['token']) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) - run_crons(env) + env.run_crons() assert pr.state == 'ready' assert pr.staging_id - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'opened' assert not pr.staging_id assert not env['runbot_merge.stagings'].search([]) - def test_split(self, env, repo): + def test_split(self, env, repo, config): """ Should remove the PR from its split, and possibly delete the split entirely. 
""" - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) - prx1 = repo.make_pr('t1', 'b1', target='master', ctid=c, user='user', label='p1') - repo.post_status(prx1.head, 'success', 'legal/cla') - repo.post_status(prx1.head, 'success', 'ci/runbot') - prx1.post_comment('hansen r+', user='reviewer') + c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) + repo.make_ref('heads/p1', c) + prx1 = repo.make_pr(title='t1', body='b1', target='master', head='p1') + repo.post_status(prx1.head, 'success', 'legal/cla') + repo.post_status(prx1.head, 'success', 'ci/runbot') + prx1.post_comment('hansen r+', config['role_reviewer']['token']) - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) - prx2 = repo.make_pr('t2', 'b2', target='master', ctid=c, user='user', label='p2') - repo.post_status(prx2.head, 'success', 'legal/cla') - repo.post_status(prx2.head, 'success', 'ci/runbot') - prx2.post_comment('hansen r+', user='reviewer') - - run_crons(env) + c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) + repo.make_ref('heads/p2', c) + prx2 = repo.make_pr(title='t2', body='b2', target='master', head='p2') + repo.post_status(prx2.head, 'success', 'legal/cla') + repo.post_status(prx2.head, 'success', 'ci/runbot') + prx2.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() pr1, pr2 = env['runbot_merge.pull_requests'].search([], order='number') assert pr1.number == prx1.number @@ -1741,8 +1851,9 @@ class TestPRUpdate(object): assert pr1.staging_id == pr2.staging_id s0 = pr1.staging_id - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'failure', 'ci/runbot') + env.run_crons() assert pr1.staging_id and pr1.staging_id != s0, "pr1 should have been re-staged" assert not pr2.staging_id, "pr2 should not" @@ -1750,56 +1861,62 @@ class TestPRUpdate(object): # around using active_test=False assert env['runbot_merge.split'].search([]) - prx2.push(repo.make_commit(c, 'second', None, tree={'m': 'm', '2': '22'})) + with repo: + repo.update_ref(prx2.ref, repo.make_commit(c, 'second', None, tree={'m': 'm', '2': '22'}), force=True) # probably not necessary ATM but... 
- run_crons(env) + env.run_crons() assert pr2.state == 'opened', "state should have been reset" assert not env['runbot_merge.split'].search([]), "there should be no split left" - def test_update_error(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + def test_update_error(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', user='reviewer') + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_reviewer']['token']) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), ]) - run_crons(env) + env.run_crons() assert pr.state == 'ready' assert pr.staging_id h = repo.commit('heads/staging.master').id - repo.post_status(h, 'success', 'legal/cla') - repo.post_status(h, 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(h, 'success', 'legal/cla') + repo.post_status(h, 'failure', 'ci/runbot') + env.run_crons() assert not pr.staging_id assert pr.state == 'error' - c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'first', None, tree={'m': 'cc'}) + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'opened' def test_unknown_pr(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/1.0', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/1.0', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='1.0', ctid=c, user='user') + c = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='1.0', head=c) assert not env['runbot_merge.pull_requests'].search([('number', '=', prx.number)]) env['runbot_merge.project'].search([]).write({ 'branch_ids': [(0, 0, {'name': '1.0'})] }) - c2 = repo.make_commit(c, 'second', None, tree={'m': 'c2'}) - prx.push(c2) + with repo: + c2 = repo.make_commit(c, 'second', None, tree={'m': 'c2'}) + repo.update_ref(prx.ref, c2, force=True) assert not env['runbot_merge.pull_requests'].search([('number', '=', prx.number)]) @@ -1807,16 +1924,18 @@ class TestPRUpdate(object): """ If a PR is updated to a known-valid commit, it should be validated """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) - c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) - repo.post_status(c2, 'success', 'legal/cla') - repo.post_status(c2, 'success', 'ci/runbot') - run_crons(env) + c = repo.make_commit(m, 'fist', None, tree={'m': 'c1'}) + c2 = repo.make_commit(m, 'first', None, tree={'m': 'cc'}) + repo.post_status(c2, 'success', 'legal/cla') + repo.post_status(c2, 'success', 
'ci/runbot') + env.run_crons() - prx = repo.make_pr('title', 'body', target='master', ctid=c, user='user') + with repo: + prx = repo.make_pr(title='title', body='body', target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number), @@ -1824,34 +1943,27 @@ class TestPRUpdate(object): assert pr.head == c assert pr.state == 'opened' - prx.push(c2) + with repo: + repo.update_ref(prx.ref, c2, force=True) assert pr.head == c2 assert pr.state == 'validated' class TestBatching(object): - def _pr(self, repo, prefix, trees, *, - target='master', user='user', reviewer='reviewer', + def _pr(self, repo, prefix, trees, *, target='master', user, reviewer, statuses=(('ci/runbot', 'success'), ('legal/cla', 'success')) ): """ Helper creating a PR from a series of commits on a base - - :type repo: fake_github.Repo - :param prefix: a prefix used for commit messages, PR title & PR body - :param trees: a list of dicts symbolising the tree for the corresponding commit. - each tree is an update on the "current state" of the tree - :param target: branch, both the base commit and the PR target - :type target: str - :type user: str - :type reviewer: str | None - :type statuses: List[(str, str)] """ - base = repo.commit('heads/{}'.format(target)) - tree = repo.read_tree(base) - c = base.id - for i, t in enumerate(trees): - tree.update(t) - c = repo.make_commit(c, 'commit_{}_{:02}'.format(prefix, i), None, tree=dict(tree)) - pr = repo.make_pr('title {}'.format(prefix), 'body {}'.format(prefix), target=target, ctid=c, user=user, label=prefix) + *_, c = repo.make_commits( + 'heads/{}'.format(target), + *( + repo.Commit('commit_{}_{:02}'.format(prefix, i), tree=t) + for i, t in enumerate(trees) + ), + ref='heads/{}'.format(prefix) + ) + pr = repo.make_pr(title='title {}'.format(prefix), body='body {}'.format(prefix), + target=target, head=prefix, token=user) for context, result in statuses: repo.post_status(c, result, context) @@ -1868,17 +1980,18 @@ class TestBatching(object): ('number', '=', number), ]) - def test_staging_batch(self, env, repo, users): + def test_staging_batch(self, env, repo, users, config): """ If multiple PRs are ready for the same target at the same point, they should be staged together """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}]) + pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + env.run_crons() - run_crons(env) pr1 = self._get(env, repo, pr1.number) assert pr1.staging_id pr2 = self._get(env, repo, pr2.number) @@ -1902,19 +2015,20 @@ class TestBatching(object): expected = (re_matches('^force rebuild'), frozenset([p2])) assert staging == expected - def test_staging_batch_norebase(self, env, repo, users): + def test_staging_batch_norebase(self, env, repo, users, config): """ If multiple PRs are ready for the same target at the same point, they should be staged together """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = 
repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - pr1.post_comment('hansen merge', 'reviewer') - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}]) - pr2.post_comment('hansen merge', 'reviewer') + pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr1.post_comment('hansen merge', config['role_reviewer']['token']) + pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr2.post_comment('hansen merge', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) pr1 = self._get(env, repo, pr1.number) assert pr1.staging_id assert pr1.merge_method == 'merge' @@ -1942,17 +2056,18 @@ class TestBatching(object): expected = (re_matches('^force rebuild'), frozenset([p2])) assert staging == expected - def test_staging_batch_squash(self, env, repo, users): + def test_staging_batch_squash(self, env, repo, users, config): """ If multiple PRs are ready for the same target at the same point, they should be staged together """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}]) - pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}]) + pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr2 = self._pr(repo, 'PR2', [{'c': 'CCC'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + env.run_crons() - run_crons(env) pr1 = self._get(env, repo, pr1.number) assert pr1.staging_id pr2 = self._get(env, repo, pr2.number) @@ -1971,25 +2086,26 @@ class TestBatching(object): node('initial')))) assert staging == expected - def test_batching_pressing(self, env, repo): + def test_batching_pressing(self, env, repo, config): """ "Pressing" PRs should be selected before normal & batched together """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}]) + pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}]) - pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}]) - pr11.post_comment('hansen priority=1', 'reviewer') - pr12.post_comment('hansen priority=1', 'reviewer') + pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr11.post_comment('hansen priority=1', config['role_reviewer']['token']) + pr12.post_comment('hansen priority=1', config['role_reviewer']['token']) pr21, pr22, pr11, pr12 = prs = [self._get(env, 
repo, pr.number) for pr in [pr21, pr22, pr11, pr12]] assert pr21.priority == pr22.priority == 2 assert pr11.priority == pr12.priority == 1 - run_crons(env) + env.run_crons() assert all(pr.state == 'ready' for pr in prs) assert not pr21.staging_id @@ -1998,20 +2114,21 @@ class TestBatching(object): assert pr12.staging_id assert pr11.staging_id == pr12.staging_id - def test_batching_urgent(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + def test_batching_urgent(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}]) + pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr22 = self._pr(repo, 'PR2', [{'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) - pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}]) - pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}]) - pr11.post_comment('hansen priority=1', 'reviewer') - pr12.post_comment('hansen priority=1', 'reviewer') + pr11 = self._pr(repo, 'Pressing1', [{'x': 'x'}, {'y': 'y'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr12 = self._pr(repo, 'Pressing2', [{'z': 'z'}, {'zz': 'zz'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr11.post_comment('hansen priority=1', config['role_reviewer']['token']) + pr12.post_comment('hansen priority=1', config['role_reviewer']['token']) # stage PR1 - run_crons(env) + env.run_crons() p_11, p_12, p_21, p_22 = \ [self._get(env, repo, pr.number) for pr in [pr11, pr12, pr21, pr22]] assert not p_21.staging_id or p_22.staging_id @@ -2020,70 +2137,76 @@ class TestBatching(object): staging_1 = p_11.staging_id # no statuses run on PR0s - pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], reviewer=None, statuses=[]) - pr01.post_comment('hansen priority=0 rebase-merge', 'reviewer') + with repo: + pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], user=config['role_user']['token'], reviewer=None, statuses=[]) + pr01.post_comment('hansen priority=0 rebase-merge', config['role_reviewer']['token']) p_01 = self._get(env, repo, pr01.number) assert p_01.state == 'opened' assert p_01.priority == 0 - run_crons(env) + env.run_crons() # first staging should be cancelled and PR0 should be staged # regardless of CI (or lack thereof) assert not staging_1.active assert not p_11.staging_id and not p_12.staging_id assert p_01.staging_id - def test_batching_urgenter_than_split(self, env, repo): + def test_batching_urgenter_than_split(self, env, repo, config): """ p=0 PRs should take priority over split stagings (processing of a staging having CI-failed and being split into sub-stagings) """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - p_1 = self._get(env, repo, pr1.number) - pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}]) - p_2 = self._get(env, repo, pr2.number) + pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], 
user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + p_1 = self._get(env, repo, pr1.number) + pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + p_2 = self._get(env, repo, pr2.number) + env.run_crons() - run_crons(env) st = env['runbot_merge.stagings'].search([]) # both prs should be part of the staging assert st.mapped('batch_ids.prs') == p_1 | p_2 # add CI failure - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') + with repo: + repo.post_status('heads/staging.master', 'failure', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons() - run_crons(env) # should have staged the first half assert p_1.staging_id.heads assert not p_2.staging_id.heads # during restaging of pr1, create urgent PR - pr0 = self._pr(repo, 'urgent', [{'a': 'a', 'b': 'b'}], reviewer=None, statuses=[]) - pr0.post_comment('hansen priority=0', 'reviewer') + with repo: + pr0 = self._pr(repo, 'urgent', [{'a': 'a', 'b': 'b'}], user=config['role_user']['token'], reviewer=None, statuses=[]) + pr0.post_comment('hansen priority=0', config['role_reviewer']['token']) + env.run_crons() - run_crons(env) # TODO: maybe just deactivate stagings instead of deleting them when canceling? assert not p_1.staging_id assert self._get(env, repo, pr0.number).staging_id - def test_urgent_failed(self, env, repo): + def test_urgent_failed(self, env, repo, config): """ Ensure pr[p=0,state=failed] don't get picked up """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) + pr21 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) p_21 = self._get(env, repo, pr21.number) # no statuses run on PR0s - pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], reviewer=None, statuses=[]) - pr01.post_comment('hansen priority=0', 'reviewer') + with repo: + pr01 = self._pr(repo, 'Urgent1', [{'n': 'n'}, {'o': 'o'}], user=config['role_user']['token'], reviewer=None, statuses=[]) + pr01.post_comment('hansen priority=0', config['role_reviewer']['token']) p_01 = self._get(env, repo, pr01.number) p_01.state = 'error' - run_crons(env) + env.run_crons() assert not p_01.staging_id, "p_01 should not be picked up as it's failed" assert p_21.staging_id, "p_21 should have been staged" @@ -2091,27 +2214,29 @@ class TestBatching(object): def test_batching_merge_failure(self): pass - def test_staging_ci_failure_batch(self, env, repo): + def test_staging_ci_failure_batch(self, env, repo, config): """ on failure split batch & requeue """ - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}]) - pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, {'d': 'DDD'}]) + pr1 = self._pr(repo, 'PR1', [{'a': 'AAA'}, {'b': 'BBB'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + pr2 = self._pr(repo, 'PR2', [{'a': 'some content', 'c': 'CCC'}, 
{'d': 'DDD'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + env.run_crons() - run_crons(env) st = env['runbot_merge.stagings'].search([]) # both prs should be part of the staging assert len(st.mapped('batch_ids.prs')) == 2 # add CI failure - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - repo.post_status('heads/staging.master', 'success', 'legal/cla') + with repo: + repo.post_status('heads/staging.master', 'failure', 'ci/runbot') + repo.post_status('heads/staging.master', 'success', 'legal/cla') pr1 = env['runbot_merge.pull_requests'].search([('number', '=', pr1.number)]) pr2 = env['runbot_merge.pull_requests'].search([('number', '=', pr2.number)]) - run_crons(env) + env.run_crons() # should have split the existing batch into two, with one of the # splits having been immediately restaged st = env['runbot_merge.stagings'].search([]) @@ -2123,49 +2248,53 @@ class TestBatching(object): # This is the failing PR! h = repo.commit('heads/staging.master').id - repo.post_status(h, 'failure', 'ci/runbot') - repo.post_status(h, 'success', 'legal/cla') - run_crons(env) + with repo: + repo.post_status(h, 'failure', 'ci/runbot') + repo.post_status(h, 'success', 'legal/cla') + env.run_crons() assert pr1.state == 'error' assert pr2.staging_id h = repo.commit('heads/staging.master').id - repo.post_status(h, 'success', 'ci/runbot') - repo.post_status(h, 'success', 'legal/cla') - env['runbot_merge.commit']._notify() - env['runbot_merge.project']._check_progress() + with repo: + repo.post_status(h, 'success', 'ci/runbot') + repo.post_status(h, 'success', 'legal/cla') + env.run_crons('runbot_merge.process_updated_commits', 'runbot_merge.merge_cron') assert pr2.state == 'merged' class TestReviewing(object): - def test_reviewer_rights(self, env, repo, users): + def test_reviewer_rights(self, env, repo, users, config): """Only users with review rights will have their r+ (and other attributes) taken in account """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', user='other') - run_crons(env) + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen r+', config['role_other']['token']) + env.run_crons() assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'validated' - prx.post_comment('hansen r+', user='reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'ready' # second r+ to check warning - prx.post_comment('hansen r+', user='reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) - run_crons(env) + env.run_crons() 
         assert prx.comments == [
             (users['other'], 'hansen r+'),
             (users['user'], "I'm sorry, @{}. I'm afraid I can't do that.".format(users['other'])),
@@ -2175,19 +2304,20 @@ class TestReviewing(object):
                 users['reviewer'])),
         ]
 
-    def test_self_review_fail(self, env, repo, users):
+    def test_self_review_fail(self, env, repo, users, config):
         """ Normal reviewers can't self-review
         """
-        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
-        m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'})
-        repo.make_ref('heads/master', m2)
+        with repo:
+            m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
+            m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'})
+            repo.make_ref('heads/master', m2)
 
-        c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
-        prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='reviewer')
-        repo.post_status(prx.head, 'success', 'legal/cla')
-        repo.post_status(prx.head, 'success', 'ci/runbot')
-        prx.post_comment('hansen r+', user='reviewer')
-        run_crons(env)
+            c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
+            prx = repo.make_pr(title='title', body='body', target='master', head=c1, token=config['role_reviewer']['token'])
+            repo.post_status(prx.head, 'success', 'legal/cla')
+            repo.post_status(prx.head, 'success', 'ci/runbot')
+            prx.post_comment('hansen r+', config['role_reviewer']['token'])
+        env.run_crons()
 
         assert prx.user == users['reviewer']
         assert env['runbot_merge.pull_requests'].search([
@@ -2195,25 +2325,26 @@ class TestReviewing(object):
             ('number', '=', prx.number)
         ]).state == 'validated'
 
-        run_crons(env)
+        env.run_crons()
         assert prx.comments == [
             (users['reviewer'], 'hansen r+'),
             (users['user'], "I'm sorry, @{}. You can't review+.".format(users['reviewer'])),
         ]
 
-    def test_self_review_success(self, env, repo, users):
+    def test_self_review_success(self, env, repo, users, config):
         """ Some users are allowed to self-review
         """
-        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
-        m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'})
-        repo.make_ref('heads/master', m2)
+        with repo:
+            m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
+            m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'})
+            repo.make_ref('heads/master', m2)
 
-        c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
-        prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='self_reviewer')
-        repo.post_status(prx.head, 'success', 'legal/cla')
-        repo.post_status(prx.head, 'success', 'ci/runbot')
-        prx.post_comment('hansen r+', user='self_reviewer')
-        run_crons(env)
+            c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
+            prx = repo.make_pr(title='title', body='body', target='master', head=c1, token=config['role_self_reviewer']['token'])
+            repo.post_status(prx.head, 'success', 'legal/cla')
+            repo.post_status(prx.head, 'success', 'ci/runbot')
+            prx.post_comment('hansen r+', config['role_self_reviewer']['token'])
+        env.run_crons()
 
         assert prx.user == users['self_reviewer']
         assert env['runbot_merge.pull_requests'].search([
@@ -2221,21 +2352,22 @@ class TestReviewing(object):
             ('number', '=', prx.number)
         ]).state == 'ready'
 
-    def test_delegate_review(self, env, repo, users):
+    def test_delegate_review(self, env, repo, users, config):
         """Users should be able to delegate review to either the creator of
         the PR or an other user without review rights
         """
-        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
-        m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'})
- repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen delegate+', user='reviewer') - prx.post_comment('hansen r+', user='user') - run_crons(env) + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen delegate+', config['role_reviewer']['token']) + prx.post_comment('hansen r+', config['role_user']['token']) + env.run_crons() assert prx.user == users['user'] assert env['runbot_merge.pull_requests'].search([ @@ -2243,41 +2375,44 @@ class TestReviewing(object): ('number', '=', prx.number) ]).state == 'ready' - def test_delegate_review_thirdparty(self, env, repo, users): + def test_delegate_review_thirdparty(self, env, repo, users, config): """Users should be able to delegate review to either the creator of the PR or an other user without review rights """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen delegate=%s' % users['other'], user='reviewer') - prx.post_comment('hansen r+', user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen delegate=%s' % users['other'], config['role_reviewer']['token']) + prx.post_comment('hansen r+', config['role_user']['token']) + env.run_crons() - run_crons(env) assert prx.user == users['user'] assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'validated' - prx.post_comment('hansen r+', user='other') + with repo: + prx.post_comment('hansen r+', config['role_other']['token']) assert env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]).state == 'ready' - def test_delegate_prefixes(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + def test_delegate_prefixes(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') - prx.post_comment('hansen delegate=foo,@bar,#baz', user='reviewer') + c = repo.make_commit(m, 'first', None, tree={'m': 
'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) + prx.post_comment('hansen delegate=foo,@bar,#baz', config['role_reviewer']['token']) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2287,34 +2422,38 @@ class TestReviewing(object): assert {d.github_login for d in pr.delegates} == {'foo', 'bar', 'baz'} - def test_actual_review(self, env, repo): + def test_actual_review(self, env, repo, config): """ treat github reviews as regular comments """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) - prx.post_review('COMMENT', 'reviewer', "hansen priority=1") + with repo: + prx.post_review('COMMENT', "hansen priority=1", config['role_reviewer']['token']) assert pr.priority == 1 assert pr.state == 'opened' - prx.post_review('APPROVE', 'reviewer', "hansen priority=2") + with repo: + prx.post_review('APPROVE', "hansen priority=2", config['role_reviewer']['token']) assert pr.priority == 2 assert pr.state == 'opened' - prx.post_review('REQUEST_CHANGES', 'reviewer', 'hansen priority=1') + with repo: + prx.post_review('REQUEST_CHANGES', 'hansen priority=1', config['role_reviewer']['token']) assert pr.priority == 1 assert pr.state == 'opened' - - prx.post_review('COMMENT', 'reviewer', 'hansen r+') + with repo: + prx.post_review('COMMENT', 'hansen r+', config['role_reviewer']['token']) assert pr.priority == 1 assert pr.state == 'approved' @@ -2331,16 +2470,17 @@ class TestUnknownPR: * get statuses if head commit unknown (additional cron?) 
* handle any comment & review (existing PRs may enter the system on a review/r+) """ - def test_rplus_unknown(self, repo, env): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/master', m2) + def test_rplus_unknown(self, repo, env, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/master', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot', target_url="http://example.org/wheee") - run_crons(env) + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot', target_url="http://example.org/wheee") + env.run_crons() # assume an unknown but ready PR: we don't know the PR or its head commit env['runbot_merge.pull_requests'].search([ @@ -2350,12 +2490,13 @@ class TestUnknownPR: env['runbot_merge.commit'].search([('sha', '=', prx.head)]).unlink() # reviewer reviewers - prx.post_comment('hansen r+', "reviewer") + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) Fetch = env['runbot_merge.fetch_job'] assert Fetch.search([('repository', '=', repo.name), ('number', '=', prx.number)]) - env['runbot_merge.project']._check_fetch() - run_crons(env) + env.run_crons('runbot_merge.fetch_prs_cron') + env.run_crons() assert not Fetch.search([('repository', '=', repo.name), ('number', '=', prx.number)]) c = env['runbot_merge.commit'].search([('sha', '=', prx.head)]) @@ -2370,47 +2511,51 @@ class TestUnknownPR: ]) assert pr.state == 'ready' - env['runbot_merge.project']._check_progress() + env.run_crons('runbot_merge.merge_cron') assert pr.staging_id - def test_rplus_unmanaged(self, env, repo, users): + def test_rplus_unmanaged(self, env, repo, users, config): """ r+ on an unmanaged target should notify about """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/branch', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/branch', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='branch', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='branch', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen r+', "reviewer") - - env['runbot_merge.project']._check_fetch() - env['runbot_merge.project']._send_feedback() + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons( + 'runbot_merge.fetch_prs_cron', + 'runbot_merge.feedback_cron', + ) assert prx.comments == [ (users['reviewer'], 'hansen r+'), (users['user'], "I'm sorry. 
Branch `branch` is not within my remit."), ] - def test_rplus_review_unmanaged(self, env, repo, users): + def test_rplus_review_unmanaged(self, env, repo, users, config): """ r+ reviews can take a different path than comments """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) - repo.make_ref('heads/branch', m2) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + m2 = repo.make_commit(m, 'second', None, tree={'m': 'm', 'm2': 'm2'}) + repo.make_ref('heads/branch', m2) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='branch', ctid=c1, user='user') - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='branch', head=c1) + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_review('APPROVE', "reviewer", 'hansen r+') - - env['runbot_merge.project']._check_fetch() - env['runbot_merge.project']._send_feedback() + prx.post_review('APPROVE', 'hansen r+', config['role_reviewer']['token']) + env.run_crons( + 'runbot_merge.fetch_prs_cron', + 'runbot_merge.feedback_cron', + ) # FIXME: either split out reviews in local or merge reviews & comments in remote assert prx.comments[-1:] == [ @@ -2419,15 +2564,16 @@ class TestUnknownPR: class TestRecognizeCommands: @pytest.mark.parametrize('botname', ['hansen', 'Hansen', 'HANSEN', 'HanSen', 'hAnSeN']) - def test_botname_casing(self, repo, env, botname): + def test_botname_casing(self, repo, env, botname, config): """ Test that the botname is case-insensitive as people might write bot names capitalised or titlecased or uppercased or whatever """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') + c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2435,19 +2581,20 @@ class TestRecognizeCommands: ]) assert pr.state == 'opened' - prx.post_comment('%s r+' % botname, 'reviewer') + with repo: + prx.post_comment('%s r+' % botname, config['role_reviewer']['token']) assert pr.state == 'approved' @pytest.mark.parametrize('indent', ['', '\N{SPACE}', '\N{SPACE}'*4, '\N{TAB}']) - def test_botname_indented(self, repo, env, indent): + def test_botname_indented(self, repo, env, indent, config): """ matching botname should ignore leading whitespaces """ + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) - - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') + c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2455,18 +2602,20 @@ class 
TestRecognizeCommands: ]) assert pr.state == 'opened' - prx.post_comment('%shansen r+' % indent, 'reviewer') + with repo: + prx.post_comment('%shansen r+' % indent, config['role_reviewer']['token']) assert pr.state == 'approved' class TestRMinus: - def test_rminus_approved(self, repo, env): + def test_rminus_approved(self, repo, env, config): """ approved -> r- -> opened """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') + c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2474,31 +2623,37 @@ class TestRMinus: ]) assert pr.state == 'opened' - prx.post_comment('hansen r+', 'reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'approved' - prx.post_comment('hansen r-', 'user') + with repo: + prx.post_comment('hansen r-', config['role_user']['token']) assert pr.state == 'opened' - prx.post_comment('hansen r+', 'reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'approved' - prx.post_comment('hansen r-', 'other') + with repo: + prx.post_comment('hansen r-', config['role_other']['token']) assert pr.state == 'approved' - prx.post_comment('hansen r-', 'reviewer') + with repo: + prx.post_comment('hansen r-', config['role_reviewer']['token']) assert pr.state == 'opened' - def test_rminus_ready(self, repo, env): + def test_rminus_ready(self, repo, env, config): """ ready -> r- -> validated """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - run_crons(env) + c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2506,31 +2661,37 @@ class TestRMinus: ]) assert pr.state == 'validated' - prx.post_comment('hansen r+', 'reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'ready' - prx.post_comment('hansen r-', 'user') + with repo: + prx.post_comment('hansen r-', config['role_user']['token']) assert pr.state == 'validated' - prx.post_comment('hansen r+', 'reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'ready' - prx.post_comment('hansen r-', 'other') + with repo: + prx.post_comment('hansen r-', config['role_other']['token']) assert pr.state == 'ready' - prx.post_comment('hansen r-', 'reviewer') + with repo: + prx.post_comment('hansen r-', config['role_reviewer']['token']) assert pr.state == 'validated' - def test_rminus_staged(self, repo, env): + def 
test_rminus_staged(self, repo, env, config): """ staged -> r- -> validated """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) - prx = repo.make_pr('title', None, target='master', ctid=c, user='user') - repo.post_status(prx.head, 'success', 'ci/runbot') - repo.post_status(prx.head, 'success', 'legal/cla') - run_crons(env) + c = repo.make_commit(m, 'first', None, tree={'m': 'c'}) + prx = repo.make_pr(title='title', body=None, target='master', head=c) + repo.post_status(prx.head, 'success', 'ci/runbot') + repo.post_status(prx.head, 'success', 'legal/cla') + env.run_crons() pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2538,57 +2699,65 @@ class TestRMinus: ]) # if reviewer unreviews, cancel staging & unreview - prx.post_comment('hansen r+', 'reviewer') - run_crons(env) + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() st = pr.staging_id assert st - prx.post_comment('hansen r-', 'reviewer') + with repo: + prx.post_comment('hansen r-', config['role_reviewer']['token']) assert not st.active assert not pr.staging_id assert pr.state == 'validated' # if author unreviews, cancel staging & unreview - prx.post_comment('hansen r+', 'reviewer') - run_crons(env) + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() st = pr.staging_id assert st - prx.post_comment('hansen r-', 'user') + with repo: + prx.post_comment('hansen r-', config['role_user']['token']) assert not st.active assert not pr.staging_id assert pr.state == 'validated' # if rando unreviews, ignore - prx.post_comment('hansen r+', 'reviewer') - run_crons(env) + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() st = pr.staging_id assert st - prx.post_comment('hansen r-', 'other') + with repo: + prx.post_comment('hansen r-', config['role_other']['token']) assert pr.staging_id == st assert pr.state == 'ready' - def test_split(self, env, repo): + def test_split(self, env, repo, config): """ Should remove the PR from its split, and possibly delete the split entirely. 
""" - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) - prx1 = repo.make_pr('t1', 'b1', target='master', ctid=c, user='user', label='p1') - repo.post_status(prx1.head, 'success', 'legal/cla') - repo.post_status(prx1.head, 'success', 'ci/runbot') - prx1.post_comment('hansen r+', user='reviewer') + c = repo.make_commit(m, 'first', None, tree={'m': 'm', '1': '1'}) + repo.make_ref('heads/p1', c) + prx1 = repo.make_pr(title='t1', body='b1', target='master', head='p1') + repo.post_status(prx1.head, 'success', 'legal/cla') + repo.post_status(prx1.head, 'success', 'ci/runbot') + prx1.post_comment('hansen r+', config['role_reviewer']['token']) - c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) - prx2 = repo.make_pr('t2', 'b2', target='master', ctid=c, user='user', label='p2') - repo.post_status(prx2.head, 'success', 'legal/cla') - repo.post_status(prx2.head, 'success', 'ci/runbot') - prx2.post_comment('hansen r+', user='reviewer') - - run_crons(env) + c = repo.make_commit(m, 'first', None, tree={'m': 'm', '2': '2'}) + repo.make_ref('heads/p2', c) + prx2 = repo.make_pr(title='t2', body='b2', target='master', head='p2') + repo.post_status(prx2.head, 'success', 'legal/cla') + repo.post_status(prx2.head, 'success', 'ci/runbot') + prx2.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() pr1, pr2 = env['runbot_merge.pull_requests'].search([], order='number') assert pr1.number == prx1.number @@ -2596,8 +2765,9 @@ class TestRMinus: assert pr1.staging_id == pr2.staging_id s0 = pr1.staging_id - repo.post_status('heads/staging.master', 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status('heads/staging.master', 'failure', 'ci/runbot') + env.run_crons() assert pr1.staging_id and pr1.staging_id != s0, "pr1 should have been re-staged" assert not pr2.staging_id, "pr2 should not" @@ -2605,27 +2775,29 @@ class TestRMinus: # around using active_test=False assert env['runbot_merge.split'].search([]) - # prx2 was actually a terrible idea! - prx2.post_comment('hansen r-', user='reviewer') + with repo: + # prx2 was actually a terrible idea! + prx2.post_comment('hansen r-', config['role_reviewer']['token']) # probably not necessary ATM but... 
- run_crons(env) + env.run_crons() assert pr2.state == 'validated', "state should have been reset" assert not env['runbot_merge.split'].search([]), "there should be no split left" class TestComments: - def test_address_method(self, repo, env): - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + def test_address_method(self, repo, env, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) - repo.post_status(prx.head, 'success', 'legal/cla') - repo.post_status(prx.head, 'success', 'ci/runbot') - prx.post_comment('hansen delegate=foo', user='reviewer') - prx.post_comment('@hansen delegate=bar', user='reviewer') - prx.post_comment('#hansen delegate=baz', user='reviewer') + repo.post_status(prx.head, 'success', 'legal/cla') + repo.post_status(prx.head, 'success', 'ci/runbot') + prx.post_comment('hansen delegate=foo', config['role_reviewer']['token']) + prx.post_comment('@hansen delegate=bar', config['role_reviewer']['token']) + prx.post_comment('#hansen delegate=baz', config['role_reviewer']['token']) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), @@ -2635,92 +2807,104 @@ class TestComments: assert {p.github_login for p in pr.delegates} \ == {'foo', 'bar', 'baz'} - def test_delete(self, repo, env): + def test_delete(self, repo, env, config): """ Comments being deleted should be ignored """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) - cid = prx.post_comment('hansen r+', user='reviewer') - # unreview by pushing a new commit - prx.push(repo.make_commit(c1, 'second', None, tree={'m': 'c2'})) + with repo: + cid = prx.post_comment('hansen r+', config['role_reviewer']['token']) + # unreview by pushing a new commit + repo.update_ref(prx.ref, repo.make_commit(c1, 'second', None, tree={'m': 'c2'}), force=True) assert pr.state == 'opened' - prx.delete_comment(cid, 'reviewer') + with repo: + prx.delete_comment(cid, config['role_reviewer']['token']) # check that PR is still unreviewed assert pr.state == 'opened' - def test_edit(self, repo, env): + def test_edit(self, repo, env, config): """ Comments being edited should be ignored """ - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = 
env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) - cid = prx.post_comment('hansen r+', user='reviewer') - # unreview by pushing a new commit - prx.push(repo.make_commit(c1, 'second', None, tree={'m': 'c2'})) + with repo: + cid = prx.post_comment('hansen r+', config['role_reviewer']['token']) + # unreview by pushing a new commit + repo.update_ref(prx.ref, repo.make_commit(c1, 'second', None, tree={'m': 'c2'}), force=True) assert pr.state == 'opened' - prx.edit_comment(cid, 'hansen r+ edited', 'reviewer') + with repo: + prx.edit_comment(cid, 'hansen r+ edited', config['role_reviewer']['token']) # check that PR is still unreviewed assert pr.state == 'opened' class TestFeedback: - def test_ci_approved(self, repo, env, users): + def test_ci_approved(self, repo, env, users, config): """CI failing on an r+'d PR sends feedback""" - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) - prx.post_comment('hansen r+', user='reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'approved' - repo.post_status(prx.head, 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(prx.head, 'failure', 'ci/runbot') + env.run_crons() assert prx.comments == [ (users['reviewer'], 'hansen r+'), (users['user'], "'ci/runbot' failed on this reviewed PR.") ] - def test_review_unvalidated(self, repo, env, users): + def test_review_unvalidated(self, repo, env, users, config): """r+-ing a PR with failed CI sends feedback""" - m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) - repo.make_ref('heads/master', m) + with repo: + m = repo.make_commit(None, 'initial', None, tree={'m': 'm'}) + repo.make_ref('heads/master', m) - c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) - prx = repo.make_pr('title', 'body', target='master', ctid=c1, user='user') + c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'}) + prx = repo.make_pr(title='title', body='body', target='master', head=c1) pr = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', prx.number) ]) - repo.post_status(prx.head, 'failure', 'ci/runbot') - run_crons(env) + with repo: + repo.post_status(prx.head, 'failure', 'ci/runbot') + env.run_crons() assert pr.state == 'opened' - prx.post_comment('hansen r+', user='reviewer') + with repo: + prx.post_comment('hansen r+', config['role_reviewer']['token']) assert pr.state == 'approved' - run_crons(env) + env.run_crons() assert prx.comments == [ (users['reviewer'], 'hansen r+'), @@ -2730,14 +2914,15 @@ class TestInfrastructure: def test_protection(self, repo): """ force-pushing on a protected ref should fail """ - m0 = repo.make_commit(None, 'initial', None, tree={'m': 'm0'}) - m1 = repo.make_commit(m0, 'first', None, tree={'m': 'm1'}) - repo.make_ref('heads/master', m1) - repo.protect('master') + with repo: + m0 = repo.make_commit(None, 'initial', None, tree={'m': 'm0'}) + m1 = repo.make_commit(m0, 
'first', None, tree={'m': 'm1'}) + repo.make_ref('heads/master', m1) + repo.protect('master') - c1 = repo.make_commit(m0, 'other', None, tree={'m': 'c1'}) - with pytest.raises(AssertionError): - repo.update_ref('heads/master', c1, force=True) + c1 = repo.make_commit(m0, 'other', None, tree={'m': 'c1'}) + with pytest.raises(AssertionError): + repo.update_ref('heads/master', c1, force=True) assert repo.get_ref('heads/master') == m1 def node(name, *children): @@ -2782,55 +2967,61 @@ class TestEmailFormatting: assert p1.formatted_email == 'Shultz ' class TestLabelling: - def test_desync(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + def test_desync(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr = repo.make_pr('gibberish', 'blahblah', target='master', ctid=c, user='user') + c = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + pr = repo.make_pr(title='gibberish', body='blahblah', target='master', head=c) [pr_id] = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr.number), ]) - repo.post_status(c, 'success', 'legal/cla') - repo.post_status(c, 'success', 'ci/runbot') + with repo: + repo.post_status(c, 'success', 'legal/cla') + repo.post_status(c, 'success', 'ci/runbot') - run_crons(env) + env.run_crons() assert pr.labels == {'seen 🙂', 'CI 🤖'} - # desync state and labels - pr.labels.remove('CI 🤖') + with repo: + # desync state and labels + pr.labels.remove('CI 🤖') - pr.post_comment('hansen r+', 'reviewer') - run_crons(env) + pr.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() assert pr.labels == {'seen 🙂', 'CI 🤖', 'r+ 👌', 'merging 👷'},\ "labels should be resynchronised" - def test_other_tags(self, env, repo): - m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) - repo.make_ref('heads/master', m) + def test_other_tags(self, env, repo, config): + with repo: + m = repo.make_commit(None, 'initial', None, tree={'a': 'some content'}) + repo.make_ref('heads/master', m) - c = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) - pr = repo.make_pr('gibberish', 'blahblah', target='master', ctid=c, user='user') + c = repo.make_commit(m, 'replace file contents', None, tree={'a': 'some other content'}) + pr = repo.make_pr(title='gibberish', body='blahblah', target='master', head=c) - # "foreign" labels - pr.labels.update(('L1', 'L2')) + with repo: + # "foreign" labels + pr.labels.update(('L1', 'L2')) [pr_id] = env['runbot_merge.pull_requests'].search([ ('repository.name', '=', repo.name), ('number', '=', pr.number), ]) - repo.post_status(c, 'success', 'legal/cla') - repo.post_status(c, 'success', 'ci/runbot') - - run_crons(env) + with repo: + repo.post_status(c, 'success', 'legal/cla') + repo.post_status(c, 'success', 'ci/runbot') + env.run_crons() assert pr.labels == {'seen 🙂', 'CI 🤖', 'L1', 'L2'}, "should not lose foreign labels" - pr.post_comment('hansen r+', 'reviewer') - run_crons(env) + with repo: + pr.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() assert pr.labels == {'seen 🙂', 'CI 🤖', 'r+ 👌', 'merging 👷', 'L1', 'L2'},\ "should not lose foreign labels" diff --git a/runbot_merge/tests/test_multirepo.py 
b/runbot_merge/tests/test_multirepo.py index 25019487..c9aa2014 100644 --- a/runbot_merge/tests/test_multirepo.py +++ b/runbot_merge/tests/test_multirepo.py @@ -9,23 +9,29 @@ import json import pytest -from test_utils import re_matches, run_crons, get_partner +from test_utils import re_matches, get_partner @pytest.fixture -def repo_a(make_repo): - return make_repo('a') +def repo_a(project, make_repo): + repo = make_repo('a') + project.write({'repo_ids': [(0, 0, {'name': repo.name})]}) + return repo @pytest.fixture -def repo_b(make_repo): - return make_repo('b') +def repo_b(project, make_repo): + repo = make_repo('b') + project.write({'repo_ids': [(0, 0, {'name': repo.name})]}) + return repo @pytest.fixture -def repo_c(make_repo): - return make_repo('c') +def repo_c(project, make_repo): + repo = make_repo('c') + project.write({'repo_ids': [(0, 0, {'name': repo.name})]}) + return repo -def make_pr(repo, prefix, trees, *, target='master', user='user', label=None, +def make_pr(repo, prefix, trees, *, target='master', user, statuses=(('ci/runbot', 'success'), ('legal/cla', 'success')), - reviewer='reviewer'): + reviewer): """ :type repo: fake_github.Repo :type prefix: str @@ -37,14 +43,16 @@ def make_pr(repo, prefix, trees, *, target='master', user='user', label=None, :type reviewer: str | None :rtype: fake_github.PR """ - base = repo.commit('heads/{}'.format(target)) - tree = repo.read_tree(base) - c = base.id - for i, t in enumerate(trees): - tree.update(t) - c = repo.make_commit(c, 'commit_{}_{:02}'.format(prefix, i), None, - tree=dict(tree)) - pr = repo.make_pr('title {}'.format(prefix), 'body {}'.format(prefix), target=target, ctid=c, user=user, label=label) + *_, c = repo.make_commits( + 'heads/{}'.format(target), + *( + repo.Commit('commit_{}_{:02}'.format(prefix, i), tree=tree) + for i, tree in enumerate(trees) + ), + ref='heads/{}'.format(prefix) + ) + pr = repo.make_pr(title='title {}'.format(prefix), body='body {}'.format(prefix), + target=target, head=prefix, token=user) for context, result in statuses: repo.post_status(c, result, context) if reviewer: @@ -62,35 +70,51 @@ def make_branch(repo, name, message, tree, protect=True): repo.protect(name) return c -def test_stage_one(env, project, repo_a, repo_b): +def test_stage_one(env, project, repo_a, repo_b, config): """ First PR is non-matched from A => should not select PR from B """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing') + with repo_a: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + pr_a = make_pr( + repo_a, 'A', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token']) - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-other-thing') - - run_crons(env) + with repo_b: + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + pr_b = make_pr( + repo_b, 'B', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() assert to_pr(env, pr_a).state == 'ready' assert to_pr(env, pr_a).staging_id assert to_pr(env, pr_b).state == 'ready' assert not to_pr(env, pr_b).staging_id -def test_stage_match(env, project, repo_a, repo_b): +def test_stage_match(env, project, repo_a, repo_b, config): """ First PR is matched from A, => should select matched PR from B """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr(repo_a, 'A', 
[{'a': 'a_1'}], label='do-a-thing') - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing') - - run_crons(env) + with repo_a: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + pr_a = make_pr( + repo_a, 'do-a-thing', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + with repo_b: + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + pr_b = make_pr(repo_b, 'do-a-thing', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() pr_a = to_pr(env, pr_a) pr_b = to_pr(env, pr_b) @@ -102,7 +126,7 @@ def test_stage_match(env, project, repo_a, repo_b): assert pr_a.staging_id == pr_b.staging_id, \ "branch-matched PRs should be part of the same staging" -def test_unmatch_patch(env, project, repo_a, repo_b): +def test_unmatch_patch(env, project, repo_a, repo_b, config): """ When editing files via the UI for a project you don't have write access to, a branch called patch-XXX is automatically created in your profile to hold the change. @@ -115,13 +139,21 @@ def test_unmatch_patch(env, project, repo_a, repo_b): -> PRs with a branch name of patch-* should not be label-matched """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='patch-1') - - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='patch-1') - - run_crons(env) + with repo_a: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + pr_a = make_pr( + repo_a, 'patch-1', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + with repo_b: + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + pr_b = make_pr( + repo_b, 'patch-1', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() pr_a = to_pr(env, pr_a) pr_b = to_pr(env, pr_b) @@ -130,20 +162,27 @@ def test_unmatch_patch(env, project, repo_a, repo_b): assert pr_b.state == 'ready' assert not pr_b.staging_id, 'patch-* PRs should not be branch-matched' -def test_sub_match(env, project, repo_a, repo_b, repo_c): +def test_sub_match(env, project, repo_a, repo_b, repo_c, config): """ Branch-matching should work on a subset of repositories """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - # no pr here - - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - pr_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing') - - make_branch(repo_c, 'master', 'initial', {'a': 'c_0'}) - pr_c = make_pr(repo_c, 'C', [{'a': 'c_1'}], label='do-a-thing') - - run_crons(env) + with repo_a: # no pr here + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + with repo_b: + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + pr_b = make_pr( + repo_b, 'do-a-thing', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + with repo_c: + make_branch(repo_c, 'master', 'initial', {'a': 'c_0'}) + pr_c = make_pr( + repo_c, 'do-a-thing', [{'a': 'c_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() pr_b = to_pr(env, pr_b) pr_c = to_pr(env, pr_c) @@ -167,28 +206,44 @@ def test_sub_match(env, project, repo_a, repo_b, repo_c): repo_c.name + '^': c_staging.parents[0], } -def test_merge_fail(env, project, repo_a, repo_b, users): +def 
test_merge_fail(env, project, repo_a, repo_b, users, config): """ In a matched-branch scenario, if merging in one of the linked repos fails it should revert the corresponding merges """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - # first set of matched PRs - pr1a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing') - pr1b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing') + # first set of matched PRs + pr1a = make_pr( + repo_a, 'do-a-thing', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + pr1b = make_pr( + repo_b, 'do-a-thing', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - # add a conflicting commit to B so the staging fails - repo_b.make_commit('heads/master', 'cn', None, tree={'a': 'cn'}) + # add a conflicting commit to B so the staging fails + repo_b.make_commit('heads/master', 'cn', None, tree={'a': 'cn'}) - # and a second set of PRs which should get staged while the first set - # fails - pr2a = make_pr(repo_a, 'A2', [{'b': 'ok'}], label='do-b-thing') - pr2b = make_pr(repo_b, 'B2', [{'b': 'ok'}], label='do-b-thing') - - run_crons(env) + # and a second set of PRs which should get staged while the first set + # fails + pr2a = make_pr( + repo_a, 'do-b-thing', [{'b': 'ok'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + pr2b = make_pr( + repo_b, 'do-b-thing', [{'b': 'ok'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() s2 = to_pr(env, pr2a) | to_pr(env, pr2b) st = env['runbot_merge.stagings'].search([]) @@ -208,33 +263,43 @@ def test_merge_fail(env, project, repo_a, repo_b, users): for c in repo_a.log('heads/staging.master') ] == [ re_matches('^force rebuild'), - 'commit_A2_00\n\ncloses %s#2\n\nSigned-off-by: %s' % (repo_a.name, reviewer), + 'commit_do-b-thing_00\n\ncloses %s#2\n\nSigned-off-by: %s' % (repo_a.name, reviewer), 'initial' ], "dummy commit + squash-merged PR commit + root commit" -def test_ff_fail(env, project, repo_a, repo_b): +def test_ff_fail(env, project, repo_a, repo_b, config): """ In a matched-branch scenario, fast-forwarding one of the repos fails the entire thing should be rolled back """ project.batch_limit = 1 - root_a = make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing') - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing') + with repo_a, repo_b: + root_a = make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + make_pr( + repo_a, 'do-a-thing', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - run_crons(env) + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + make_pr( + repo_b, 'do-a-thing', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() # add second commit blocking FF - cn = repo_b.make_commit('heads/master', 'second', None, tree={'a': 'b_0', 'b': 'other'}) + with repo_b: + cn = repo_b.make_commit('heads/master', 'second', None, tree={'a': 'b_0', 'b': 'other'}) assert repo_b.commit('heads/master').id == cn - repo_a.post_status('heads/staging.master', 
'success', 'ci/runbot') - repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - - env['runbot_merge.project']._check_progress() + with repo_a, repo_b: + repo_a.post_status('heads/staging.master', 'success', 'ci/runbot') + repo_a.post_status('heads/staging.master', 'success', 'legal/cla') + repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') + repo_b.post_status('heads/staging.master', 'success', 'legal/cla') + env.run_crons('runbot_merge.merge_cron') assert repo_b.commit('heads/master').id == cn,\ "B should still be at the conflicting commit" assert repo_a.commit('heads/master').id == root_a,\ @@ -246,23 +311,32 @@ def test_ff_fail(env, project, repo_a, repo_b): assert len(st.batch_ids.prs) == 2 class TestCompanionsNotReady: - def test_one_pair(self, env, project, repo_a, repo_b, owner, users): + def test_one_pair(self, env, project, repo_a, repo_b, config, users): """ If the companion of a ready branch-matched PR is not ready, they should not get staged """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - # pr_a is born ready - p_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing') + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + # pr_a is born ready + p_a = make_pr( + repo_a, 'do-a-thing', [{'a': 'a_1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) - p_b = make_pr(repo_b, 'B', [{'a': 'b_1'}], label='do-a-thing', reviewer=None) + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + p_b = make_pr( + repo_b, 'do-a-thing', [{'a': 'b_1'}], + user=config['role_user']['token'], + reviewer=None, + ) pr_a = to_pr(env, p_a) pr_b = to_pr(env, p_b) - assert pr_a.label == pr_b.label == '{}:do-a-thing'.format(owner) + assert pr_a.label == pr_b.label == '{}:do-a-thing'.format(config['github']['owner']) - run_crons(env) + env.run_crons() assert pr_a.state == 'ready' assert pr_b.state == 'validated' @@ -275,28 +349,41 @@ class TestCompanionsNotReady: (users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." % (repo_b.name, p_b.number)), ] # ensure the message is only sent once per PR - env['runbot_merge.pull_requests']._check_linked_prs_statuses() + env.run_crons('runbot_merge.check_linked_prs_status') assert p_a.comments == [ (users['reviewer'], 'hansen r+'), (users['user'], "Linked pull request(s) %s#%d not ready. Linked PRs are not staged until all of them are ready." 
% (repo_b.name, p_b.number)), ] assert p_b.comments == [] - def test_two_of_three_unready(self, env, project, repo_a, repo_b, repo_c, owner, users): + def test_two_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config): """ In a 3-batch, if two of the PRs are not ready both should be linked by the first one """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) - pr_a = make_pr(repo_a, 'A', [{'f': 'a1'}], label='a-thing', reviewer=None) + with repo_a, repo_b, repo_c: + make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) + pr_a = make_pr( + repo_a, 'a-thing', [{'f': 'a1'}], + user=config['role_user']['token'], + reviewer=None, + ) - make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) - pr_b = make_pr(repo_b, 'B', [{'f': 'b1'}], label='a-thing') + make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) + pr_b = make_pr( + repo_b, 'a-thing', [{'f': 'b1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) - pr_c = make_pr(repo_c, 'C', [{'f': 'c1'}], label='a-thing', reviewer=None) + make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) + pr_c = make_pr( + repo_c, 'a-thing', [{'f': 'c1'}], + user=config['role_user']['token'], + reviewer=None, + ) + env.run_crons() - run_crons(env) assert pr_a.comments == [] assert pr_b.comments == [ (users['reviewer'], 'hansen r+'), @@ -307,21 +394,34 @@ class TestCompanionsNotReady: ] assert pr_c.comments == [] - def test_one_of_three_unready(self, env, project, repo_a, repo_b, repo_c, owner, users): + def test_one_of_three_unready(self, env, project, repo_a, repo_b, repo_c, users, config): """ In a 3-batch, if one PR is not ready it should be linked on the other two """ project.batch_limit = 1 - make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) - pr_a = make_pr(repo_a, 'A', [{'f': 'a1'}], label='a-thing', reviewer=None) + with repo_a, repo_b, repo_c: + make_branch(repo_a, 'master', 'initial', {'f': 'a0'}) + pr_a = make_pr( + repo_a, 'a-thing', [{'f': 'a1'}], + user=config['role_user']['token'], + reviewer=None, + ) - make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) - pr_b = make_pr(repo_b, 'B', [{'f': 'b1'}], label='a-thing') + make_branch(repo_b, 'master', 'initial', {'f': 'b0'}) + pr_b = make_pr( + repo_b, 'a-thing', [{'f': 'b1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) - pr_c = make_pr(repo_c, 'C', [{'f': 'c1'}], label='a-thing') + make_branch(repo_c, 'master', 'initial', {'f': 'c0'}) + pr_c = make_pr( + repo_c, 'a-thing', [{'f': 'c1'}], + user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) + env.run_crons() - run_crons(env) assert pr_a.comments == [] assert pr_b.comments == [ (users['reviewer'], 'hansen r+'), @@ -337,26 +437,32 @@ class TestCompanionsNotReady: )) ] -def test_other_failed(env, project, repo_a, repo_b, owner, users): +def test_other_failed(env, project, repo_a, repo_b, users, config): """ In a non-matched-branch scenario, if the companion staging (copy of targets) fails when built with the PR, it should provide a non-useless message """ - make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) - # pr_a is born ready - pr_a = make_pr(repo_a, 'A', [{'a': 'a_1'}], label='do-a-thing') + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a': 'a_0'}) + # pr_a is born ready + pr_a = make_pr( + repo_a, 'do-a-thing', [{'a': 'a_1'}], + 
user=config['role_user']['token'], + reviewer=config['role_reviewer']['token'], + ) - make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + make_branch(repo_b, 'master', 'initial', {'a': 'b_0'}) + env.run_crons() - run_crons(env) pr = to_pr(env, pr_a) assert pr.staging_id - repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_a.post_status('heads/staging.master', 'success', 'ci/runbot', target_url="http://example.org/a") - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot', target_url="http://example.org/b") - run_crons(env) + with repo_a, repo_b: + repo_a.post_status('heads/staging.master', 'success', 'legal/cla') + repo_a.post_status('heads/staging.master', 'success', 'ci/runbot', target_url="http://example.org/a") + repo_b.post_status('heads/staging.master', 'success', 'legal/cla') + repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot', target_url="http://example.org/b") + env.run_crons() sth = repo_b.commit('heads/staging.master').id assert not pr.staging_id @@ -367,22 +473,23 @@ def test_other_failed(env, project, repo_a, repo_b, owner, users): ] class TestMultiBatches: - def test_batching(self, env, project, repo_a, repo_b): + def test_batching(self, env, project, repo_a, repo_b, config): """ If multiple batches (label groups) are ready they should get batched together (within the limits of teh project's batch limit) """ project.batch_limit = 3 - make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) - make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) - prs = [( - a and to_pr(env, make_pr(repo_a, 'A{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], label='batch{}'.format(i))), - b and to_pr(env, make_pr(repo_b, 'B{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], label='batch{}'.format(i))) - ) - for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) - ] + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) + make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) - run_crons(env) + prs = [( + a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)), + b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)) + ) + for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) + ] + env.run_crons() st = env['runbot_merge.stagings'].search([]) assert st @@ -398,20 +505,20 @@ class TestMultiBatches: assert not prs[3][1].staging_id assert not prs[4][0].staging_id - def test_batching_split(self, env, repo_a, repo_b): + def test_batching_split(self, env, repo_a, repo_b, config): """ If a staging fails, it should get split properly across repos """ - make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) - make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a': 'a0'}) + make_branch(repo_b, 'master', 'initial', {'b': 'b0'}) - prs = [( - a and to_pr(env, make_pr(repo_a, 'A{}'.format(i), [{'a{}'.format(i): 'a{}'.format(i)}], label='batch{}'.format(i))), - b and to_pr(env, make_pr(repo_b, 'B{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], label='batch{}'.format(i))) - ) - for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) - ] - - run_crons(env) + prs = [( + a and to_pr(env, make_pr(repo_a, 'batch{}'.format(i), 
[{'a{}'.format(i): 'a{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)), + b and to_pr(env, make_pr(repo_b, 'batch{}'.format(i), [{'b{}'.format(i): 'b{}'.format(i)}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],)) + ) + for i, (a, b) in enumerate([(1, 1), (0, 1), (1, 1), (1, 1), (1, 0)]) + ] + env.run_crons() st0 = env['runbot_merge.stagings'].search([]) assert len(st0.batch_ids) == 5 @@ -419,10 +526,10 @@ class TestMultiBatches: # mark b.staging as failed -> should create two splits with (0, 1) # and (2, 3, 4) and stage the first one - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot') - - run_crons(env) + with repo_b: + repo_b.post_status('heads/staging.master', 'success', 'legal/cla') + repo_b.post_status('heads/staging.master', 'failure', 'ci/runbot') + env.run_crons() assert not st0.active @@ -440,21 +547,21 @@ class TestMultiBatches: assert sp.mapped('batch_ids.prs') == \ prs[2][0] | prs[2][1] | prs[3][0] | prs[3][1] | prs[4][0] -def test_urgent(env, repo_a, repo_b): +def test_urgent(env, repo_a, repo_b, config): """ Either PR of a co-dependent pair being p=0 leads to the entire pair being prioritized """ - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) + make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - pr_a = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}], label='batch', reviewer=None, statuses=[]) - pr_b = make_pr(repo_b, 'B', [{'b1': 'b'}, {'b2': 'b'}], label='batch', reviewer=None, statuses=[]) - pr_c = make_pr(repo_a, 'C', [{'c1': 'c', 'c2': 'c'}]) + pr_a = make_pr(repo_a, 'batch', [{'a1': 'a'}, {'a2': 'a'}], user=config['role_user']['token'], reviewer=None, statuses=[]) + pr_b = make_pr(repo_b, 'batch', [{'b1': 'b'}, {'b2': 'b'}], user=config['role_user']['token'], reviewer=None, statuses=[]) + pr_c = make_pr(repo_a, 'C', [{'c1': 'c', 'c2': 'c'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - pr_a.post_comment('hansen rebase-merge', 'reviewer') - pr_b.post_comment('hansen rebase-merge p=0', 'reviewer') - - run_crons(env) + pr_a.post_comment('hansen rebase-merge', config['role_reviewer']['token']) + pr_b.post_comment('hansen rebase-merge p=0', config['role_reviewer']['token']) + env.run_crons() # should have batched pr_a and pr_b despite neither being reviewed or # approved p_a, p_b = to_pr(env, pr_a), to_pr(env, pr_b) @@ -464,79 +571,81 @@ def test_urgent(env, repo_a, repo_b): assert not p_c.staging_id class TestBlocked: - def test_merge_method(self, env, repo_a): - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) + def test_merge_method(self, env, repo_a, config): + with repo_a: + make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - pr = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}]) - - run_crons(env) + pr = make_pr(repo_a, 'A', [{'a1': 'a'}, {'a2': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + env.run_crons() p = to_pr(env, pr) assert p.state == 'ready' - print(p.id, p.squash, p.merge_method) assert p.blocked - pr.post_comment('hansen rebase-merge', 'reviewer') + with repo_a: pr.post_comment('hansen rebase-merge', config['role_reviewer']['token']) assert not p.blocked - def test_linked_closed(self, env, repo_a, repo_b): - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - 
make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) + def test_linked_closed(self, env, repo_a, repo_b, config): + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) + make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - pr = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx') - b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx', statuses=[]) - run_crons(env) + pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'], statuses=[]) + env.run_crons() p = to_pr(env, pr) assert p.blocked - b.close() + with repo_b: b.close() # FIXME: find a way for PR.blocked to depend on linked PR somehow so this isn't needed p.invalidate_cache(['blocked'], [p.id]) assert not p.blocked - def test_linked_merged(self, env, repo_a, repo_b): - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) + def test_linked_merged(self, env, repo_a, repo_b, config): + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) + make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx') + b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + env.run_crons() # stage b and c - run_crons(env) # stage b and c - - repo_a.post_status('heads/staging.master', 'success', 'legal/cla') - repo_a.post_status('heads/staging.master', 'success', 'ci/runbot') - repo_b.post_status('heads/staging.master', 'success', 'legal/cla') - repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') - - run_crons(env) # merge b and c + with repo_a, repo_b: + repo_a.post_status('heads/staging.master', 'success', 'legal/cla') + repo_a.post_status('heads/staging.master', 'success', 'ci/runbot') + repo_b.post_status('heads/staging.master', 'success', 'legal/cla') + repo_b.post_status('heads/staging.master', 'success', 'ci/runbot') + env.run_crons() # merge b and c assert to_pr(env, b).state == 'merged' - pr = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx') - run_crons(env) # merge b and c + with repo_a: + pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + env.run_crons() # merge b and c p = to_pr(env, pr) assert not p.blocked - def test_linked_unready(self, env, repo_a, repo_b): + def test_linked_unready(self, env, repo_a, repo_b, config): """ Create a PR A linked to a non-ready PR B, * A is blocked by default * A is not blocked if A.p=0 * A is not blocked if B.p=0 """ - make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) - make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) + with repo_a, repo_b: + make_branch(repo_a, 'master', 'initial', {'a0': 'a'}) + make_branch(repo_b, 'master', 'initial', {'b0': 'b'}) - a = make_pr(repo_a, 'A', [{'a1': 'a'}], label='xxx') - b = make_pr(repo_b, 'B', [{'b1': 'b'}], label='xxx', statuses=[]) - run_crons(env) + a = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'], statuses=[]) + env.run_crons() pr_a = to_pr(env, a) assert pr_a.blocked - a.post_comment('hansen p=0', 'reviewer') + with repo_a: a.post_comment('hansen p=0', config['role_reviewer']['token']) 
         assert not pr_a.blocked
-        a.post_comment('hansen p=2', 'reviewer')
+        with repo_a: a.post_comment('hansen p=2', config['role_reviewer']['token'])
         assert pr_a.blocked
-        b.post_comment('hansen p=0', 'reviewer')
+        with repo_b: b.post_comment('hansen p=0', config['role_reviewer']['token'])
         assert not pr_a.blocked
diff --git a/runbot_merge/tests/test_utils.py b/runbot_merge/tests/test_utils.py
index 432c48f9..5f52b1bb 100644
--- a/runbot_merge/tests/test_utils.py
+++ b/runbot_merge/tests/test_utils.py
@@ -12,13 +12,6 @@ class re_matches:
     def __repr__(self):
         return '~' + self._r.pattern + '~'
 
-def run_crons(env):
-    "Helper to run all crons (in a relevant order) except for the fetch PR one"
-    env['runbot_merge.commit']._notify()
-    env['runbot_merge.project']._check_progress()
-    env['runbot_merge.pull_requests']._check_linked_prs_statuses()
-    env['runbot_merge.project']._send_feedback()
-
 def get_partner(env, gh_login):
     return env['res.partner'].search([('github_login', '=', gh_login)])
 
@@ -30,5 +23,5 @@ def _simple_init(repo):
     repo.make_ref('heads/master', m)
     c1 = repo.make_commit(m, 'first', None, tree={'m': 'c1'})
     c2 = repo.make_commit(c1, 'second', None, tree={'m': 'c2'})
-    prx = repo.make_pr('title', 'body', target='master', ctid=c2, user='user')
+    prx = repo.make_pr(title='title', body='body', target='master', head=c2)
     return prx
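
Editor's note: every hunk above follows the same migration pattern (context-managed repos, explicit keyword arguments to make_pr, explicit tokens instead of role names, and env.run_crons() instead of the removed run_crons helper). The snippet below is a minimal sketch of that idiom, for reference only and not part of the patch; it assumes the `env`, `repo` and `config` fixtures provided by the updated conftest, and the test name and tree contents are illustrative.

def test_example(env, repo, config):
    # GitHub-side setup goes inside the repo context manager; the hunks
    # above consistently assert only after the block has exited
    with repo:
        m = repo.make_commit(None, 'initial', None, tree={'m': 'm'})
        repo.make_ref('heads/master', m)

        c = repo.make_commit(m, 'first', None, tree={'m': 'c'})
        # make_pr now takes explicit keywords; head may be a commit id
        # (a PR ref is generated automatically) or an existing branch name
        prx = repo.make_pr(title='title', body='body', target='master', head=c)
        repo.post_status(prx.head, 'success', 'legal/cla')
        repo.post_status(prx.head, 'success', 'ci/runbot')
        # role names are gone: comments and reviews authenticate with a token
        prx.post_comment('hansen r+', config['role_reviewer']['token'])

    # the removed run_crons(env) helper is replaced by env.run_crons(),
    # optionally restricted to specific crons by passing their xmlids
    env.run_crons()

    pr = env['runbot_merge.pull_requests'].search([
        ('repository.name', '=', repo.name),
        ('number', '=', prx.number),
    ])
    assert pr.state == 'ready'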