2018-03-14 16:37:46 +07:00
|
|
|
import collections
import collections.abc
import itertools
import json as json_
import logging

import requests

from odoo.tools import topological_sort

from . import exceptions, utils
|
2018-03-14 16:37:46 +07:00
|
|
|
|
[FIX] runbot_merge: handle the bot user not being able to comment
If the author of a PR has blocked the bot user, commenting on the PR
will fail. While comment failure is technically handled in the feedback
cron, the cron will simply retry commenting on every run filling the
log with useless unactionable garbage.
Retrying is the right thing to do in the normal case (e.g. changing tags
often has transient failures), but if we figure out we're blocked we
might as well just log a warning and drop the comment on the floor, it's
unlikely the situation will resolve itself.
Couldn't test it, because the block API is a developer preview and I
just can't get it to work anyway (404 on /user/blocks even providing the
suggested media type).
And the way the block is inferred is iffy (based on an error message),
the error body doesn't seem to provide any clean / clear cut error code:
{
"message": "Validation Failed",
"errors": [
{
"resource": "IssueComment",
"code": "unprocessable",
"field": "data",
"message": "User is blocked"
}
],
"documentation_url": "https://developer.github.com/v3/issues/comments/#create-a-comment"
}
No useful headers either.
Fixes #127
2019-05-07 15:36:53 +07:00
|
|
|
def _is_json(r):
    """ Checks whether response ``r`` advertises a JSON-ish content type
    (plain JSON or the legacy javascript mime github sometimes uses).

    Falsy ``r`` (e.g. ``None``) is returned as-is.
    """
    if not r:
        return r
    content_type = r.headers.get('content-type', '')
    return content_type.startswith(('application/json', 'application/javascript'))
|
|
|
|
|
2018-03-14 16:37:46 +07:00
|
|
|
_logger = logging.getLogger(__name__)
|
|
|
|
class GH(object):
|
|
|
|
def __init__(self, token, repo):
|
|
|
|
self._url = 'https://api.github.com'
|
|
|
|
self._repo = repo
|
|
|
|
session = self._session = requests.Session()
|
2018-03-26 18:08:49 +07:00
|
|
|
session.headers['Authorization'] = 'token {}'.format(token)
|
2018-03-14 16:37:46 +07:00
|
|
|
|
2018-08-28 20:42:28 +07:00
|
|
|
def __call__(self, method, path, params=None, json=None, check=True):
|
2018-03-14 16:37:46 +07:00
|
|
|
"""
|
|
|
|
:type check: bool | dict[int:Exception]
|
|
|
|
"""
|
|
|
|
r = self._session.request(
|
|
|
|
method,
|
2018-03-26 18:08:49 +07:00
|
|
|
'{}/repos/{}/{}'.format(self._url, self._repo, path),
|
2018-08-28 20:42:28 +07:00
|
|
|
params=params,
|
2018-03-14 16:37:46 +07:00
|
|
|
json=json
|
|
|
|
)
|
|
|
|
if check:
|
|
|
|
if isinstance(check, collections.Mapping):
|
|
|
|
exc = check.get(r.status_code)
|
|
|
|
if exc:
|
2018-10-17 16:31:52 +07:00
|
|
|
raise exc(r.text)
|
2019-02-28 20:45:31 +07:00
|
|
|
if r.status_code >= 400:
|
|
|
|
headers = '\n'.join('\t%s: %s' % (h, v) for h, v in r.headers.items())
|
[FIX] runbot_merge: handle the bot user not being able to comment
If the author of a PR has blocked the bot user, commenting on the PR
will fail. While comment failure is technically handled in the feedback
cron, the cron will simply retry commenting on every run filling the
log with useless unactionable garbage.
Retrying is the right thing to do in the normal case (e.g. changing tags
often has transient failures), but if we figure out we're blocked we
might as well just log a warning and drop the comment on the floor, it's
unlikely the situation will resolve itself.
Couldn't test it, because the block API is a developer preview and I
just can't get it to work anyway (404 on /user/blocks even providing the
suggested media type).
And the way the block is inferred is iffy (based on an error message),
the error body doesn't seem to provide any clean / clear cut error code:
{
"message": "Validation Failed",
"errors": [
{
"resource": "IssueComment",
"code": "unprocessable",
"field": "data",
"message": "User is blocked"
}
],
"documentation_url": "https://developer.github.com/v3/issues/comments/#create-a-comment"
}
No useful headers either.
Fixes #127
2019-05-07 15:36:53 +07:00
|
|
|
if _is_json(r):
|
2019-02-28 20:45:31 +07:00
|
|
|
body = r.json()
|
|
|
|
elif r.encoding is not None:
|
|
|
|
body = utils.shorten(r.text, 200)
|
|
|
|
else:
|
|
|
|
body = utils.shorten(r.content, 200)
|
|
|
|
|
|
|
|
_logger.error("%(method)s /%(repo)s/%(path)s\n=> %(status)d %(reason)s\n%(headers)s\n\n\t%(body)r\n====================", {
|
|
|
|
'status': r.status_code,
|
|
|
|
'reason': r.reason,
|
|
|
|
'method': method,
|
|
|
|
'repo': self._repo,
|
|
|
|
'path': path,
|
|
|
|
'headers': headers,
|
|
|
|
'body': body
|
|
|
|
})
|
|
|
|
if not isinstance(body, (bytes, str)):
|
|
|
|
raise requests.HTTPError(
|
|
|
|
json_.dumps(body, indent=4),
|
|
|
|
response=r
|
|
|
|
)
|
2018-03-14 16:37:46 +07:00
|
|
|
r.raise_for_status()
|
|
|
|
return r
|
|
|
|
|
2018-11-22 00:43:05 +07:00
|
|
|
def user(self, username):
|
|
|
|
r = self._session.get("{}/users/{}".format(self._url, username))
|
|
|
|
r.raise_for_status()
|
|
|
|
return r.json()
|
|
|
|
|
2018-03-14 16:37:46 +07:00
|
|
|
def head(self, branch):
|
2018-03-26 18:08:49 +07:00
|
|
|
d = self('get', 'git/refs/heads/{}'.format(branch)).json()
|
2018-03-14 16:37:46 +07:00
|
|
|
|
2018-03-26 18:08:49 +07:00
|
|
|
assert d['ref'] == 'refs/heads/{}'.format(branch)
|
2018-03-14 16:37:46 +07:00
|
|
|
assert d['object']['type'] == 'commit'
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug("head(%s, %s) -> %s", self._repo, branch, d['object']['sha'])
|
2018-03-14 16:37:46 +07:00
|
|
|
return d['object']['sha']
|
|
|
|
|
|
|
|
def commit(self, sha):
|
2018-09-20 14:25:13 +07:00
|
|
|
c = self('GET', 'git/commits/{}'.format(sha)).json()
|
|
|
|
_logger.debug('commit(%s, %s) -> %s', self._repo, sha, shorten(c['message']))
|
|
|
|
return c
|
2018-03-14 16:37:46 +07:00
|
|
|
|
|
|
|
def comment(self, pr, message):
|
[FIX] runbot_merge: handle the bot user not being able to comment
If the author of a PR has blocked the bot user, commenting on the PR
will fail. While comment failure is technically handled in the feedback
cron, the cron will simply retry commenting on every run filling the
log with useless unactionable garbage.
Retrying is the right thing to do in the normal case (e.g. changing tags
often has transient failures), but if we figure out we're blocked we
might as well just log a warning and drop the comment on the floor, it's
unlikely the situation will resolve itself.
Couldn't test it, because the block API is a developer preview and I
just can't get it to work anyway (404 on /user/blocks even providing the
suggested media type).
And the way the block is inferred is iffy (based on an error message),
the error body doesn't seem to provide any clean / clear cut error code:
{
"message": "Validation Failed",
"errors": [
{
"resource": "IssueComment",
"code": "unprocessable",
"field": "data",
"message": "User is blocked"
}
],
"documentation_url": "https://developer.github.com/v3/issues/comments/#create-a-comment"
}
No useful headers either.
Fixes #127
2019-05-07 15:36:53 +07:00
|
|
|
# if the mergebot user has been blocked by the PR author, this will
|
|
|
|
# fail, but we don't want the closing of the PR to fail, or for the
|
|
|
|
# feedback cron to get stuck
|
|
|
|
try:
|
|
|
|
self('POST', 'issues/{}/comments'.format(pr), json={'body': message})
|
|
|
|
except requests.HTTPError as r:
|
|
|
|
if _is_json(r.response):
|
|
|
|
body = r.response.json()
|
|
|
|
if any(e.message == 'User is blocked' for e in (body.get('errors') or [])):
|
|
|
|
_logger.warn("comment(%s:%s) failed: user likely blocked", self._repo, pr)
|
|
|
|
return
|
|
|
|
raise
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug('comment(%s, %s, %s)', self._repo, pr, shorten(message))
|
2018-03-14 16:37:46 +07:00
|
|
|
|
|
|
|
def close(self, pr, message):
|
|
|
|
self.comment(pr, message)
|
2018-03-26 18:08:49 +07:00
|
|
|
self('PATCH', 'pulls/{}'.format(pr), json={'state': 'closed'})
|
2018-03-14 16:37:46 +07:00
|
|
|
|
2018-03-28 21:43:48 +07:00
|
|
|
def change_tags(self, pr, from_, to_):
|
|
|
|
to_add, to_remove = to_ - from_, from_ - to_
|
|
|
|
for t in to_remove:
|
|
|
|
r = self('DELETE', 'issues/{}/labels/{}'.format(pr, t), check=False)
|
|
|
|
# successful deletion or attempt to delete a tag which isn't there
|
|
|
|
# is fine, otherwise trigger an error
|
|
|
|
if r.status_code not in (200, 404):
|
|
|
|
r.raise_for_status()
|
|
|
|
|
|
|
|
if to_add:
|
|
|
|
self('POST', 'issues/{}/labels'.format(pr), json=list(to_add))
|
|
|
|
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug('change_tags(%s, %s, remove=%s, add=%s)', self._repo, pr, to_remove, to_add)
|
|
|
|
|
2018-03-14 16:37:46 +07:00
|
|
|
def fast_forward(self, branch, sha):
|
|
|
|
try:
|
2018-03-26 18:08:49 +07:00
|
|
|
self('patch', 'git/refs/heads/{}'.format(branch), json={'sha': sha})
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug('fast_forward(%s, %s, %s) -> OK', self._repo, branch, sha)
|
2018-03-14 16:37:46 +07:00
|
|
|
except requests.HTTPError:
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug('fast_forward(%s, %s, %s) -> ERROR', self._repo, branch, sha, exc_info=True)
|
2018-03-14 16:37:46 +07:00
|
|
|
raise exceptions.FastForwardError()
|
|
|
|
|
|
|
|
def set_ref(self, branch, sha):
|
|
|
|
# force-update ref
|
2018-03-26 18:08:49 +07:00
|
|
|
r = self('patch', 'git/refs/heads/{}'.format(branch), json={
|
2018-03-14 16:37:46 +07:00
|
|
|
'sha': sha,
|
|
|
|
'force': True,
|
|
|
|
}, check=False)
|
2018-10-17 16:31:52 +07:00
|
|
|
|
|
|
|
status0 = r.status_code
|
|
|
|
_logger.debug(
|
|
|
|
'set_ref(update, %s, %s, %s -> %s (%s)',
|
|
|
|
self._repo, branch, sha, status0,
|
|
|
|
'OK' if status0 == 200 else r.text or r.reason
|
|
|
|
)
|
|
|
|
if status0 == 200:
|
2018-03-14 16:37:46 +07:00
|
|
|
return
|
|
|
|
|
2018-06-07 19:44:44 +07:00
|
|
|
# 422 makes no sense but that's what github returns, leaving 404 just
|
|
|
|
# in case
|
2018-10-17 16:31:52 +07:00
|
|
|
status1 = None
|
|
|
|
if status0 in (404, 422):
|
2018-03-14 16:37:46 +07:00
|
|
|
# fallback: create ref
|
|
|
|
r = self('post', 'git/refs', json={
|
2018-03-26 18:08:49 +07:00
|
|
|
'ref': 'refs/heads/{}'.format(branch),
|
2018-03-14 16:37:46 +07:00
|
|
|
'sha': sha,
|
|
|
|
}, check=False)
|
2018-10-17 16:31:52 +07:00
|
|
|
status1 = r.status_code
|
|
|
|
_logger.debug(
|
|
|
|
'set_ref(create, %s, %s, %s) -> %s (%s)',
|
|
|
|
self._repo, branch, sha, status1,
|
|
|
|
'OK' if status1 == 201 else r.text or r.reason
|
|
|
|
)
|
|
|
|
if status1 == 201:
|
2018-03-14 16:37:46 +07:00
|
|
|
return
|
2018-10-17 16:31:52 +07:00
|
|
|
|
|
|
|
raise AssertionError("set_ref failed(%s, %s)" % (status0, status1))
|
2018-03-14 16:37:46 +07:00
|
|
|
|
2018-08-28 20:42:28 +07:00
|
|
|
def merge(self, sha, dest, message):
|
|
|
|
r = self('post', 'merges', json={
|
|
|
|
'base': dest,
|
|
|
|
'head': sha,
|
|
|
|
'commit_message': message,
|
|
|
|
}, check={409: exceptions.MergeError})
|
2018-10-08 21:30:51 +07:00
|
|
|
try:
|
|
|
|
r = r.json()
|
|
|
|
except Exception:
|
2018-10-17 16:31:52 +07:00
|
|
|
raise exceptions.MergeError("Got non-JSON reponse from github: %s %s (%s)" % (r.status_code, r.reason, r.text))
|
2018-09-20 14:25:13 +07:00
|
|
|
_logger.debug("merge(%s, %s, %s) -> %s", self._repo, dest, shorten(message), r['sha'])
|
2018-08-28 20:42:28 +07:00
|
|
|
return dict(r['commit'], sha=r['sha'])
|
|
|
|
|
|
|
|
def rebase(self, pr, dest, reset=False, commits=None):
|
|
|
|
""" Rebase pr's commits on top of dest, updates dest unless ``reset``
|
|
|
|
is set.
|
|
|
|
|
2019-07-31 14:19:39 +07:00
|
|
|
Returns the hash of the rebased head and a map of all PR commits (to the PR they were rebased to)
|
2018-08-28 20:42:28 +07:00
|
|
|
"""
|
2019-01-25 21:45:38 +07:00
|
|
|
logger = _logger.getChild('rebase')
|
2018-08-28 20:42:28 +07:00
|
|
|
original_head = self.head(dest)
|
|
|
|
if commits is None:
|
|
|
|
commits = self.commits(pr)
|
|
|
|
|
2019-01-25 21:45:38 +07:00
|
|
|
logger.debug("rebasing %s, %s on %s (reset=%s, commits=%s)",
|
|
|
|
self._repo, pr, dest, reset, len(commits))
|
|
|
|
|
2018-08-28 20:42:28 +07:00
|
|
|
assert commits, "can't rebase a PR with no commits"
|
|
|
|
for c in commits:
|
|
|
|
assert len(c['parents']) == 1, "can't rebase commits with more than one parent"
|
|
|
|
tmp_msg = 'temp rebasing PR %s (%s)' % (pr, c['sha'])
|
|
|
|
c['new_tree'] = self.merge(c['sha'], dest, tmp_msg)['tree']['sha']
|
|
|
|
|
|
|
|
prev = original_head
|
2019-07-31 14:19:39 +07:00
|
|
|
mapping = {}
|
2018-08-28 20:42:28 +07:00
|
|
|
for c in commits:
|
|
|
|
copy = self('post', 'git/commits', json={
|
|
|
|
'message': c['commit']['message'],
|
|
|
|
'tree': c['new_tree'],
|
|
|
|
'parents': [prev],
|
|
|
|
'author': c['commit']['author'],
|
|
|
|
'committer': c['commit']['committer'],
|
|
|
|
}, check={409: exceptions.MergeError}).json()
|
2019-01-25 21:45:38 +07:00
|
|
|
logger.debug('copied %s to %s (parent: %s)', c['sha'], copy['sha'], prev)
|
2019-07-31 14:19:39 +07:00
|
|
|
prev = mapping[c['sha']] = copy['sha']
|
2018-08-28 20:42:28 +07:00
|
|
|
|
|
|
|
if reset:
|
|
|
|
self.set_ref(dest, original_head)
|
2018-09-20 15:08:08 +07:00
|
|
|
else:
|
|
|
|
self.set_ref(dest, prev)
|
2018-08-28 20:42:28 +07:00
|
|
|
|
2019-01-25 21:45:38 +07:00
|
|
|
logger.debug('rebased %s, %s on %s (reset=%s, commits=%s) -> %s',
|
|
|
|
self._repo, pr, dest, reset, len(commits),
|
2018-09-20 14:25:13 +07:00
|
|
|
prev)
|
2018-08-28 20:42:28 +07:00
|
|
|
# prev is updated after each copy so it's the rebased PR head
|
2019-07-31 14:19:39 +07:00
|
|
|
return prev, mapping
|
2018-03-14 16:37:46 +07:00
|
|
|
|
2018-06-21 14:55:14 +07:00
|
|
|
# fetch various bits of issues / prs to load them
|
|
|
|
def pr(self, number):
|
|
|
|
return (
|
|
|
|
self('get', 'issues/{}'.format(number)).json(),
|
|
|
|
self('get', 'pulls/{}'.format(number)).json()
|
|
|
|
)
|
|
|
|
|
|
|
|
def comments(self, number):
|
|
|
|
for page in itertools.count(1):
|
2018-08-28 20:42:28 +07:00
|
|
|
r = self('get', 'issues/{}/comments'.format(number), params={'page': page})
|
2018-06-21 14:55:14 +07:00
|
|
|
yield from r.json()
|
|
|
|
if not r.links.get('next'):
|
|
|
|
return
|
|
|
|
|
|
|
|
def reviews(self, number):
|
|
|
|
for page in itertools.count(1):
|
2018-08-28 20:42:28 +07:00
|
|
|
r = self('get', 'pulls/{}/reviews'.format(number), params={'page': page})
|
2018-06-21 14:55:14 +07:00
|
|
|
yield from r.json()
|
|
|
|
if not r.links.get('next'):
|
|
|
|
return
|
|
|
|
|
2018-09-20 20:02:18 +07:00
|
|
|
def commits_lazy(self, pr):
|
|
|
|
for page in itertools.count(1):
|
|
|
|
r = self('get', 'pulls/{}/commits'.format(pr), params={'page': page})
|
|
|
|
yield from r.json()
|
|
|
|
if not r.links.get('next'):
|
|
|
|
return
|
|
|
|
|
2018-08-28 20:42:28 +07:00
|
|
|
def commits(self, pr):
|
|
|
|
""" Returns a PR's commits oldest first (that's what GH does &
|
|
|
|
is what we want)
|
|
|
|
"""
|
2018-10-09 20:01:45 +07:00
|
|
|
commits = list(self.commits_lazy(pr))
|
|
|
|
# map shas to the position the commit *should* have
|
|
|
|
idx = {
|
|
|
|
c: i
|
|
|
|
for i, c in enumerate(topological_sort({
|
|
|
|
c['sha']: [p['sha'] for p in c['parents']]
|
|
|
|
for c in commits
|
|
|
|
}))
|
|
|
|
}
|
|
|
|
return sorted(commits, key=lambda c: idx[c['sha']])
|
2018-08-28 20:42:28 +07:00
|
|
|
|
2018-06-21 14:55:14 +07:00
|
|
|
def statuses(self, h):
|
|
|
|
r = self('get', 'commits/{}/status'.format(h)).json()
|
|
|
|
return [{
|
|
|
|
'sha': r['sha'],
|
2018-09-17 16:04:31 +07:00
|
|
|
**s,
|
2018-06-21 14:55:14 +07:00
|
|
|
} for s in r['statuses']]
|
2018-08-28 20:42:28 +07:00
|
|
|
|
2018-09-20 14:25:13 +07:00
|
|
|
def shorten(s):
    """ Truncates ``s`` to its first line, itself capped at 50 characters
    (47 plus a ``...`` ellipsis). Falsy inputs are returned unchanged.
    """
    if not s:
        return s

    first_line, _, _ = s.partition('\n')
    if len(first_line) < 50:
        return first_line

    return first_line[:47] + '...'
|