[IMP] runbot_merge: return proper http responses from webhooks

The old controller system required `type='json'` which only did
JSON-RPC and prevented returning proper responses.

As of 17.0 this is not the case anymore, `type='http'` controllers can
get `content-type: application/json` requests just fine and return
whatever they want. So change that:

- Drop `type='json'` in favour of `type='http'`.
- Return `Response` objects with nice status codes (and text
  mimetypes, as otherwise werkzeug defaults to html).
- Update ping to bypass normal flow as otherwise it requires
  authentication and event sources which is annoying (it might be a
  good idea to have both in order to check for configuration, however
  it's not possible to just send a ping via the webhook UI on github
  so not sure how useful that would be).
- Add some typing and improve response messages while at it.

Note: some "internal" errors (e.g. ignoring event actions) are
reported as successes because as far as I can tell webhooks only
support success (2xx) / failure (4xx, 5xx), and an event that's
ignored is not really *failed* per se.

Some things are reported as failures even though they are on the edge
because that can be useful to see what's happening e.g. comment too
large or unable to lock rows.

Fixes #1019
This commit is contained in:
Xavier Morel 2024-12-18 10:25:09 +01:00
parent 0fd254b5fe
commit 89411f968f
2 changed files with 112 additions and 59 deletions

View File

@ -1,13 +1,16 @@
from __future__ import annotations
import hashlib import hashlib
import hmac import hmac
import logging import logging
import json import json
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Callable
import sentry_sdk import sentry_sdk
import werkzeug.exceptions
from odoo.http import Controller, request, route from odoo.api import Environment
from odoo.http import Controller, request, route, Response
from . import dashboard from . import dashboard
from . import reviewer_provisioning from . import reviewer_provisioning
@ -72,9 +75,8 @@ class MergebotController(Controller):
for staging in stagings for staging in stagings
] ]
@route('/runbot_merge/hooks', auth='none', type='http', csrf=False, methods=['POST'])
@route('/runbot_merge/hooks', auth='none', type='json', csrf=False, methods=['POST']) def index(self) -> Response:
def index(self):
req = request.httprequest req = request.httprequest
event = req.headers['X-Github-Event'] event = req.headers['X-Github-Event']
with sentry_sdk.configure_scope() as scope: with sentry_sdk.configure_scope() as scope:
@ -87,10 +89,12 @@ class MergebotController(Controller):
github._gh.info(self._format(req)) github._gh.info(self._format(req))
data = request.get_json_data()
repo = data.get('repository', {}).get('full_name')
env = request.env(user=1) env = request.env(user=1)
data = request.get_json_data()
if event == 'ping':
return handle_ping(env, request.get_json_data())
repo = data.get('repository', {}).get('full_name')
source = repo and env['runbot_merge.events_sources'].search([('repository', '=', repo)]) source = repo and env['runbot_merge.events_sources'].search([('repository', '=', repo)])
if not source: if not source:
_logger.warning( _logger.warning(
@ -98,7 +102,7 @@ class MergebotController(Controller):
req.headers.get("X-Github-Delivery"), req.headers.get("X-Github-Delivery"),
repo, repo,
) )
return werkzeug.exceptions.Forbidden() return Response(status=403, mimetype="text/plain")
elif secret := source.secret: elif secret := source.secret:
signature = 'sha256=' + hmac.new(secret.strip().encode(), req.get_data(), hashlib.sha256).hexdigest() signature = 'sha256=' + hmac.new(secret.strip().encode(), req.get_data(), hashlib.sha256).hexdigest()
if not hmac.compare_digest(signature, req.headers.get('X-Hub-Signature-256', '')): if not hmac.compare_digest(signature, req.headers.get('X-Hub-Signature-256', '')):
@ -110,7 +114,7 @@ class MergebotController(Controller):
signature, signature,
req.headers, req.headers,
) )
return werkzeug.exceptions.Forbidden() return Response(status=403, mimetype="text/plain")
elif req.headers.get('X-Hub-Signature-256'): elif req.headers.get('X-Hub-Signature-256'):
_logger.info("No secret for %s but received a signature in:\n%s", repo, req.headers) _logger.info("No secret for %s but received a signature in:\n%s", repo, req.headers)
else: else:
@ -119,7 +123,11 @@ class MergebotController(Controller):
c = EVENTS.get(event) c = EVENTS.get(event)
if not c: if not c:
_logger.warning('Unknown event %s', event) _logger.warning('Unknown event %s', event)
return 'Unknown event {}'.format(event) return Response(
status=422,
mimetype="text/plain",
response="Not setup to receive event.",
)
sentry_sdk.set_context('webhook', data) sentry_sdk.set_context('webhook', data)
return c(env, data) return c(env, data)
@ -149,7 +157,11 @@ def handle_pr(env, event):
event['pull_request']['base']['repo']['full_name'], event['pull_request']['base']['repo']['full_name'],
event['pull_request']['number'], event['pull_request']['number'],
) )
return 'Ignoring' return Response(
status=200,
mimetype="text/plain",
response="Not setup to receive action.",
)
pr = event['pull_request'] pr = event['pull_request']
r = pr['base']['repo']['full_name'] r = pr['base']['repo']['full_name']
@ -158,13 +170,11 @@ def handle_pr(env, event):
repo = env['runbot_merge.repository'].search([('name', '=', r)]) repo = env['runbot_merge.repository'].search([('name', '=', r)])
if not repo: if not repo:
_logger.warning("Received a PR for %s but not configured to handle that repo", r) _logger.warning("Received a PR for %s but not configured to handle that repo", r)
# sadly shit's retarded so odoo json endpoints really mean return Response(
# jsonrpc and it's LITERALLY NOT POSSIBLE TO REPLY WITH status=422,
# ACTUAL RAW HTTP RESPONSES and thus not possible to mimetype="text/plain",
# report actual errors to the webhooks listing thing on response="Not configured for repository.",
# github (not that we'd be looking at them but it'd be )
# useful for tests)
return "Not configured to handle {}".format(r)
# PRs to unmanaged branches are not necessarily abnormal and # PRs to unmanaged branches are not necessarily abnormal and
# we don't care # we don't care
@ -197,8 +207,13 @@ def handle_pr(env, event):
# retargeting to un-managed => delete # retargeting to un-managed => delete
if not branch: if not branch:
pr = find(source_branch) pr = find(source_branch)
number = pr.number
pr.unlink() pr.unlink()
return 'Retargeted {} to un-managed branch {}, deleted'.format(pr.id, b) return Response(
status=200,
mimetype="text/plain",
response=f'Retargeted {number} to un-managed branch {b}, deleted',
)
# retargeting from un-managed => create # retargeting from un-managed => create
if not source_branch: if not source_branch:
@ -218,8 +233,16 @@ def handle_pr(env, event):
if updates: if updates:
# copy because it updates the `updates` dict internally # copy because it updates the `updates` dict internally
pr_obj.write(dict(updates)) pr_obj.write(dict(updates))
return 'Updated {}'.format(', '.join(updates)) return Response(
return "Nothing to update ({})".format(', '.join(event['changes'].keys())) status=200,
mimetype="text/plain",
response=f'Updated {", ".join(updates)}',
)
return Response(
status=200,
mimetype="text/plain",
response=f"Nothing to update ({', '.join(event['changes'])})",
)
message = None message = None
if not branch: if not branch:
@ -240,7 +263,7 @@ def handle_pr(env, event):
feedback(message=message) feedback(message=message)
if not branch: if not branch:
return "Not set up to care about {}:{}".format(r, b) return Response(status=422, mimetype="text/plain", response=f"Not set up to care about {r}:{b}")
headers = request.httprequest.headers if request else {} headers = request.httprequest.headers if request else {}
_logger.info( _logger.info(
@ -263,12 +286,16 @@ def handle_pr(env, event):
if not author: if not author:
env['res.partner'].create({'name': author_name, 'github_login': author_name}) env['res.partner'].create({'name': author_name, 'github_login': author_name})
pr_obj = env['runbot_merge.pull_requests']._from_gh(pr) pr_obj = env['runbot_merge.pull_requests']._from_gh(pr)
return "Tracking PR as {}".format(pr_obj.id) return Response(status=201, mimetype="text/plain", response=f"Tracking PR as {pr_obj.display_name}")
pr_obj = env['runbot_merge.pull_requests']._get_or_schedule(r, pr['number'], closing=event['action'] == 'closed') pr_obj = env['runbot_merge.pull_requests']._get_or_schedule(r, pr['number'], closing=event['action'] == 'closed')
if not pr_obj: if not pr_obj:
_logger.info("webhook %s on unknown PR %s#%s, scheduled fetch", event['action'], repo.name, pr['number']) _logger.info("webhook %s on unknown PR %s#%s, scheduled fetch", event['action'], repo.name, pr['number'])
return "Unknown PR {}:{}, scheduling fetch".format(repo.name, pr['number']) return Response(
status=202, # actually we're ignoring the event so 202 might be wrong?
mimetype="text/plain",
response=f"Unknown PR {repo.name}:{pr['number']}, scheduling fetch",
)
if event['action'] == 'synchronize': if event['action'] == 'synchronize':
if pr_obj.head == pr['head']['sha']: if pr_obj.head == pr['head']['sha']:
_logger.warning( _logger.warning(
@ -277,11 +304,15 @@ def handle_pr(env, event):
pr_obj.head, pr_obj.head,
pr['head']['sha'], pr['head']['sha'],
) )
return 'No update to pr head' return Response(mimetype="text/plain", response='No update to pr head')
if pr_obj.state in ('closed', 'merged'): if pr_obj.state in ('closed', 'merged'):
_logger.error("PR %s sync %s -> %s => failure (closed)", pr_obj.display_name, pr_obj.head, pr['head']['sha']) _logger.error("PR %s sync %s -> %s => failure (closed)", pr_obj.display_name, pr_obj.head, pr['head']['sha'])
return "It's my understanding that closed/merged PRs don't get sync'd" return Response(
status=422,
mimetype="text/plain",
response="It's my understanding that closed/merged PRs don't get sync'd",
)
_logger.info( _logger.info(
"PR %s sync %s -> %s by %s => reset to 'open' and squash=%s", "PR %s sync %s -> %s by %s => reset to 'open' and squash=%s",
@ -304,14 +335,14 @@ def handle_pr(env, event):
'head': pr['head']['sha'], 'head': pr['head']['sha'],
'squash': pr['commits'] == 1, 'squash': pr['commits'] == 1,
}) })
return f'Updated to {pr_obj.head}' return Response(mimetype="text/plain", response=f'Updated to {pr_obj.head}')
if event['action'] == 'ready_for_review': if event['action'] == 'ready_for_review':
pr_obj.draft = False pr_obj.draft = False
return f'Updated {pr_obj.display_name} to ready' return Response(mimetype="text/plain", response=f'Updated {pr_obj.display_name} to ready')
if event['action'] == 'converted_to_draft': if event['action'] == 'converted_to_draft':
pr_obj.draft = True pr_obj.draft = True
return f'Updated {pr_obj.display_name} to draft' return Response(mimetype="text/plain", response=f'Updated {pr_obj.display_name} to draft')
# don't marked merged PRs as closed (!!!) # don't marked merged PRs as closed (!!!)
if event['action'] == 'closed' and pr_obj.state != 'merged': if event['action'] == 'closed' and pr_obj.state != 'merged':
@ -323,7 +354,7 @@ def handle_pr(env, event):
pr_obj.display_name, pr_obj.display_name,
oldstate, oldstate,
) )
return 'Closed {}'.format(pr_obj.display_name) return Response(mimetype="text/plain", response=f'Closed {pr_obj.display_name}')
_logger.info( _logger.info(
'%s tried to close %s (state=%s) but locking failed', '%s tried to close %s (state=%s) but locking failed',
@ -331,7 +362,11 @@ def handle_pr(env, event):
pr_obj.display_name, pr_obj.display_name,
oldstate, oldstate,
) )
return 'Ignored: could not lock rows (probably being merged)' return Response(
status=503,
mimetype="text/plain",
response='could not lock rows (probably being merged)',
)
if event['action'] == 'reopened' : if event['action'] == 'reopened' :
if pr_obj.merge_date: if pr_obj.merge_date:
@ -349,12 +384,12 @@ def handle_pr(env, event):
'squash': pr['commits'] == 1, 'squash': pr['commits'] == 1,
}) })
return 'Reopened {}'.format(pr_obj.display_name) return Response(mimetype="text/plain", response=f'Reopened {pr_obj.display_name}')
_logger.info("Ignoring event %s on PR %s", event['action'], pr['number']) _logger.info("Ignoring event %s on PR %s", event['action'], pr['number'])
return "Not handling {} yet".format(event['action']) return Response(status=200, mimetype="text/plain", response=f"Not handling {event['action']} yet")
def handle_status(env, event): def handle_status(env: Environment, event: dict) -> Response:
_logger.info( _logger.info(
'status on %(sha)s %(context)s:%(state)s (%(target_url)s) [%(description)r]', 'status on %(sha)s %(context)s:%(state)s (%(target_url)s) [%(description)r]',
event event
@ -379,11 +414,21 @@ def handle_status(env, event):
""", [event['sha'], status_value], log_exceptions=False) """, [event['sha'], status_value], log_exceptions=False)
env.ref("runbot_merge.process_updated_commits")._trigger() env.ref("runbot_merge.process_updated_commits")._trigger()
return 'ok' return Response(status=204)
def handle_comment(env, event): def handle_comment(env: Environment, event: dict) -> Response:
if 'pull_request' not in event['issue']: if 'pull_request' not in event['issue']:
return "issue comment, ignoring" return Response(
status=200,
mimetype="text/plain",
response="issue comment, ignoring",
)
if event['action'] != 'created':
return Response(
status=200,
mimetype="text/plain",
response=f"Ignored: action ({event['action']!r}) is not 'created'",
)
repo = event['repository']['full_name'] repo = event['repository']['full_name']
issue = event['issue']['number'] issue = event['issue']['number']
@ -391,36 +436,35 @@ def handle_comment(env, event):
comment = event['comment']['body'] comment = event['comment']['body']
if len(comment) > 5000: if len(comment) > 5000:
_logger.warning('comment(%s): %s %s#%s => ignored (%d characters)', event['comment']['html_url'], author, repo, issue, len(comment)) _logger.warning('comment(%s): %s %s#%s => ignored (%d characters)', event['comment']['html_url'], author, repo, issue, len(comment))
return "ignored: too big" return Response(status=413, mimetype="text/plain", response="comment too large")
_logger.info('comment[%s]: %s %s#%s %r', event['action'], author, repo, issue, comment) _logger.info('comment[%s]: %s %s#%s %r', event['action'], author, repo, issue, comment)
if event['action'] != 'created':
return "Ignored: action (%r) is not 'created'" % event['action']
return _handle_comment(env, repo, issue, event['comment']) return _handle_comment(env, repo, issue, event['comment'])
def handle_review(env, event): def handle_review(env: Environment, event: dict) -> Response:
if event['action'] != 'submitted':
return Response(
status=200,
mimetype="text/plain",
response=f"Ignored: action ({event['action']!r}) is not 'submitted'",
)
repo = event['repository']['full_name'] repo = event['repository']['full_name']
pr = event['pull_request']['number'] pr = event['pull_request']['number']
author = event['review']['user']['login'] author = event['review']['user']['login']
comment = event['review']['body'] or '' comment = event['review']['body'] or ''
if len(comment) > 5000: if len(comment) > 5000:
_logger.warning('comment(%s): %s %s#%s => ignored (%d characters)', event['review']['html_url'], author, repo, pr, len(comment)) _logger.warning('comment(%s): %s %s#%s => ignored (%d characters)', event['review']['html_url'], author, repo, pr, len(comment))
return "ignored: too big" return Response(status=413, mimetype="text/plain", response="review too large")
_logger.info('review[%s]: %s %s#%s %r', event['action'], author, repo, pr, comment) _logger.info('review[%s]: %s %s#%s %r', event['action'], author, repo, pr, comment)
if event['action'] != 'submitted': return _handle_comment(env, repo, pr, event['review'], target=event['pull_request']['base']['ref'])
return "Ignored: action (%r) is not 'submitted'" % event['action']
return _handle_comment( def handle_ping(env: Environment, event: dict) -> Response:
env, repo, pr, event['review'],
target=event['pull_request']['base']['ref'])
def handle_ping(env, event):
_logger.info("Got ping! %s", event['zen']) _logger.info("Got ping! %s", event['zen'])
return "pong" return Response(mimetype="text/plain", response="pong")
EVENTS = { EVENTS: dict[str, Callable[[Environment, dict], Response]] = {
'pull_request': handle_pr, 'pull_request': handle_pr,
'status': handle_status, 'status': handle_status,
'issue_comment': handle_comment, 'issue_comment': handle_comment,
@ -428,16 +472,25 @@ EVENTS = {
'ping': handle_ping, 'ping': handle_ping,
} }
def _handle_comment(env, repo, issue, comment, target=None): def _handle_comment(
env: Environment,
repo: str,
issue: int,
comment: dict,
target: str | None = None,
) -> Response:
repository = env['runbot_merge.repository'].search([('name', '=', repo)]) repository = env['runbot_merge.repository'].search([('name', '=', repo)])
if not repository.project_id._find_commands(comment['body'] or ''): if not repository.project_id._find_commands(comment['body'] or ''):
return "No commands, ignoring" return Response(mimetype="text/plain", response="No commands, ignoring")
pr = env['runbot_merge.pull_requests']._get_or_schedule( pr = env['runbot_merge.pull_requests']._get_or_schedule(
repo, issue, target=target, commenter=comment['user']['login'], repo, issue, target=target, commenter=comment['user']['login'],
) )
if not pr: if not pr:
return "Unknown PR, scheduling fetch" return Response(status=202, mimetype="text/plain", response="Unknown PR, scheduling fetch")
partner = env['res.partner'].search([('github_login', '=', comment['user']['login'])]) partner = env['res.partner'].search([('github_login', '=', comment['user']['login'])])
return pr._parse_commands(partner, comment, comment['user']['login']) return Response(
mimetype="text/plain",
response=pr._parse_commands(partner, comment, comment['user']['login']),
)

View File

@ -150,7 +150,7 @@ All substitutions are tentatively applied sequentially to the input.
'action': 'synchronize', 'action': 'synchronize',
'pull_request': pr, 'pull_request': pr,
'sender': {'login': self.project_id.github_prefix} 'sender': {'login': self.project_id.github_prefix}
}) }).get_data(True)
edit = controllers.handle_pr(self.env, { edit = controllers.handle_pr(self.env, {
'action': 'edited', 'action': 'edited',
'pull_request': pr, 'pull_request': pr,
@ -160,14 +160,14 @@ All substitutions are tentatively applied sequentially to the input.
'body': {'from', ''.join(pr_id.message.splitlines(keepends=True)[2:])}, 'body': {'from', ''.join(pr_id.message.splitlines(keepends=True)[2:])},
}, },
'sender': {'login': self.project_id.github_prefix}, 'sender': {'login': self.project_id.github_prefix},
}) }).get_data(True)
edit2 = '' edit2 = ''
if pr_id.draft != pr['draft']: if pr_id.draft != pr['draft']:
edit2 = controllers.handle_pr(self.env, { edit2 = controllers.handle_pr(self.env, {
'action': 'converted_to_draft' if pr['draft'] else 'ready_for_review', 'action': 'converted_to_draft' if pr['draft'] else 'ready_for_review',
'pull_request': pr, 'pull_request': pr,
'sender': {'login': self.project_id.github_prefix} 'sender': {'login': self.project_id.github_prefix}
}) + '. ' }).get_data(True) + '. '
if pr_id.state != 'closed' and pr['state'] == 'closed': if pr_id.state != 'closed' and pr['state'] == 'closed':
# don't go through controller because try_closing does weird things # don't go through controller because try_closing does weird things
# for safety / race condition reasons which ends up committing # for safety / race condition reasons which ends up committing