Mirror of https://github.com/odoo/runbot.git
[IMP] runbot_merge: add sentry filtering, rework some error messages
- move sentry configuration and add exception-based filtering
- clarify and reclassify (e.g. from warning to info) a few messages
- convert assertions in rebase to MergeError so they can be correctly
  logged & reported, and ignored by sentry; also clarify them
  (especially the consistency one)

Related to #544
This commit is contained in: parent cd4ded899b, commit 06a3a1bab5
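For context, the exception-based filtering described above hinges on sentry_sdk's before_send hook: the callback receives each event plus a hint carrying the original exc_info, and returning None drops the event. Below is a minimal standalone sketch of that pattern; the SuppressedError class and the DSN are placeholders for illustration and are not part of this commit.

import sentry_sdk

class SuppressedError(Exception):
    """Placeholder for an exception type that should never be reported
    (runbot_merge keys on its own MergeError/Unmergeable types instead)."""

def event_filter(event, hint):
    # sentry_sdk passes the active exception as hint['exc_info'],
    # a (type, value, traceback) triple, when one is available
    exc_info = hint.get('exc_info')
    if exc_info and isinstance(exc_info[1], SuppressedError):
        return None  # returning None tells sentry to drop the event
    return event  # anything else is reported as usual

sentry_sdk.init(
    dsn="https://public@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=event_filter,
)

The commit's own filter (runbot_merge/sentry.py, added below) applies the same idea keyed on a dict of exception types, and also inspects the log_record hint so events raised through the logging integration are filtered consistently.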
@@ -1,41 +1,5 @@
-import logging
-from os import environ
-
-import sentry_sdk
-from sentry_sdk.integrations.logging import LoggingIntegration
-from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-
-from odoo import http
 from . import models, controllers
+from .sentry import enable_sentry
 
-def delegate(self, attr):
-    return getattr(self.app, attr)
-SentryWsgiMiddleware.__getattr__ = delegate
-
-def enable_sentry():
-    logger = logging.getLogger('runbot_merge')
-
-    dsn = environ.get('SENTRY_DSN')
-    if not dsn:
-        logger.info("No DSN found, skipping sentry...")
-        return
-
-    try:
-        sentry_sdk.init(
-            dsn,
-            integrations=[
-                # note: if the colorformatter is enabled, sentry gets lost
-                # and classifies everything as errors because it fails to
-                # properly classify levels as the colorformatter injects
-                # the ANSI color codes right into LogRecord.levelname
-                LoggingIntegration(level=logging.INFO, event_level=logging.WARNING),
-            ]
-        )
-        http.root = SentryWsgiMiddleware(http.root)
-    except Exception:
-        logger.exception("DSN found, failed to enable sentry...")
-    else:
-        logger.info("DSN found, sentry enabled...")
-
 def _check_citext(cr):
     cr.execute("select 1 from pg_extension where extname = 'citext'")
@@ -235,14 +235,14 @@ def handle_pr(env, event):
                 oldstate,
             )
             return 'Closed {}'.format(pr_obj.display_name)
-        else:
-            _logger.warning(
-                '%s tried to close %s (state=%s)',
-                event['sender']['login'],
-                pr_obj.display_name,
-                oldstate,
-            )
-            return 'Ignored: could not lock rows (probably being merged)'
+
+        _logger.info(
+            '%s tried to close %s (state=%s) but locking failed',
+            event['sender']['login'],
+            pr_obj.display_name,
+            oldstate,
+        )
+        return 'Ignored: could not lock rows (probably being merged)'
 
     if event['action'] == 'reopened' :
         if pr_obj.state == 'merged':
@@ -275,7 +275,7 @@ class GH(object):
         try:
             r = r.json()
         except Exception:
-            raise MergeError("Got non-JSON reponse from github: %s %s (%s)" % (r.status_code, r.reason, r.text))
+            raise MergeError("got non-JSON reponse from github: %s %s (%s)" % (r.status_code, r.reason, r.text))
         _logger.debug(
             "merge(%s, %s (%s), %s) -> %s",
             self._repo, dest, r['parents'][0]['sha'],
@@ -297,10 +297,17 @@ class GH(object):
         logger.debug("rebasing %s, %s on %s (reset=%s, commits=%s)",
                      self._repo, pr, dest, reset, len(commits))
 
-        assert commits, "can't rebase a PR with no commits"
+        if not commits:
+            raise MergeError("PR has no commits")
         prev = original_head
         for original in commits:
-            assert len(original['parents']) == 1, "can't rebase commits with more than one parent"
+            if len(original['parents']) != 1:
+                raise MergeError(
+                    "commits with multiple parents ({sha}) can not be rebased, "
+                    "either fix the branch to remove merges or merge without "
+                    "rebasing".format_map(
+                        original
+                    ))
             tmp_msg = 'temp rebasing PR %s (%s)' % (pr, original['sha'])
             merged = self.merge(original['sha'], dest, tmp_msg)
 
@@ -309,9 +316,12 @@ class GH(object):
             # expect (either original_head or the previously merged commit)
             [base_commit] = (parent['sha'] for parent in merged['parents']
                              if parent['sha'] != original['sha'])
-            assert prev == base_commit,\
-                "Inconsistent view of %s between head (%s) and merge (%s)" % (
-                    dest, prev, base_commit,
+            if prev != base_commit:
+                raise MergeError(
+                    f"Inconsistent view of branch {dest} while rebasing "
+                    f"PR {pr} expected commit {prev} but the other parent of "
+                    f"merge commit {merged['sha']} is {base_commit}.\n\n"
+                    f"The branch may be getting concurrently modified."
                 )
             prev = merged['sha']
             original['new_tree'] = merged['tree']['sha']
@@ -388,7 +388,7 @@ class Branch(models.Model):
             if len(e.args) > 1 and e.args[1]:
                 reason = e.args[1]
             else:
-                reason = e.__context__
+                reason = e.__cause__ or e.__context__
             # if the reason is a json document, assume it's a github
             # error and try to extract the error message to give it to
             # the user
@@ -1588,7 +1588,7 @@ class Tagging(models.Model):
             try:
                 gh.change_tags(pr, tags_remove, tags_add)
             except Exception:
-                _logger.exception(
+                _logger.info(
                     "Error while trying to change the tags of %s#%s from %s to %s",
                     repo.name, pr, remove, add,
                 )
@@ -2211,8 +2211,8 @@ class Batch(models.Model):
                 it = meta[to_revert.repository]
                 it['gh'].set_ref('tmp.{}'.format(to_revert.target.name), it['head'])
             raise
-        except github.MergeError:
-            raise exceptions.MergeError(pr)
+        except github.MergeError as e:
+            raise exceptions.MergeError(pr) from e
         except exceptions.Mismatch as e:
             def format_items(items):
                 """ Bit of a pain in the ass because difflib really wants
@@ -2229,10 +2229,7 @@ class Batch(models.Model):
             old = list(format_items((n, str(v)) for n, v, _ in e.args[1]))
             new = list(format_items((n, str(v)) for n, _, v in e.args[1]))
             diff = ''.join(Differ().compare(old, new))
-            _logger.warning(
-                "data mismatch on %s:\n%s",
-                pr.display_name, diff
-            )
+            _logger.info("data mismatch on %s:\n%s", pr.display_name, diff)
             self.env.ref('runbot_merge.pr.staging.mismatch')._send(
                 repository=pr.repository,
                 pull_request=pr.number,
runbot_merge/sentry.py (new file, 80 lines)
@@ -0,0 +1,80 @@
+import logging
+from os import environ
+
+import sentry_sdk
+from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+
+from odoo import http
+from runbot_merge.exceptions import FastForwardError, Mismatch, MergeError, Unmergeable
+
+
+def delegate(self, attr):
+    return getattr(self.app, attr)
+SentryWsgiMiddleware.__getattr__ = delegate
+
+def enable_sentry():
+    logger = logging.getLogger('runbot_merge')
+
+    dsn = environ.get('SENTRY_DSN')
+    if not dsn:
+        logger.info("No DSN found, skipping sentry...")
+        return
+
+    try:
+        setup_sentry(dsn)
+    except Exception:
+        logger.exception("DSN found, failed to enable sentry...")
+    else:
+        logger.info("DSN found, sentry enabled...")
+
+
+def setup_sentry(dsn):
+    sentry_sdk.init(
+        dsn,
+        traces_sample_rate=1.0,
+        integrations=[
+            # note: if the colorformatter is enabled, sentry gets lost
+            # and classifies everything as errors because it fails to
+            # properly classify levels as the colorformatter injects
+            # the ANSI color codes right into LogRecord.levelname
+            LoggingIntegration(level=logging.INFO, event_level=logging.WARNING),
+        ],
+        before_send=event_filter,
+    )
+    http.root = SentryWsgiMiddleware(http.root)
+
+dummy_record = logging.LogRecord(name="", level=logging.NOTSET, pathname='', lineno=0, msg='', args=(), exc_info=None)
+# mapping of exception types to predicates, if the predicate returns `True` the
+# exception event should be suppressed
+SUPPRESS_EXCEPTION = {
+    # Someone else deciding to push directly to the branch (which is generally
+    # what leads to this error) is not really actionable.
+    #
+    # Other possibilities are more structural and thus we probably want to know:
+    # - other 422 Unprocessable github errors (likely config issues):
+    #   - reference does not exist
+    #   - object does not exist
+    #   - object is not a commit
+    #   - branch protection issue
+    # - timeout on ref update (github probably dying)
+    # - other HTTP error (also github probably dying)
+    #
+    # might be worth using richer exceptions to make this clearer, and easier to classify
+    FastForwardError: lambda e: 'not a fast forward' in str(e.__cause__),
+    # Git conflict when merging (or non-json response which is weird),
+    # notified on PR
+    MergeError: lambda _: True,
+    # Failed preconditions on merging, notified on PR
+    Unmergeable: lambda _: True,
+}
+def event_filter(event, hint):
+    # event['level'], event['logger'], event['logentry'], event['exception']
+    # known hints: log_record: LogRecord, exc_info: (type, BaseExeption, Traceback) | None
+    exc_info = hint.get('exc_info') or hint.get('log_record', dummy_record).exc_info
+    if exc_info:
+        etype, exc, _ = exc_info
+        if SUPPRESS_EXCEPTION.get(etype, lambda _: False)(exc):
+            return None
+    return event