mirror of
https://github.com/odoo/runbot.git
synced 2025-03-15 15:35:46 +07:00
[CHG] runbot_merge: make merge method non-blocking
Because of the false negatives due to github's reordering of events on retargeting, blocking merge methods can be rather frustrating for the user, as what's happening and how to solve it isn't clear in that case. Keep the warnings and all, but remove the blocking factor: if a PR doesn't have a merge method and is not single-commit, just skip it on staging. This way, PRs which are actually single-commit will stage fine even if the mergebot thinks they shouldn't be. Fixes #957
This commit is contained in:
parent
4215be770d
commit
20a4e97b05
@ -2,7 +2,7 @@ import hashlib
|
|||||||
import hmac
|
import hmac
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
from datetime import datetime
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
import sentry_sdk
|
import sentry_sdk
|
||||||
import werkzeug.exceptions
|
import werkzeug.exceptions
|
||||||
@ -294,6 +294,12 @@ def handle_pr(env, event):
|
|||||||
event['sender']['login'],
|
event['sender']['login'],
|
||||||
pr['commits'] == 1
|
pr['commits'] == 1
|
||||||
)
|
)
|
||||||
|
if pr['base']['ref'] != pr_obj.target.name:
|
||||||
|
env['runbot_merge.fetch_job'].create({
|
||||||
|
'repository': pr_obj.repository.id,
|
||||||
|
'number': pr_obj.number,
|
||||||
|
'commits_at': datetime.now() + timedelta(minutes=5),
|
||||||
|
})
|
||||||
|
|
||||||
pr_obj.write({
|
pr_obj.write({
|
||||||
'reviewed_by': False,
|
'reviewed_by': False,
|
||||||
|
@ -5,4 +5,7 @@ class FastForwardError(Exception):
|
|||||||
class Mismatch(MergeError):
|
class Mismatch(MergeError):
|
||||||
pass
|
pass
|
||||||
class Unmergeable(MergeError):
|
class Unmergeable(MergeError):
|
||||||
...
|
pass
|
||||||
|
|
||||||
|
class Skip(MergeError):
|
||||||
|
pass
|
||||||
|
@ -195,7 +195,7 @@ class Batch(models.Model):
|
|||||||
|
|
||||||
@api.depends(
|
@api.depends(
|
||||||
"merge_date",
|
"merge_date",
|
||||||
"prs.error", "prs.draft", "prs.squash", "prs.merge_method",
|
"prs.error", "prs.draft",
|
||||||
"skipchecks",
|
"skipchecks",
|
||||||
"prs.status", "prs.reviewed_by", "prs.target",
|
"prs.status", "prs.reviewed_by", "prs.target",
|
||||||
)
|
)
|
||||||
@ -208,7 +208,7 @@ class Batch(models.Model):
|
|||||||
elif len(targets := batch.prs.mapped('target')) > 1:
|
elif len(targets := batch.prs.mapped('target')) > 1:
|
||||||
batch.blocked = f"Multiple target branches: {', '.join(targets.mapped('name'))!r}"
|
batch.blocked = f"Multiple target branches: {', '.join(targets.mapped('name'))!r}"
|
||||||
elif blocking := batch.prs.filtered(
|
elif blocking := batch.prs.filtered(
|
||||||
lambda p: p.error or p.draft or not (p.squash or p.merge_method)
|
lambda p: p.error or p.draft
|
||||||
):
|
):
|
||||||
batch.blocked = "Pull request(s) %s blocked." % ', '.join(blocking.mapped('display_name'))
|
batch.blocked = "Pull request(s) %s blocked." % ', '.join(blocking.mapped('display_name'))
|
||||||
elif not batch.skipchecks and (unready := batch.prs.filtered(
|
elif not batch.skipchecks and (unready := batch.prs.filtered(
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import ast
|
import ast
|
||||||
|
import builtins
|
||||||
import collections
|
import collections
|
||||||
import contextlib
|
import contextlib
|
||||||
import datetime
|
import datetime
|
||||||
@ -106,7 +107,7 @@ All substitutions are tentatively applied sequentially to the input.
|
|||||||
self._cr, 'runbot_merge_unique_repo', self._table, ['name'])
|
self._cr, 'runbot_merge_unique_repo', self._table, ['name'])
|
||||||
return res
|
return res
|
||||||
|
|
||||||
def _load_pr(self, number, *, closing=False):
|
def _load_pr(self, number, *, closing=False, squash=False):
|
||||||
gh = self.github()
|
gh = self.github()
|
||||||
|
|
||||||
# fetch PR object and handle as *opened*
|
# fetch PR object and handle as *opened*
|
||||||
@ -133,6 +134,10 @@ All substitutions are tentatively applied sequentially to the input.
|
|||||||
('number', '=', number),
|
('number', '=', number),
|
||||||
])
|
])
|
||||||
if pr_id:
|
if pr_id:
|
||||||
|
if squash:
|
||||||
|
pr_id.squash = pr['commits'] == 1
|
||||||
|
return
|
||||||
|
|
||||||
sync = controllers.handle_pr(self.env, {
|
sync = controllers.handle_pr(self.env, {
|
||||||
'action': 'synchronize',
|
'action': 'synchronize',
|
||||||
'pull_request': pr,
|
'pull_request': pr,
|
||||||
@ -583,8 +588,6 @@ class PullRequests(models.Model):
|
|||||||
|
|
||||||
@api.depends(
|
@api.depends(
|
||||||
'batch_id.prs.draft',
|
'batch_id.prs.draft',
|
||||||
'batch_id.prs.squash',
|
|
||||||
'batch_id.prs.merge_method',
|
|
||||||
'batch_id.prs.state',
|
'batch_id.prs.state',
|
||||||
'batch_id.skipchecks',
|
'batch_id.skipchecks',
|
||||||
)
|
)
|
||||||
@ -592,12 +595,11 @@ class PullRequests(models.Model):
|
|||||||
self.blocked = False
|
self.blocked = False
|
||||||
requirements = (
|
requirements = (
|
||||||
lambda p: not p.draft,
|
lambda p: not p.draft,
|
||||||
lambda p: p.squash or p.merge_method,
|
|
||||||
lambda p: p.state == 'ready' \
|
lambda p: p.state == 'ready' \
|
||||||
or p.batch_id.skipchecks \
|
or p.batch_id.skipchecks \
|
||||||
and all(pr.state != 'error' for pr in p.batch_id.prs)
|
and all(pr.state != 'error' for pr in p.batch_id.prs)
|
||||||
)
|
)
|
||||||
messages = ('is in draft', 'has no merge method', 'is not ready')
|
messages = ('is in draft', 'is not ready')
|
||||||
for pr in self:
|
for pr in self:
|
||||||
if pr.state in ('merged', 'closed'):
|
if pr.state in ('merged', 'closed'):
|
||||||
continue
|
continue
|
||||||
@ -834,6 +836,10 @@ For your own safety I've ignored *everything in your entire comment*.
|
|||||||
pull_request=self.number,
|
pull_request=self.number,
|
||||||
format_args={'new_method': explanation, 'pr': self, 'user': login},
|
format_args={'new_method': explanation, 'pr': self, 'user': login},
|
||||||
)
|
)
|
||||||
|
# if the merge method is the only thing preventing (but not
|
||||||
|
# *blocking*) staging, trigger a staging
|
||||||
|
if self.state == 'ready':
|
||||||
|
self.env.ref("runbot_merge.staging_cron")._trigger()
|
||||||
case commands.Retry() if is_author or source_author:
|
case commands.Retry() if is_author or source_author:
|
||||||
if self.error:
|
if self.error:
|
||||||
self.error = False
|
self.error = False
|
||||||
@ -2497,31 +2503,41 @@ class FetchJob(models.Model):
|
|||||||
repository = fields.Many2one('runbot_merge.repository', required=True)
|
repository = fields.Many2one('runbot_merge.repository', required=True)
|
||||||
number = fields.Integer(required=True, group_operator=None)
|
number = fields.Integer(required=True, group_operator=None)
|
||||||
closing = fields.Boolean(default=False)
|
closing = fields.Boolean(default=False)
|
||||||
|
commits_at = fields.Datetime(index="btree_not_null")
|
||||||
|
|
||||||
@api.model_create_multi
|
@api.model_create_multi
|
||||||
def create(self, vals_list):
|
def create(self, vals_list):
|
||||||
self.env.ref('runbot_merge.fetch_prs_cron')._trigger()
|
now = fields.Datetime.now()
|
||||||
|
self.env.ref('runbot_merge.fetch_prs_cron')._trigger({
|
||||||
|
fields.Datetime.to_datetime(
|
||||||
|
vs.get('commits_at') or now
|
||||||
|
)
|
||||||
|
for vs in vals_list
|
||||||
|
})
|
||||||
return super().create(vals_list)
|
return super().create(vals_list)
|
||||||
|
|
||||||
def _check(self, commit=False):
|
def _check(self, commit=False):
|
||||||
"""
|
"""
|
||||||
:param bool commit: commit after each fetch has been executed
|
:param bool commit: commit after each fetch has been executed
|
||||||
"""
|
"""
|
||||||
|
now = getattr(builtins, 'current_date', None) or fields.Datetime.to_string(datetime.datetime.now())
|
||||||
while True:
|
while True:
|
||||||
f = self.search([], limit=1)
|
f = self.search([
|
||||||
|
'|', ('commits_at', '=', False), ('commits_at', '<=', now)
|
||||||
|
], limit=1)
|
||||||
if not f:
|
if not f:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
f.active = False
|
||||||
self.env.cr.execute("SAVEPOINT runbot_merge_before_fetch")
|
self.env.cr.execute("SAVEPOINT runbot_merge_before_fetch")
|
||||||
try:
|
try:
|
||||||
f.repository._load_pr(f.number, closing=f.closing)
|
f.repository._load_pr(f.number, closing=f.closing, squash=bool(f.commits_at))
|
||||||
except Exception:
|
except Exception:
|
||||||
self.env.cr.execute("ROLLBACK TO SAVEPOINT runbot_merge_before_fetch")
|
self.env.cr.execute("ROLLBACK TO SAVEPOINT runbot_merge_before_fetch")
|
||||||
_logger.exception("Failed to load pr %s, skipping it", f.number)
|
_logger.exception("Failed to load pr %s, skipping it", f.number)
|
||||||
finally:
|
finally:
|
||||||
self.env.cr.execute("RELEASE SAVEPOINT runbot_merge_before_fetch")
|
self.env.cr.execute("RELEASE SAVEPOINT runbot_merge_before_fetch")
|
||||||
|
|
||||||
f.active = False
|
|
||||||
if commit:
|
if commit:
|
||||||
self.env.cr.commit()
|
self.env.cr.commit()
|
||||||
|
|
||||||
|
@ -262,6 +262,10 @@ def stage_batches(branch: Branch, batches: Batch, staging_state: StagingState) -
|
|||||||
break
|
break
|
||||||
try:
|
try:
|
||||||
staged |= stage_batch(env, batch, staging_state)
|
staged |= stage_batch(env, batch, staging_state)
|
||||||
|
except exceptions.Skip:
|
||||||
|
# skip silently because the PR will be retried on every staging
|
||||||
|
# which is going to be ultra spammy
|
||||||
|
pass
|
||||||
except exceptions.MergeError as e:
|
except exceptions.MergeError as e:
|
||||||
pr = e.args[0]
|
pr = e.args[0]
|
||||||
_logger.info("Failed to stage %s into %s", pr.display_name, branch.name)
|
_logger.info("Failed to stage %s into %s", pr.display_name, branch.name)
|
||||||
@ -419,9 +423,19 @@ def stage(pr: PullRequests, info: StagingSlice, related_prs: PullRequests) -> Tu
|
|||||||
invalid['target'] = branch.id
|
invalid['target'] = branch.id
|
||||||
diff.append(('Target branch', pr.target.name, branch.name))
|
diff.append(('Target branch', pr.target.name, branch.name))
|
||||||
|
|
||||||
if pr.squash != commits == 1:
|
if not method:
|
||||||
invalid['squash'] = commits == 1
|
if not pr.method_warned:
|
||||||
diff.append(('Single commit', pr.squash, commits == 1))
|
pr.env.ref('runbot_merge.pr.merge_method')._send(
|
||||||
|
repository=pr.repository,
|
||||||
|
pull_request=pr.number,
|
||||||
|
format_args={'pr': pr, 'methods': ''.join(
|
||||||
|
'* `%s` to %s\n' % pair
|
||||||
|
for pair in type(pr).merge_method.selection
|
||||||
|
if pair[0] != 'squash'
|
||||||
|
)},
|
||||||
|
)
|
||||||
|
pr.method_warned = True
|
||||||
|
raise exceptions.Skip()
|
||||||
|
|
||||||
msg = utils.make_message(prdict)
|
msg = utils.make_message(prdict)
|
||||||
if pr.message != msg:
|
if pr.message != msg:
|
||||||
|
@ -2211,7 +2211,7 @@ class TestPRUpdate:
|
|||||||
|
|
||||||
def test_update_opened(self, env, repo):
|
def test_update_opened(self, env, repo):
|
||||||
with repo:
|
with repo:
|
||||||
[c] = repo.make_commits("master", Commit('fist', tree={'m': 'c1'}))
|
[c] = repo.make_commits("master", Commit('first', tree={'m': 'c1'}))
|
||||||
prx = repo.make_pr(target='master', head=c)
|
prx = repo.make_pr(target='master', head=c)
|
||||||
|
|
||||||
pr = to_pr(env, prx)
|
pr = to_pr(env, prx)
|
||||||
@ -2595,6 +2595,77 @@ Please check and re-approve.
|
|||||||
assert not pr_id.reviewed_by
|
assert not pr_id.reviewed_by
|
||||||
assert not pr_id.squash
|
assert not pr_id.squash
|
||||||
|
|
||||||
|
@pytest.mark.defaultstatuses
|
||||||
|
def test_update_incorrect_commits_count(self, port, env, project, repo, config, users):
|
||||||
|
"""This is not a great test but it aims to kinda sorta simulate the
|
||||||
|
behaviour when a user retargets and updates a PR at about the same time:
|
||||||
|
github can send the hooks in the wrong order, which leads to the correct
|
||||||
|
base and head but can lead to the wrong squash status.
|
||||||
|
"""
|
||||||
|
project.write({
|
||||||
|
'branch_ids': [(0, 0, {
|
||||||
|
'name': 'xxx',
|
||||||
|
})]
|
||||||
|
})
|
||||||
|
with repo:
|
||||||
|
[c] = repo.make_commits("master", Commit("c", tree={"m": "n"}), ref="heads/thing")
|
||||||
|
pr = repo.make_pr(target='master', head='thing')
|
||||||
|
|
||||||
|
pr_id = to_pr(env, pr)
|
||||||
|
pr_id.head = '0'*40
|
||||||
|
with requests.Session() as s:
|
||||||
|
r = s.post(
|
||||||
|
f"http://localhost:{port}/runbot_merge/hooks",
|
||||||
|
headers={
|
||||||
|
"X-Github-Event": "pull_request",
|
||||||
|
},
|
||||||
|
json={
|
||||||
|
'action': 'synchronize',
|
||||||
|
'sender': {
|
||||||
|
'login': users['user'],
|
||||||
|
},
|
||||||
|
'repository': {
|
||||||
|
'full_name': repo.name,
|
||||||
|
},
|
||||||
|
'pull_request': {
|
||||||
|
'number': pr.number,
|
||||||
|
'head': {'sha': c},
|
||||||
|
'title': "c",
|
||||||
|
'commits': 40123,
|
||||||
|
'base': {
|
||||||
|
'ref': 'xxx',
|
||||||
|
'repo': {
|
||||||
|
'full_name': repo.name,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
r.raise_for_status()
|
||||||
|
assert pr_id.head == c, "the head should have been updated"
|
||||||
|
assert not pr_id.squash, "the wrong count should be used"
|
||||||
|
|
||||||
|
with repo:
|
||||||
|
pr.post_comment("hansen r+", config['role_reviewer']['token'])
|
||||||
|
repo.post_status(c, 'success')
|
||||||
|
env.run_crons()
|
||||||
|
assert not pr_id.blocked
|
||||||
|
assert pr_id.message_ids[::-1].mapped(lambda m: (
|
||||||
|
((m.subject or '') + '\n\n' + m.body).strip(),
|
||||||
|
list(map(read_tracking_value, m.tracking_value_ids)),
|
||||||
|
)) == [
|
||||||
|
('<p>Pull Request created</p>', []),
|
||||||
|
('', [('head', c, '0'*40)]),
|
||||||
|
('', [('head', '0'*40, c), ('squash', 1, 0)]),
|
||||||
|
('', [('state', 'Opened', 'Approved'), ('reviewed_by', '', 'Reviewer')]),
|
||||||
|
(f'<p>statuses changed on {c}</p>', [('state', 'Approved', 'Ready')]),
|
||||||
|
]
|
||||||
|
assert pr_id.staging_id
|
||||||
|
with repo:
|
||||||
|
repo.post_status('staging.master', 'success')
|
||||||
|
env.run_crons()
|
||||||
|
assert pr_id.merge_date
|
||||||
|
|
||||||
class TestBatching(object):
|
class TestBatching(object):
|
||||||
def _pr(self, repo, prefix, trees, *, target='master', user, reviewer,
|
def _pr(self, repo, prefix, trees, *, target='master', user, reviewer,
|
||||||
statuses=(('ci/runbot', 'success'), ('legal/cla', 'success'))
|
statuses=(('ci/runbot', 'success'), ('legal/cla', 'success'))
|
||||||
|
@ -806,10 +806,12 @@ class TestBlocked:
|
|||||||
|
|
||||||
p = to_pr(env, pr)
|
p = to_pr(env, pr)
|
||||||
assert p.state == 'ready'
|
assert p.state == 'ready'
|
||||||
assert p.blocked
|
assert not p.blocked
|
||||||
|
assert not p.staging_id
|
||||||
|
|
||||||
with repo_a: pr.post_comment('hansen rebase-merge', config['role_reviewer']['token'])
|
with repo_a: pr.post_comment('hansen rebase-merge', config['role_reviewer']['token'])
|
||||||
assert not p.blocked
|
env.run_crons()
|
||||||
|
assert p.staging_id
|
||||||
|
|
||||||
def test_linked_closed(self, env, repo_a, repo_b, config):
|
def test_linked_closed(self, env, repo_a, repo_b, config):
|
||||||
with repo_a, repo_b:
|
with repo_a, repo_b:
|
||||||
|
Loading…
Reference in New Issue
Block a user