Mirror of https://github.com/odoo/runbot.git, synced 2025-03-15 23:45:44 +07:00
[IMP] *: handle the addition of a new PR to a fw-ported batch
Given a batch which has been merged, and forward-ported to multiple branches (because skipci was set or CI passed on the repos the batch covers), the need might arise to add a PR for one of the repos the batch does not yet cover. This raises the question of what to do with that PR: since the forward-ports of the batch already exist, it will not be forward-ported through the normal process, and it is not obvious that it should be. The options are:

- don't do anything: such additions don't get ported. This is incongruous and unexpected, as PRs are forward-ported by default, and if the batch were not an intermediate (but e.g. a conflict) the PR probably would get ported forward
- port on merge: this allows configuring the PR properly (as it might need its own limit), but it means further batches may get unexpectedly merged (or at least retied) without the additional PR, even though we likely want it in
- immediately port the additional PR on creation: this makes the limit harder or impossible to configure, but it makes the *batch sequence* more consistent

We ended up selecting the last option: it feels closer to the updates system, and it creates more consistent batches through the sequence. It's also technically easier to ad-hoc port a PR through a bunch of branches than it is to update the "normal" forward-port process to handle partial fixups.
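In practice the chosen behaviour boils down to the following. This is a condensed sketch of the changes in the diff below (Odoo model boilerplate and error handling elided, so it is not runnable on its own; all names are taken from the diff):

# sketch only: assumes the runbot_merge / forwardport models shown in the diff
class PullRequests(models.Model):
    def create(self, vals):
        pr = super().create(vals)
        # the batch already has forward-ported descendants, so the new PR
        # missed the regular forward-port pass: enqueue an ad-hoc port now
        if self.env['runbot_merge.batch'].search_count([
            ('parent_id', '=', pr.batch_id.id),
        ]):
            self.env['forwardport.batches'].create({
                'batch_id': pr.batch_id.id,
                'source': 'complete',  # new job type introduced by this commit
                'pr_id': pr.id,        # the PR to port through the descendants
            })
        return pr

class ForwardPortTasks(models.Model, Queue):
    def _process_item(self):
        if self.source == 'complete':
            # create a matching forward-port of `pr_id` in every existing
            # descendant batch (see _complete_batches in the diff below)
            self._complete_batches()
            return
        # ... regular forward-port handling ...

The new `_complete_batches` job walks the batch's descendants and creates a matching forward-port of the added PR in each of them, detaching it (with a conflict comment) if the cherry-pick fails.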
parent 1e9fa48652
commit e7e81bf375
@@ -1,9 +1,12 @@
# -*- coding: utf-8 -*-
import contextlib
import logging
import re
import uuid
from contextlib import ExitStack
from datetime import datetime, timedelta

import requests
import sentry_sdk
from dateutil import relativedelta

@@ -13,6 +16,7 @@ from odoo.addons.runbot_merge.github import GH

# how long a merged PR survives
MERGE_AGE = relativedelta.relativedelta(weeks=2)
FOOTER = '\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'

_logger = logging.getLogger(__name__)

@@ -66,9 +70,11 @@ class ForwardPortTasks(models.Model, Queue):
    source = fields.Selection([
        ('merge', 'Merge'),
        ('fp', 'Forward Port Followup'),
        ('insert', 'New branch port')
        ('insert', 'New branch port'),
        ('complete', 'Complete ported batches'),
    ], required=True)
    retry_after = fields.Datetime(required=True, default='1900-01-01 01:01:01')
    pr_id = fields.Many2one('runbot_merge.pull_requests')

    def _search_domain(self):
        return super()._search_domain() + [
@@ -82,8 +88,11 @@ class ForwardPortTasks(models.Model, Queue):
    def _process_item(self):
        batch = self.batch_id
        sentry_sdk.set_tag('forward-porting', batch.prs.mapped('display_name'))
        newbatch = batch._port_forward()
        if self.source == 'complete':
            self._complete_batches()
            return

        newbatch = batch._port_forward()
        if not newbatch:  # reached end of seq (or batch is empty)
            # FIXME: or configuration is fucky so doesn't want to FP (maybe should error and retry?)
            _logger.info(
@@ -114,6 +123,120 @@ class ForwardPortTasks(models.Model, Queue):
        if newchild:
            newchild.parent_id = pr.id

    def _complete_batches(self):
        source = pr = self.pr_id
        if not pr:
            _logger.warning(
                "Unable to complete descendants of %s (%s): no new PR",
                self.batch_id,
                self.batch_id.prs.mapped('display_name'),
            )
            return
        _logger.info(
            "Completing batches for descendants of %s (added %s)",
            self.batch_id.prs.mapped('display_name'),
            self.pr_id.display_name,
        )

        gh = requests.Session()
        repository = pr.repository
        gh.headers['Authorization'] = f'token {repository.project_id.fp_github_token}'
        PullRequests = self.env['runbot_merge.pull_requests']
        self.env.cr.execute('LOCK runbot_merge_pull_requests IN SHARE MODE')

        # TODO: extract complete list of targets from `_find_next_target`
        # so we can create all the forwardport branches, push them, and
        # only then create the PR objects
        # TODO: maybe do that after making forward-port WC-less, so all the
        # branches can be pushed atomically at once
        with contextlib.ExitStack() as s:
            for descendant in self.batch_id.descendants():
                target = pr._find_next_target()
                if target is None:
                    _logger.info("Will not forward-port %s: no next target", pr.display_name)
                    return

                if PullRequests.search_count([
                    ('source_id', '=', source.id),
                    ('target', '=', target.id),
                    ('state', 'not in', ('closed', 'merged')),
                ]):
                    _logger.warning("Will not forward-port %s: already ported", pr.display_name)
                    return

                if target != descendant.target:
                    self.env['runbot_merge.pull_requests.feedback'].create({
                        'repository': repository.id,
                        'pull_request': source.id,
                        'token_field': 'fp_github_token',
                        'message': """\
{pr.ping}unable to port this PR forwards due to inconsistency: goes from \
{pr.target.name} to {next_target.name} but {batch} ({batch_prs}) targets \
{batch.target.name}.
""".format(pr=pr, next_target=target, batch=descendant, batch_prs=', '.join(descendant.mapped('prs.display_name')))
                    })
                    return

                ref = descendant.prs[:1].refname
                # NOTE: ports the new source everywhere instead of porting each
                # PR to the next step as it does not *stop* on conflict
                conflict, working_copy = source._create_fp_branch(target, ref, s)
                working_copy.push('target', ref)

                remote_target = repository.fp_remote_target
                owner, _ = remote_target.split('/', 1)
                message = source.message + f"\n\nForward-Port-Of: {pr.display_name}"

                title, body = re.match(r'(?P<title>[^\n]+)\n*(?P<body>.*)', message, flags=re.DOTALL).groups()
                r = gh.post(f'https://api.github.com/repos/{pr.repository.name}/pulls', json={
                    'base': target.name,
                    'head': f'{owner}:{ref}',
                    'title': '[FW]' + (' ' if title[0] != '[' else '') + title,
                    'body': body
                })
                if not r.ok:
                    _logger.warning("Failed to create forward-port PR for %s, deleting branches", pr.display_name)
                    # delete all the branches this should automatically close the
                    # PRs if we've created any. Using the API here is probably
                    # simpler than going through the working copies
                    d = gh.delete(f'https://api.github.com/repos/{remote_target}/git/refs/heads/{ref}')
                    if d.ok:
                        _logger.info("Deleting %s:%s=success", remote_target, ref)
                    else:
                        _logger.warning("Deleting %s:%s=%s", remote_target, ref, d.text)
                    raise RuntimeError(f"Forwardport failure: {pr.display_name} ({r.text})")

                new_pr = PullRequests._from_gh(r.json())
                _logger.info("Created forward-port PR %s", new_pr)
                new_pr.write({
                    'batch_id': descendant.id,  # should already be set correctly but...
                    'merge_method': pr.merge_method,
                    'source_id': source.id,
                    # only link to previous PR of sequence if cherrypick passed
                    # FIXME: apply parenting of siblings? Apply parenting *to* siblings?
                    'parent_id': pr.id if not conflict else False,
                    'detach_reason': "{1}\n{2}".format(*conflict).strip() if conflict else None,
                })

                if conflict:
                    self.env.ref('runbot_merge.forwardport.failure.conflict')._send(
                        repository=pr.repository,
                        pull_request=pr.number,
                        token_field='fp_github_token',
                        format_args={'source': source, 'pr': pr, 'new': new_pr, 'footer': FOOTER},
                    )
                new_pr._fp_conflict_feedback(pr, {pr: conflict})

                labels = ['forwardport']
                if conflict:
                    labels.append('conflict')
                self.env['runbot_merge.pull_requests.tagging'].create({
                    'repository': new_pr.repository.id,
                    'pull_request': new_pr.number,
                    'tags_add': labels,
                })

                pr = new_pr

class UpdateQueue(models.Model, Queue):
    _name = 'forwardport.updates'

@@ -17,6 +17,7 @@ import json
import logging
import operator
import subprocess
import sys
import tempfile
import typing
from pathlib import Path
@@ -195,7 +196,19 @@ class PullRequests(models.Model):

        if vals.get('parent_id') and 'source_id' not in vals:
            vals['source_id'] = self.browse(vals['parent_id']).root_id.id
        return super().create(vals)
        pr = super().create(vals)

        # added a new PR to an already forward-ported batch: port the PR
        if self.env['runbot_merge.batch'].search_count([
            ('parent_id', '=', pr.batch_id.id),
        ]):
            self.env['forwardport.batches'].create({
                'batch_id': pr.batch_id.id,
                'source': 'complete',
                'pr_id': pr.id,
            })

        return pr

    def write(self, vals):
        # if the PR's head is updated, detach (should split off the FP lines as this is not the original code)

@@ -1,4 +1,8 @@
from utils import Commit, make_basic, to_pr
import re

import pytest

from utils import Commit, make_basic, to_pr, seen, re_matches


def test_single_updated(env, config, make_repo):
@@ -133,19 +137,17 @@ def test_closing_during_fp(env, config, make_repo, users):
def test_add_pr_during_fp(env, config, make_repo, users):
    """ It should be possible to add new PRs to an FP batch
    """
    r1, _ = make_basic(env, config, make_repo)
    r2, fork2 = make_basic(env, config, make_repo)
    repos = env['runbot_merge.repository'].search([])
    repos.required_statuses = 'default'
    r1, _ = make_basic(env, config, make_repo, statuses="default")
    r2, fork2 = make_basic(env, config, make_repo, statuses="default")
    # needs a "d" branch
    repos[0].project_id.write({
    env['runbot_merge.project'].search([]).write({
        'branch_ids': [(0, 0, {'name': 'd', 'sequence': 40})],
    })
    with r1, r2:
        r1.make_ref("heads/d", r1.commit("c").id)
        r2.make_ref("heads/d", r2.commit("c").id)

    with r1, r2:
    with r1:
        r1.make_commits('a', Commit('1', tree={'1': '0'}), ref='heads/aref')
        pr1_a = r1.make_pr(target='a', head='aref')
        r1.post_status('aref', 'success')
@@ -211,3 +213,177 @@ def test_add_pr_during_fp(env, config, make_repo, users):

    assert find_child(pr1_c_id)
    assert find_child(pr2_c_id)


def test_add_to_forward_ported(env, config, make_repo, users):
    """Add a new branch to an intermediate step of a fw *sequence*, either
    because skipci or because all the intermediate CI succeeded
    """
    # region setup
    r1, _ = make_basic(env, config, make_repo, statuses="default")
    r2, fork2 = make_basic(env, config, make_repo, statuses="default")

    with r1:
        r1.make_commits('a', Commit('a', tree={'a': 'a'}), ref="heads/pr1")
        pr1_a = r1.make_pr(target="a", head="pr1")
        r1.post_status(pr1_a.head, 'success')
        pr1_a.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()
    with r1, r2:
        r1.post_status('staging.a', 'success')
        r2.post_status('staging.a', 'success')
    env.run_crons()

    # region port forward
    pr1_a_id = to_pr(env, pr1_a)
    pr1_b_id = pr1_a_id.forwardport_ids
    assert pr1_b_id
    with r1:
        r1.post_status(pr1_b_id.head, 'success')
    env.run_crons()
    pr1_c_id = pr1_a_id.forwardport_ids - pr1_b_id
    assert pr1_c_id
    # endregion
    # endregion

    # new PR must be in fork for labels to actually match
    with r2, fork2:
        # branch in fork has no owner prefix, but HEAD for cross-repo PR does
        fork2.make_commits("b", Commit('b', tree={'b': 'b'}), ref=f'heads/{pr1_b_id.refname}')
        pr2_b = r2.make_pr(title="b", target="b", head=pr1_b_id.label)
        r2.post_status(pr2_b.head, 'success')
    env.run_crons()

    pr2_b_id = to_pr(env, pr2_b)
    assert pr2_b_id.batch_id == pr1_b_id.batch_id
    assert len(pr2_b_id.forwardport_ids) == 1, \
        "since the batch is already forward ported, the new PR should" \
        " immediately be forward ported to match"
    assert pr2_b_id.forwardport_ids.label == pr1_c_id.label

    pr2_a = r1.get_pr(pr1_b_id.number)
    with r1, r2:
        pr2_a.post_comment('hansen r+', config['role_reviewer']['token'])
        pr2_b.post_comment("hansen r+", config['role_reviewer']['token'])
    env.run_crons()

    with r1, r2:
        r1.post_status('staging.b', 'success')
        r2.post_status('staging.b', 'success')
    env.run_crons()

    assert pr1_b_id.state == 'merged'
    assert pr2_b_id.state == 'merged'

    assert len(pr2_b_id.forwardport_ids) == 1,\
        "verify that pr2_b did not get forward ported again on merge"
    pr2_c = r2.get_pr(pr2_b_id.forwardport_ids.number)
    assert pr2_c.comments == [
        seen(env, pr2_c, users),
        (users['user'], '''\
@{user} this PR targets c and is the last of the forward-port chain.

To merge the full chain, use
> @hansen r+

More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
'''.format_map(users)),
    ]

def test_add_to_forward_port_conflict(env, config, make_repo, users):
    """If a PR is added to an existing forward port sequence, and it causes
    conflicts when forward ported, it should be treated similarly to an *update*
    causing a conflict: the PR is still created, but it's set in conflict.
    """
    # region setup
    r1, _ = make_basic(env, config, make_repo, statuses="default")
    r2, fork2 = make_basic(env, config, make_repo, statuses="default")
    project = env['runbot_merge.project'].search([])
    with r2:
        r2.make_commits(
            "c",
            Commit("C-onflict", tree={"b": "X"}),
            ref="heads/c"
        )

    with r1:
        r1.make_commits('a', Commit('a', tree={'a': 'a'}), ref="heads/pr1")
        pr1_a = r1.make_pr(target="a", head="pr1")
        r1.post_status(pr1_a.head, 'success')
        pr1_a.post_comment('hansen r+', config['role_reviewer']['token'])
    env.run_crons()
    with r1, r2:
        r1.post_status('staging.a', 'success')
        r2.post_status('staging.a', 'success')
    env.run_crons()

    # region port forward
    pr1_a_id = to_pr(env, pr1_a)
    pr1_b_id = pr1_a_id.forwardport_ids
    assert pr1_b_id
    with r1:
        r1.post_status(pr1_b_id.head, 'success')
    env.run_crons()
    pr1_c_id = pr1_a_id.forwardport_ids - pr1_b_id
    assert pr1_c_id
    # endregion
    # endregion

    # new PR must be in fork for labels to actually match
    with r2, fork2:
        # branch in fork has no owner prefix, but HEAD for cross-repo PR does
        fork2.make_commits("b", Commit('b', tree={'b': 'b'}), ref=f'heads/{pr1_b_id.refname}')
        pr2_b = r2.make_pr(title="b", target="b", head=pr1_b_id.label)
        r2.post_status(pr2_b.head, 'success')
    env.run_crons()

    pr2_b_id = to_pr(env, pr2_b)
    assert pr2_b_id.batch_id == pr1_b_id.batch_id
    pr2_c_id = pr2_b_id.forwardport_ids
    assert len(pr2_c_id) == 1, \
        "since the batch is already forward ported, the new PR should" \
        " immediately be forward ported to match"
    assert pr2_c_id.label == pr1_c_id.label
    assert not pr2_c_id.parent_id, "conflict -> should be detached"
    assert pr2_c_id.detach_reason

    pr2_a = r1.get_pr(pr1_b_id.number)
    with r1, r2:
        pr2_a.post_comment('hansen r+', config['role_reviewer']['token'])
        pr2_b.post_comment("hansen r+", config['role_reviewer']['token'])
    env.run_crons()

    with r1, r2:
        r1.post_status('staging.b', 'success')
        r2.post_status('staging.b', 'success')
    env.run_crons()

    assert pr1_b_id.state == 'merged'
    assert pr2_b_id.state == 'merged'

    pr2_c = r2.get_pr(pr2_c_id.number)
    assert pr2_c.comments == [
        seen(env, pr2_c, users),
        # should have conflicts
        (users['user'], re_matches(r"""@{user} cherrypicking of pull request {previous.display_name} failed\.

stdout:
```
Auto-merging b
CONFLICT \(add/add\): Merge conflict in b

```

stderr:
```
.*
```

Either perform the forward-port manually \(and push to this branch, proceeding as usual\) or close this PR \(maybe\?\)\.

In the former case, you may want to edit this PR message as well\.

:warning: after resolving this conflict, you will need to merge it via @{project.github_prefix}\.

More info at https://github\.com/odoo/odoo/wiki/Mergebot#forward-port
""".format(project=project, previous=pr2_b_id, **users), re.DOTALL))
    ]

@@ -49,8 +49,11 @@ class re_matches:
    def __eq__(self, text):
        return self._r.match(text)

    def __str__(self):
        return re.sub(r'\\(.)', r'\1', self._r.pattern)

    def __repr__(self):
        return repr(self._r.pattern)
        return repr(str(self))

def seen(env, pr, users):
    return users['user'], f'[Pull request status dashboard]({to_pr(env, pr).url}).'

@@ -82,13 +82,13 @@ class Batch(models.Model):
        # in DB both will prefix-match on the literal prefix then apply a
        # trivial filter (even though the filter is technically unnecessary for
        # the first form), doing it like this means we don't have to `- self`
        # in the `not includ_self` case
        # in the ``not include_self`` case
        if include_self:
            pattern = self.parent_path + '%'
        else:
            pattern = self.parent_path + '_%'

        return self.search([("parent_path", '=like', pattern)])
        return self.search([("parent_path", '=like', pattern)], order="parent_path")

    # also depends on all the descendants of the source or sth
    @api.depends('parent_path')
@@ -323,9 +323,10 @@ class Batch(models.Model):
                'source_id': source.id,
                # only link to previous PR of sequence if cherrypick passed
                'parent_id': pr.id if not has_conflicts else False,
                'detach_reason': "conflicts: {}".format(
                    f'\n{conflicts[pr]}\n{conflicts[pr]}'.strip()
                ) if has_conflicts else None,
                'detach_reason': "conflicts:\n{}".format('\n\n'.join(
                    f"{out}\n{err}".strip()
                    for _, out, err, _ in filter(None, conflicts.values())
                )) if has_conflicts else None,
            })
            if has_conflicts and pr.parent_id and pr.state not in ('merged', 'closed'):
                self.env.ref('runbot_merge.forwardport.failure.conflict')._send(
@@ -336,63 +337,7 @@ class Batch(models.Model):
                )

        for pr, new_pr in zip(prs, new_batch):
            (h, out, err, hh) = conflicts.get(pr) or (None, None, None, None)

            if h:
                sout = serr = ''
                if out.strip():
                    sout = f"\nstdout:\n```\n{out}\n```\n"
                if err.strip():
                    serr = f"\nstderr:\n```\n{err}\n```\n"

                lines = ''
                if len(hh) > 1:
                    lines = '\n' + ''.join(
                        '* %s%s\n' % (sha, ' <- on this commit' if sha == h else '')
                        for sha in hh
                    )
                template = 'runbot_merge.forwardport.failure'
                format_args = {
                    'pr': new_pr,
                    'commits': lines,
                    'stdout': sout,
                    'stderr': serr,
                    'footer': FOOTER,
                }
            elif has_conflicts:
                template = 'runbot_merge.forwardport.linked'
                format_args = {
                    'pr': new_pr,
                    'siblings': ', '.join(p.display_name for p in (new_batch - new_pr)),
                    'footer': FOOTER,
                }
            elif not new_pr._find_next_target():
                ancestors = "".join(
                    f"* {p.display_name}\n"
                    for p in pr._iter_ancestors()
                    if p.parent_id
                    if p.state not in ('closed', 'merged')
                    if p.target.active
                )
                template = 'runbot_merge.forwardport.final'
                format_args = {
                    'pr': new_pr,
                    'containing': ' containing:' if ancestors else '.',
                    'ancestors': ancestors,
                    'footer': FOOTER,
                }
            else:
                template = 'runbot_merge.forwardport.intermediate'
                format_args = {
                    'pr': new_pr,
                    'footer': FOOTER,
                }
            self.env.ref(template)._send(
                repository=new_pr.repository,
                pull_request=new_pr.number,
                token_field='fp_github_token',
                format_args=format_args,
            )
            new_pr._fp_conflict_feedback(pr, conflicts)

        labels = ['forwardport']
        if has_conflicts:
@@ -409,7 +354,6 @@ class Batch(models.Model):
        new_batch._schedule_fp_followup()
        return new_batch


    def _schedule_fp_followup(self):
        _logger = logging.getLogger(__name__).getChild('forwardport.next')
        # if the PR has a parent and is CI-validated, enqueue the next PR

@@ -26,6 +26,7 @@ from .utils import enum
from .. import github, exceptions, controllers, utils

_logger = logging.getLogger(__name__)
FOOTER = '\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'


class StatusConfiguration(models.Model):
@@ -1386,6 +1387,64 @@ class PullRequests(models.Model):

        return True

    def _fp_conflict_feedback(self, previous_pr, conflicts):
        (h, out, err, hh) = conflicts.get(previous_pr) or (None, None, None, None)
        if h:
            sout = serr = ''
            if out.strip():
                sout = f"\nstdout:\n```\n{out}\n```\n"
            if err.strip():
                serr = f"\nstderr:\n```\n{err}\n```\n"

            lines = ''
            if len(hh) > 1:
                lines = '\n' + ''.join(
                    '* %s%s\n' % (sha, ' <- on this commit' if sha == h else '')
                    for sha in hh
                )
            template = 'runbot_merge.forwardport.failure'
            format_args = {
                'pr': self,
                'commits': lines,
                'stdout': sout,
                'stderr': serr,
                'footer': FOOTER,
            }
        elif any(conflicts.values()):
            template = 'runbot_merge.forwardport.linked'
            format_args = {
                'pr': self,
                'siblings': ', '.join(p.display_name for p in (self.batch_id - self)),
                'footer': FOOTER,
            }
        elif not self._find_next_target():
            ancestors = "".join(
                f"* {p.display_name}\n"
                for p in previous_pr._iter_ancestors()
                if p.parent_id
                if p.state not in ('closed', 'merged')
                if p.target.active
            )
            template = 'runbot_merge.forwardport.final'
            format_args = {
                'pr': self,
                'containing': ' containing:' if ancestors else '.',
                'ancestors': ancestors,
                'footer': FOOTER,
            }
        else:
            template = 'runbot_merge.forwardport.intermediate'
            format_args = {
                'pr': self,
                'footer': FOOTER,
            }
        self.env.ref(template)._send(
            repository=self.repository,
            pull_request=self.number,
            token_field='fp_github_token',
            format_args=format_args,
        )

    # ordering is a bit unintuitive because the lowest sequence (and name)
    # is the last link of the fp chain, reasoning is a bit more natural the
    # other way around (highest object is the last), especially with Python
