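"""Backport wizard for runbot_merge pull requests.

`PullRequest.backport` opens a wizard on a single PR; applying the wizard
cherry-picks the PR onto an earlier branch of the same project, pushes the
result, and creates the corresponding backport PR on GitHub.
"""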
import logging
import re
import secrets

import requests

from odoo import models, fields
from odoo.exceptions import UserError

from ..batch import Batch
from ..project import Project
from ..pull_requests import Repository
from ... import git

_logger = logging.getLogger(__name__)


class PullRequest(models.Model):
    _inherit = 'runbot_merge.pull_requests'

    id: int
    display_name: str
    project: Project
    repository: Repository
    batch_id: Batch

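    # Server action on a single PR: validate that it can be backported, then
    # open the backport wizard targeting it.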
    def backport(self) -> dict:
        if len(self) != 1:
            raise UserError(f"Backporting works one PR at a time, got {len(self)}")

        if len(self.batch_id.prs) > 1:
            raise UserError("Automatic backport of multi-PR batches is not currently supported")

        if not self.project.fp_github_token:
            raise UserError(f"Cannot backport {self.display_name}: no token on project {self.project.display_name}")

        if not self.repository.fp_remote_target:
            raise UserError(f"Cannot backport {self.display_name}: no remote on {self.repository.name}")

        w = self.env['runbot_merge.pull_requests.backport'].create({
            'pr_id': self.id,
        })
        return {
            'type': 'ir.actions.act_window',
            'name': f"Backport of {self.display_name}",
            'views': [(False, 'form')],
            'target': 'new',
            'res_model': w._name,
            'res_id': w.id,
        }


class PullRequestBackport(models.TransientModel):
    _name = 'runbot_merge.pull_requests.backport'
    _description = "PR backport wizard"
    _rec_name = 'pr_id'

    pr_id = fields.Many2one('runbot_merge.pull_requests', required=True)
    project_id = fields.Many2one(related='pr_id.repository.project_id')
    source_seq = fields.Integer(related='pr_id.target.sequence')
    target = fields.Many2one(
        'runbot_merge.branch',
        domain="[('project_id', '=', project_id), ('sequence', '>', source_seq)]",
    )

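    # Wizard action: cherry-pick the PR onto the selected target branch, push
    # the result to the forward-port remote, create the backport PR on GitHub
    # and register it with the mergebot.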
    def action_apply(self) -> dict:
        if not self.target:
            raise UserError("A backport needs a backport target")

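        # `branches` is in forward-port order; the backport target must sit
        # strictly before the branch of the source PR (the root of the
        # forward-port chain, or this PR itself if it has no source)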
        project = self.pr_id.project
        branches = project._forward_port_ordered().ids
        source = self.pr_id.source_id or self.pr_id
        source_idx = branches.index(source.target.id)
        if branches.index(self.target.id) >= source_idx:
            raise UserError(
                "The backport branch needs to be before the source's branch "
                f"(got {self.target.name!r} and {source.target.name!r})"
            )

        _logger.info(
            "backporting %s (on %s) to %s",
            self.pr_id.display_name,
            self.pr_id.target.name,
            self.target.name,
        )

        bp_branch = f"{self.target.name}-{self.pr_id.refname}-{self.pr_id.batch_id.id}-bp"
        repo_id = self.pr_id.repository
        repo = git.get_local(repo_id)

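        # temporarily blank out the PR's commits_map around the port-branch
        # creation (then restore it) so the backport does not reuse or
        # overwrite the existing forward-port commit mapping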
        old_map = self.pr_id.commits_map
        self.pr_id.commits_map = "{}"
        conflict, head = self.pr_id._create_port_branch(repo, self.target, forward=False)
        self.pr_id.commits_map = old_map

        if conflict:
            feedback = "\n".join(filter(None, conflict[1:3]))
            raise UserError(f"backport conflict:\n\n{feedback}")
        repo.push(git.fw_url(repo_id), f"{head}:refs/heads/{bp_branch}")

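        # take a share lock on the PR table for the rest of the transaction:
        # concurrent writers (e.g. the webhook fired by the PR created below)
        # block until the backport has been registered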
        self.env.cr.execute('LOCK runbot_merge_pull_requests IN SHARE MODE')

        owner, _repo = repo_id.fp_remote_target.split('/', 1)
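        # reuse the source PR's message, mark it as a backport, and split it
        # back into a title (first line) and body for the GitHub API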
        message = source.message + f"\n\nBackport of {self.pr_id.display_name}"
        title, body = re.fullmatch(r'(?P<title>[^\n]+)\n*(?P<body>.*)', message, flags=re.DOTALL).groups()

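        # create the backport PR on GitHub using the project's forward-port
        # token, with the branch pushed above as its head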
        r = requests.post(
            f'https://api.github.com/repos/{repo_id.name}/pulls',
            headers={'Authorization': f'token {project.fp_github_token}'},
            json={
                'base': self.target.name,
                'head': f'{owner}:{bp_branch}',
                'title': '[Backport]' + ('' if title[0] == '[' else ' ') + title,
                'body': body,
            },
        )
        if not r.ok:
            raise UserError(f"Backport PR creation failure: {r.text}")

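        # create the mergebot-side record for the new PR directly from the
        # API response, carrying over the merge method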
        backport = self.env['runbot_merge.pull_requests']._from_gh(
            r.json(),
            merge_method=self.pr_id.merge_method,
            # the backport's own forwardport should stop right before the
            # original PR by default
            limit_id=branches[source_idx - 1],
        )
        _logger.info("Created backport %s for %s", backport.display_name, self.pr_id.display_name)

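        # queue a tagging so the new PR is tagged as a backport on GitHub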
        self.env['runbot_merge.pull_requests.tagging'].create({
            'repository': repo_id.id,
            'pull_request': backport.number,
            'tags_add': ['backport'],
        })
        # scheduling fp followup probably doesn't make sense since we don't copy the fw_policy...

        return {
            'type': 'ir.actions.act_window',
            'name': "new backport",
            'views': [(False, 'form')],
            'res_model': backport._name,
            'res_id': backport.id,
        }