[CHG] runbot_merge: perform stagings in a local clone of the repo

The github API has gotten a lot more constraining (with rate
restrictions being newly enforced or added somewhat out of nowhere),
and, as importantly, a lot less reliable. So move the staging process
off of github and perform it locally, similar to the forward-porting
process (whose repo cache is reused for this).

Fixes #247
commit b0b609ebe7
Xavier Morel, 2023-08-25 15:33:25 +02:00
22 changed files with 1262 additions and 1071 deletions
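In practice the change swaps GitHub API calls (gh.merge, gh.rebase, gh.set_ref) for operations on a bare local clone. A minimal sketch of the new flow, using the helpers this commit introduces (names taken from the diff below; repository, staging_head, commits and branch_name are illustrative placeholders):

repo = git.get_local(repository, 'github')   # bare mirror in the shared cache dir
repo.fetch()                                 # refresh refs/heads/* from github
# rebase the PR's commits locally, without any API round-trips
head, mapping = repo.rebase(staging_head, commits)
# a single push publishes the staging branch
repo.push(git.source_url(repository, 'github'), f'{head}:refs/heads/staging.{branch_name}')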


@@ -537,6 +537,9 @@ class Repo:
self.hook = False
repos.append(self)
def __repr__(self):
return f'<conftest.Repo {self.name}>'
@property
def owner(self):
return self.name.split('/')[0]


@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
{
'name': 'forward port bot',
'version': '1.2',
'version': '1.3',
'summary': "A port which forward ports successful PRs.",
'depends': ['runbot_merge'],
'data': [


@@ -42,17 +42,4 @@
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
<record model="ir.cron" id="maintenance">
<field name="name">Maintenance of repo cache</field>
<field name="model_id" ref="model_forwardport_maintenance"/>
<field name="state">code</field>
<field name="code">model._run()</field>
<!-- run sunday morning as it can take a while, unlikely someone will need to forward-port stuff at that point -->
<field name="nextcall" eval="datetime.utcnow() + relativedelta(weekday=6, hour=2, minute=0, second=0, microsecond=0)"/>
<field name="interval_number">1</field>
<field name="interval_type">weeks</field>
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
</odoo>


@@ -43,13 +43,4 @@
<field name="perm_write">0</field>
<field name="perm_unlink">0</field>
</record>
<record id="access_forwardport_maintenance" model="ir.model.access">
<field name="name">Access to maintenance is useless</field>
<field name="model_id" ref="model_forwardport_maintenance"/>
<field name="perm_read">0</field>
<field name="perm_create">0</field>
<field name="perm_write">0</field>
<field name="perm_unlink">0</field>
</record>
</odoo>


@@ -0,0 +1,9 @@
import pathlib
from odoo.tools.appdirs import user_cache_dir
def migrate(_cr, _version):
# avoid needing to re-clone our repo unnecessarily
pathlib.Path(user_cache_dir('forwardport')).rename(
pathlib.Path(user_cache_dir('mergebot')))
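For reference, user_cache_dir resolves per platform; on a typical Linux deployment the rename above amounts to (paths illustrative):

user_cache_dir('forwardport')  # e.g. /home/odoo/.cache/forwardport
user_cache_dir('mergebot')     # e.g. /home/odoo/.cache/mergebot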


@@ -1,20 +1,15 @@
# -*- coding: utf-8 -*-
import logging
import pathlib
import sentry_sdk
import resource
import subprocess
import uuid
from contextlib import ExitStack
from datetime import datetime, timedelta
import sentry_sdk
from dateutil import relativedelta
from odoo import fields, models
from odoo.addons.runbot_merge import git
from odoo.addons.runbot_merge.github import GH
from odoo.tools.appdirs import user_cache_dir
# how long a merged PR survives
MERGE_AGE = relativedelta.relativedelta(weeks=2)
@@ -177,7 +172,7 @@ class UpdateQueue(models.Model, Queue):
# doesn't propagate revisions fast enough so on the next loop we
# can't find the revision we just pushed
dummy_branch = str(uuid.uuid4())
ref = previous._get_local_directory()
ref = git.get_local(previous.repository, 'fp_github')
working_copy.push(ref._directory, f'{new_head}:refs/heads/{dummy_branch}')
ref.branch('--delete', '--force', dummy_branch)
# then update the child's branch to the new head
@@ -261,46 +256,3 @@ class DeleteBranches(models.Model, Queue):
r.json()
)
_deleter.info('✔ deleted branch %s of PR %s', self.pr_id.label, self.pr_id.display_name)
_gc = _logger.getChild('maintenance')
def _bypass_limits():
"""Allow git to go beyond the limits set for Odoo.
On large repositories, git gc can take a *lot* of memory (especially with
`--aggressive`), if the Odoo limits are too low this can prevent the gc
from running, leading to a lack of packing and a massive amount of cruft
accumulating in the working copy.
"""
resource.setrlimit(resource.RLIMIT_AS, (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
class GC(models.TransientModel):
_name = 'forwardport.maintenance'
_description = "Weekly maintenance of... cache repos?"
def _run(self):
# lock out the forward port cron to avoid concurrency issues while we're
# GC-ing it: wait until it's available, then SELECT FOR UPDATE it,
# which should prevent cron workers from running it
fp_cron = self.env.ref('forwardport.port_forward')
self.env.cr.execute("""
SELECT 1 FROM ir_cron
WHERE id = %s
FOR UPDATE
""", [fp_cron.id])
repos_dir = pathlib.Path(user_cache_dir('forwardport'))
# run on all repos with a forwardport target (~ forwardport enabled)
for repo in self.env['runbot_merge.repository'].search([('fp_remote_target', '!=', False)]):
repo_dir = repos_dir / repo.name
if not repo_dir.is_dir():
continue
_gc.info('Running maintenance on %s', repo.name)
r = subprocess.run(
['git', '--git-dir', repo_dir, 'gc', '--aggressive', '--prune=now'],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
encoding='utf-8',
preexec_fn = _bypass_limits,
)
if r.returncode:
_gc.warning("Maintenance failure (status=%d):\n%s", r.returncode, r.stdout)


@@ -20,24 +20,24 @@ import json
import logging
import operator
import os
import pathlib
import re
import subprocess
import tempfile
import typing
from pathlib import Path
import dateutil.relativedelta
import requests
import resource
from odoo import _, models, fields, api
from odoo.osv import expression
from odoo.exceptions import UserError
from odoo.tools.misc import topological_sort, groupby
from odoo.tools.sql import reverse_order
from odoo.tools.appdirs import user_cache_dir
from odoo.addons.runbot_merge import utils
from odoo.addons.runbot_merge import git, utils
from odoo.addons.runbot_merge.models.pull_requests import RPLUS
from odoo.addons.runbot_merge.models.stagings_create import Message
footer = '\nMore info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port\n'
@@ -847,14 +847,6 @@ class PullRequests(models.Model):
b.prs[0]._schedule_fp_followup()
return b
@property
def _source_url(self):
return 'https://{}:{}@github.com/{}'.format(
self.repository.project_id.fp_github_name or '',
self.repository.project_id.fp_github_token,
self.repository.name,
)
def _create_fp_branch(self, target_branch, fp_branch_name, cleanup):
""" Creates a forward-port for the current PR to ``target_branch`` under
``fp_branch_name``.
@@ -874,25 +866,26 @@ class PullRequests(models.Model):
"Forward-porting %s (%s) to %s",
self.display_name, root.display_name, target_branch.name
)
source = self._get_local_directory()
source = git.get_local(self.repository, 'fp_github')
r = source.with_config(stdout=subprocess.PIPE, stderr=subprocess.STDOUT).fetch()
logger.info("Updated cache repo %s:\n%s", source._directory, r.stdout.decode())
logger.info("Create working copy...")
cache_dir = user_cache_dir('forwardport')
# PullRequest.display_name is `owner/repo#number`, so `owner` becomes a
# directory. `TemporaryDirectory` only creates the leaf, so we need to
# make sure `owner` exists in `cache_dir`.
Path(cache_dir, root.repository.name).parent.mkdir(parents=True, exist_ok=True)
working_copy = source.clone(
cleanup.enter_context(
tempfile.TemporaryDirectory(
prefix='%s-to-%s-' % (
root.display_name,
target_branch.name
),
dir=user_cache_dir('forwardport')
)),
prefix=f'{root.display_name}-to-{target_branch.name}',
dir=cache_dir)),
branch=target_branch.name
)
r = working_copy.with_config(stdout=subprocess.PIPE, stderr=subprocess.STDOUT) \
.fetch(self._source_url, root.head)
.fetch(git.source_url(self.repository, 'fp_github'), root.head)
logger.info(
"Fetched head of %s into %s:\n%s",
root.display_name,
@@ -1081,32 +1074,12 @@ stderr:
def _make_fp_message(self, commit):
cmap = json.loads(self.commits_map)
msg = self._parse_commit_message(commit['commit']['message'])
msg = Message.from_message(commit['commit']['message'])
# write the *merged* commit as "original", not the PR's
msg.headers['x-original-commit'] = cmap.get(commit['sha'], commit['sha'])
# don't stringify so caller can still perform alterations
return msg
def _get_local_directory(self):
repos_dir = pathlib.Path(user_cache_dir('forwardport'))
repos_dir.mkdir(parents=True, exist_ok=True)
repo_dir = repos_dir / self.repository.name
if repo_dir.is_dir():
return git(repo_dir)
else:
_logger.info("Cloning out %s to %s", self.repository.name, repo_dir)
subprocess.run(['git', 'clone', '--bare', self._source_url, str(repo_dir)], check=True)
# bare repos don't have fetch specs by default, and fetching *into*
# them is a pain in the ass, configure fetch specs so `git fetch`
# works properly
repo = git(repo_dir)
repo.config('--add', 'remote.origin.fetch', '+refs/heads/*:refs/heads/*')
# negative refspecs require git 2.29
repo.config('--add', 'remote.origin.fetch', '^refs/heads/tmp.*')
repo.config('--add', 'remote.origin.fetch', '^refs/heads/staging.*')
return repo
def _outstanding(self, cutoff):
""" Returns "outstanding" (unmerged and unclosed) forward-ports whose
source was merged before ``cutoff`` (all of them if not provided).
@@ -1170,89 +1143,6 @@ class Feedback(models.Model):
token_field = fields.Selection(selection_add=[('fp_github_token', 'Forwardport Bot')])
ALWAYS = ('gc.auto=0', 'maintenance.auto=0')
def _bypass_limits():
resource.setrlimit(resource.RLIMIT_AS, (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
def git(directory): return Repo(directory, check=True)
class Repo:
def __init__(self, directory, **config):
self._directory = str(directory)
config.setdefault('stderr', subprocess.PIPE)
self._config = config
self._params = ()
self._opener = subprocess.run
def __getattr__(self, name):
return GitCommand(self, name.replace('_', '-'))
def _run(self, *args, **kwargs):
opts = {**self._config, **kwargs}
args = ('git', '-C', self._directory)\
+ tuple(itertools.chain.from_iterable(('-c', p) for p in self._params + ALWAYS))\
+ args
try:
return self._opener(args, preexec_fn=_bypass_limits, **opts)
except subprocess.CalledProcessError as e:
stream = e.stderr if e.stderr else e.stdout
if stream:
_logger.error("git call error: %s", stream)
raise
def stdout(self, flag=True):
if flag is True:
return self.with_config(stdout=subprocess.PIPE)
elif flag is False:
return self.with_config(stdout=None)
return self.with_config(stdout=flag)
def lazy(self):
r = self.with_config()
r._config.pop('check', None)
r._opener = subprocess.Popen
return r
def check(self, flag):
return self.with_config(check=flag)
def with_config(self, **kw):
opts = {**self._config, **kw}
r = Repo(self._directory, **opts)
r._opener = self._opener
r._params = self._params
return r
def with_params(self, *args):
r = self.with_config()
r._params = args
return r
def clone(self, to, branch=None):
self._run(
'clone',
*([] if branch is None else ['-b', branch]),
self._directory, to,
)
return Repo(to)
class GitCommand:
def __init__(self, repo, name):
self._name = name
self._repo = repo
def __call__(self, *args, **kwargs):
return self._repo._run(self._name, *args, *self._to_options(kwargs))
def _to_options(self, d):
for k, v in d.items():
if len(k) == 1:
yield '-' + k
else:
yield '--' + k.replace('_', '-')
if v not in (None, True):
assert v is not False
yield str(v)
class CherrypickError(Exception):
...


@@ -8,7 +8,8 @@ MESSAGE_TEMPLATE = """{message}
closes {repo}#{number}
{headers}Signed-off-by: {name} <{email}>"""
{headers}Signed-off-by: {name} <{email}>
"""
# target branch '-' source branch '-' base64 unique '-fw'
REF_PATTERN = r'{target}-{source}-[a-zA-Z0-9_-]{{4}}-fw'
@@ -136,6 +137,4 @@ def to_pr(env, pr):
return pr
def part_of(label, pr_id, *, separator='\n\n'):
""" Adds the "part-of" pseudo-header in the footer.
"""
return f'{label}{separator}Part-of: {pr_id.display_name}'
return f'{label}{separator}Part-of: {pr_id.display_name}\n'
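Illustrative result, assuming a PR record whose display_name is odoo/odoo#1234:

part_of('Some commit subject', pr_id)
# -> 'Some commit subject\n\nPart-of: odoo/odoo#1234\n'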


@@ -7,6 +7,7 @@
'security/ir.model.access.csv',
'data/merge_cron.xml',
'models/crons/git_maintenance.xml',
'data/runbot_merge.pull_requests.feedback.template.csv',
'views/res_partner.xml',
'views/runbot_merge_project.xml',

runbot_merge/git.py (new file, 240 lines)

@@ -0,0 +1,240 @@
import dataclasses
import itertools
import logging
import os
import pathlib
import resource
import subprocess
from typing import Optional, TypeVar, Union, Sequence, Tuple, Dict
from odoo.tools.appdirs import user_cache_dir
from .github import MergeError, PrCommit
_logger = logging.getLogger(__name__)
def source_url(repository, prefix: str) -> str:
return 'https://{}@github.com/{}'.format(
repository.project_id[f'{prefix}_token'],
repository.name,
)
Authorship = Union[Tuple[str, str], Tuple[str, str, str]]
def get_local(repository, prefix: Optional[str]) -> 'Optional[Repo]':
repos_dir = pathlib.Path(user_cache_dir('mergebot'))
repos_dir.mkdir(parents=True, exist_ok=True)
# NB: `repository.name` is `$org/$name` so this will be a subdirectory, probably
repo_dir = repos_dir / repository.name
if repo_dir.is_dir():
return git(repo_dir)
elif prefix:
_logger.info("Cloning out %s to %s", repository.name, repo_dir)
subprocess.run(['git', 'clone', '--bare', source_url(repository, prefix), str(repo_dir)], check=True)
# bare repos don't have fetch specs by default, and fetching *into*
# them is a pain in the ass, configure fetch specs so `git fetch`
# works properly
repo = git(repo_dir)
repo.config('--add', 'remote.origin.fetch', '+refs/heads/*:refs/heads/*')
# negative refspecs require git 2.29
repo.config('--add', 'remote.origin.fetch', '^refs/heads/tmp.*')
repo.config('--add', 'remote.origin.fetch', '^refs/heads/staging.*')
return repo
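With those fetch specs in place, refreshing the bare mirror is a single call; a sketch assuming an existing repository record:

repo = get_local(repository, 'github')
# updates every refs/heads/* in place; the negative refspecs (git >= 2.29)
# skip the bot's own tmp.* and staging.* branches
repo.fetch()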
ALWAYS = ('gc.auto=0', 'maintenance.auto=0')
def _bypass_limits():
resource.setrlimit(resource.RLIMIT_AS, (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
def git(directory: str) -> 'Repo':
return Repo(directory, check=True)
Self = TypeVar("Self", bound="Repo")
class Repo:
def __init__(self, directory, **config) -> None:
self._directory = str(directory)
config.setdefault('stderr', subprocess.PIPE)
self._config = config
self._params = ()
def __getattr__(self, name: str) -> 'GitCommand':
return GitCommand(self, name.replace('_', '-'))
def _run(self, *args, **kwargs) -> subprocess.CompletedProcess:
opts = {**self._config, **kwargs}
args = ('git', '-C', self._directory)\
+ tuple(itertools.chain.from_iterable(('-c', p) for p in self._params + ALWAYS))\
+ args
try:
return subprocess.run(args, preexec_fn=_bypass_limits, **opts)
except subprocess.CalledProcessError as e:
stream = e.stderr or e.stdout
if stream:
_logger.error("git call error: %s", stream)
raise
def stdout(self, flag: bool = True) -> Self:
if flag is True:
return self.with_config(stdout=subprocess.PIPE)
elif flag is False:
return self.with_config(stdout=None)
return self.with_config(stdout=flag)
def check(self, flag: bool) -> Self:
return self.with_config(check=flag)
def with_config(self, **kw) -> Self:
opts = {**self._config, **kw}
r = Repo(self._directory, **opts)
r._params = self._params
return r
def with_params(self, *args) -> Self:
r = self.with_config()
r._params = args
return r
def clone(self, to: str, branch: Optional[str] = None) -> Self:
self._run(
'clone',
*([] if branch is None else ['-b', branch]),
self._directory, to,
)
return Repo(to)
def rebase(self, dest: str, commits: Sequence[PrCommit]) -> Tuple[str, Dict[str, str]]:
"""Implements rebase by hand atop plumbing so:
- we can work without a working copy
- we can track individual commits (and store the mapping)
It looks like `--merge-base` is not sufficient for `merge-tree` to
correctly keep track of history, so it loses contents. Therefore
implement in two passes as in the github version.
"""
repo = self.stdout().with_config(text=True, check=False)
logger = _logger.getChild('rebase')
logger.debug("rebasing %s on %s (reset=%s, commits=%s)",
self._repo, dest, len(commits))
if not commits:
raise MergeError("PR has no commits")
new_trees = []
parent = dest
for original in commits:
if len(original['parents']) != 1:
raise MergeError(
f"commits with multiple parents ({original['sha']}) can not be rebased, "
"either fix the branch to remove merges or merge without "
"rebasing")
new_trees.append(check(repo.merge_tree(parent, original['sha'])).stdout.strip())
parent = check(repo.commit_tree(
tree=new_trees[-1],
parents=[parent, original['sha']],
message=f'temp rebase {original["sha"]}',
)).stdout.strip()
mapping = {}
for original, tree in zip(commits, new_trees):
authorship = check(repo.show('--no-patch', '--pretty=%an%n%ae%n%ai%n%cn%n%ce', original['sha']))
author_name, author_email, author_date, committer_name, committer_email =\
authorship.stdout.splitlines()
c = check(repo.commit_tree(
tree=tree,
parents=[dest],
message=original['commit']['message'],
author=(author_name, author_email, author_date),
committer=(committer_name, committer_email),
)).stdout.strip()
logger.debug('copied %s to %s (parent: %s)', original['sha'], c, dest)
dest = mapping[original['sha']] = c
return dest, mapping
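Hypothetical usage, with pr_commits as returned by GH.commits (oldest first) and dest the sha to rebase onto:

head, mapping = repo.rebase(dest, pr_commits)
# head: sha of the last copied commit, i.e. the new PR head
# mapping: original sha -> rebased sha (later serialised into commits_map)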
def merge(self, c1: str, c2: str, msg: str, *, author: Tuple[str, str]) -> str:
repo = self.stdout().with_config(text=True, check=False)
t = repo.merge_tree(c1, c2)
if t.returncode:
raise MergeError(t.stderr)
c = self.commit_tree(
tree=t.stdout.strip(),
message=msg,
parents=[c1, c2],
author=author,
)
if c.returncode:
raise MergeError(c.stderr)
return c.stdout.strip()
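The merge helper is used the same way; a sketch with assumed shas and identity (the author tuple is presumably where the new github_name / github_email fields come in):

merge_sha = repo.merge(
    staging_head,   # first parent: current staging head
    pr_head,        # second parent: the PR's head
    str(message),
    author=(project.github_name, project.github_email),
)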
def commit_tree(
self, *, tree: str, message: str,
parents: Sequence[str] = (),
author: Optional[Authorship] = None,
committer: Optional[Authorship] = None,
) -> subprocess.CompletedProcess:
authorship = {}
if author:
authorship['GIT_AUTHOR_NAME'] = author[0]
authorship['GIT_AUTHOR_EMAIL'] = author[1]
if len(author) > 2:
authorship['GIT_AUTHOR_DATE'] = author[2]
if committer:
authorship['GIT_COMMITTER_NAME'] = committer[0]
authorship['GIT_COMMITTER_EMAIL'] = committer[1]
if len(committer) > 2:
authorship['GIT_COMMITTER_DATE'] = committer[2]
return self.with_config(
stdout=subprocess.PIPE,
text=True,
env={
**os.environ,
**authorship,
# we don't want git to use the timezone of the machine it's
# running on: previously it used the timezone configured in
# github (?), which I think / assume defaults to a generic UTC
'TZ': 'UTC',
}
)._run(
'commit-tree',
tree,
'-m', message,
*itertools.chain.from_iterable(('-p', p) for p in parents),
)
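The authorship tuples map directly onto git's environment variables, so the call above is roughly equivalent to this invocation (illustrative):

# GIT_AUTHOR_NAME=... GIT_AUTHOR_EMAIL=... [GIT_AUTHOR_DATE=...] TZ=UTC \
#     git -C <repo> -c gc.auto=0 -c maintenance.auto=0 commit-tree <tree> -m <msg> -p <parent>...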
def check(p: subprocess.CompletedProcess) -> subprocess.CompletedProcess:
if not p.returncode:
return p
_logger.info("rebase failed at %s\nstdout:\n%s\nstderr:\n%s", p.args, p.stdout, p.stderr)
raise MergeError(p.stderr or 'merge conflict')
@dataclasses.dataclass
class GitCommand:
repo: Repo
name: str
def __call__(self, *args, **kwargs) -> subprocess.CompletedProcess:
return self.repo._run(self.name, *args, *self._to_options(kwargs))
def _to_options(self, d):
for k, v in d.items():
if len(k) == 1:
yield '-' + k
else:
yield '--' + k.replace('_', '-')
if v not in (None, True):
assert v is not False
yield str(v)
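So any attribute that is not an explicit method becomes a git subcommand, and keyword arguments become flags; for instance (shas hypothetical):

repo.gc(aggressive=True, prune='now')   # git gc --aggressive --prune=now
repo.merge_tree('abc123', 'def456')     # git merge-tree abc123 def456
repo.config('--add', 'remote.origin.fetch', '+refs/heads/*:refs/heads/*')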


@@ -8,6 +8,7 @@ import pathlib
import pprint
import time
import unicodedata
from typing import Iterable, List, TypedDict, Literal
import requests
import werkzeug.urls
@@ -47,6 +48,42 @@ def _init_gh_logger():
if odoo.netsvc._logger_init:
_init_gh_logger()
SimpleUser = TypedDict('SimpleUser', {
'login': str,
'url': str,
'type': Literal['User', 'Organization'],
})
Authorship = TypedDict('Authorship', {
'name': str,
'email': str,
})
Commit = TypedDict('Commit', {
'tree': str,
'url': str,
'message': str,
# optional when creating a commit
'author': Authorship,
'committer': Authorship,
'comments_count': int,
})
CommitLink = TypedDict('CommitLink', {
'html_url': str,
'sha': str,
'url': str,
})
PrCommit = TypedDict('PrCommit', {
'url': str,
'sha': str,
'commit': Commit,
# optional when creating a commit (in which case it uses the current user)
'author': SimpleUser,
'committer': SimpleUser,
'parents': List[CommitLink],
# not actually true but we're smuggling stuff via that key
'new_tree': str,
})
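For orientation, a PrCommit as consumed by Repo.rebase looks roughly like this (abridged, values invented):

{
    'sha': '0123abc...',
    'parents': [{'sha': '4567def...', 'url': '...', 'html_url': '...'}],
    'commit': {
        'message': '[FIX] something',
        'author': {'name': 'An Author', 'email': 'author@example.com'},
        'committer': {'name': 'A Committer', 'email': 'committer@example.com'},
    },
}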
GH_LOG_PATTERN = """=> {method} {path}{qs}{body}
<= {r.status_code} {r.reason}
@@ -137,7 +174,7 @@ class GH(object):
r.raise_for_status()
return r.json()
def head(self, branch):
def head(self, branch: str) -> str:
d = utils.backoff(
lambda: self('get', 'git/refs/heads/{}'.format(branch)).json(),
exc=requests.HTTPError
@@ -276,92 +313,6 @@ class GH(object):
f"Sanity check ref update of {branch}, expected {sha} got {head}"
return status
def merge(self, sha, dest, message):
r = self('post', 'merges', json={
'base': dest,
'head': sha,
'commit_message': message,
}, check={409: MergeError})
try:
r = r.json()
except Exception:
raise MergeError("got non-JSON reponse from github: %s %s (%s)" % (r.status_code, r.reason, r.text))
_logger.debug(
"merge(%s, %s (%s), %s) -> %s",
self._repo, dest, r['parents'][0]['sha'],
shorten(message), r['sha']
)
return dict(r['commit'], sha=r['sha'], parents=r['parents'])
def rebase(self, pr, dest, reset=False, commits=None):
""" Rebase pr's commits on top of dest, updates dest unless ``reset``
is set.
Returns the hash of the rebased head and a map of all PR commits (to the PR they were rebased to)
"""
logger = _logger.getChild('rebase')
original_head = self.head(dest)
if commits is None:
commits = self.commits(pr)
logger.debug("rebasing %s, %s on %s (reset=%s, commits=%s)",
self._repo, pr, dest, reset, len(commits))
if not commits:
raise MergeError("PR has no commits")
prev = original_head
for original in commits:
if len(original['parents']) != 1:
raise MergeError(
"commits with multiple parents ({sha}) can not be rebased, "
"either fix the branch to remove merges or merge without "
"rebasing".format_map(
original
))
tmp_msg = 'temp rebasing PR %s (%s)' % (pr, original['sha'])
merged = self.merge(original['sha'], dest, tmp_msg)
# whichever parent is not original['sha'] should be what dest
# deref'd to, and we want to check that matches the "left parent" we
# expect (either original_head or the previously merged commit)
[base_commit] = (parent['sha'] for parent in merged['parents']
if parent['sha'] != original['sha'])
if prev != base_commit:
raise MergeError(
f"Inconsistent view of branch {dest} while rebasing "
f"PR {pr} expected commit {prev} but the other parent of "
f"merge commit {merged['sha']} is {base_commit}.\n\n"
f"The branch may be getting concurrently modified."
)
prev = merged['sha']
original['new_tree'] = merged['tree']['sha']
prev = original_head
mapping = {}
for c in commits:
committer = c['commit']['committer']
committer.pop('date')
copy = self('post', 'git/commits', json={
'message': c['commit']['message'],
'tree': c['new_tree'],
'parents': [prev],
'author': c['commit']['author'],
'committer': committer,
}, check={409: MergeError}).json()
logger.debug('copied %s to %s (parent: %s)', c['sha'], copy['sha'], prev)
prev = mapping[c['sha']] = copy['sha']
if reset:
self.set_ref(dest, original_head)
else:
self.set_ref(dest, prev)
logger.debug('rebased %s, %s on %s (reset=%s, commits=%s) -> %s',
self._repo, pr, dest, reset, len(commits),
prev)
# prev is updated after each copy so it's the rebased PR head
return prev, mapping
# fetch various bits of issues / prs to load them
def pr(self, number):
return (
@@ -383,14 +334,14 @@ class GH(object):
if not r.links.get('next'):
return
def commits_lazy(self, pr):
def commits_lazy(self, pr: int) -> Iterable[PrCommit]:
for page in itertools.count(1):
r = self('get', 'pulls/{}/commits'.format(pr), params={'page': page})
r = self('get', f'pulls/{pr}/commits', params={'page': page})
yield from r.json()
if not r.links.get('next'):
return
def commits(self, pr):
def commits(self, pr: int) -> List[PrCommit]:
""" Returns a PR's commits oldest first (that's what GH does &
is what we want)
"""


@@ -3,4 +3,6 @@ from . import res_partner
from . import project
from . import pull_requests
from . import project_freeze
from . import stagings_create
from . import staging_cancel
from . import crons


@@ -0,0 +1 @@
from . import git_maintenance


@@ -0,0 +1,37 @@
import logging
import subprocess
from odoo import models
from ...git import get_local
_gc = logging.getLogger(__name__)
class GC(models.TransientModel):
_name = 'runbot_merge.maintenance'
_description = "Weekly maintenance of... cache repos?"
def _run(self):
# lock out crons which use the local repo cache to avoid concurrency
# issues while we're GC-ing it
Stagings = self.env['runbot_merge.stagings']
crons = self.env.ref('runbot_merge.staging_cron', Stagings) | self.env.ref('forwardport.port_forward', Stagings)
if crons:
self.env.cr.execute("""
SELECT 1 FROM ir_cron
WHERE id = any(%s)
FOR UPDATE
""", [crons.ids])
# run on every repo that already has a local cache clone
for repo in self.env['runbot_merge.repository'].search([]):
repo_git = get_local(repo, prefix=None)
if not repo_git:
continue
_gc.info('Running maintenance on %s', repo.name)
r = repo_git\
.stdout(True)\
.with_config(stderr=subprocess.STDOUT, text=True, check=False)\
.gc(aggressive=True, prune='now')
if r.returncode:
_gc.warning("Maintenance failure (status=%d):\n%s", r.returncode, r.stdout)


@@ -0,0 +1,26 @@
<odoo>
<record id="access_forwardport_maintenance" model="ir.model.access">
<field name="name">Access to maintenance is useless</field>
<field name="model_id" ref="model_runbot_merge_maintenance"/>
<field name="perm_read">0</field>
<field name="perm_create">0</field>
<field name="perm_write">0</field>
<field name="perm_unlink">0</field>
</record>
<record model="ir.cron" id="maintenance">
<field name="name">Maintenance of repo cache</field>
<field name="model_id" ref="model_runbot_merge_maintenance"/>
<field name="state">code</field>
<field name="code">model._run()</field>
<!--
run sunday morning as it can take a while, unlikely someone will need
to stage or forward-port stuff at that point
-->
<field name="nextcall" eval="datetime.utcnow() + relativedelta(weekday=6, hour=2, minute=0, second=0, microsecond=0)"/>
<field name="interval_number">1</field>
<field name="interval_type">weeks</field>
<field name="numbercall">-1</field>
<field name="doall" eval="False"/>
</record>
</odoo>


@@ -1,9 +1,11 @@
import logging
import re
import requests
import sentry_sdk
from odoo import models, fields
from odoo import models, fields, api
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class Project(models.Model):
@@ -28,11 +30,13 @@ class Project(models.Model):
)
github_token = fields.Char("Github Token", required=True)
github_name = fields.Char(store=True, compute="_compute_identity")
github_email = fields.Char(store=True, compute="_compute_identity")
github_prefix = fields.Char(
required=True,
default="hanson", # mergebot du bot du bot du~
help="Prefix (~bot name) used when sending commands from PR "
"comments e.g. [hanson retry] or [hanson r+ p=1]"
"comments e.g. [hanson retry] or [hanson r+ p=1]",
)
batch_limit = fields.Integer(
@@ -47,6 +51,42 @@ class Project(models.Model):
freeze_id = fields.Many2one('runbot_merge.project.freeze', compute='_compute_freeze')
freeze_reminder = fields.Text()
@api.depends('github_token')
def _compute_identity(self):
s = requests.Session()
for project in self:
if not project.github_token or (project.github_name and project.github_email):
continue
r0 = s.get('https://api.github.com/user', headers={
'Authorization': 'token %s' % project.github_token
})
if not r0.ok:
_logger.error("Failed to fetch merge bot information for project %s: %s", project.name, r0.text or r0.content)
continue
r0 = r0.json()
project.github_name = r0['name'] or r0['login']
if email := r0['email']:
project.github_email = email
continue
if 'user:email' not in set(re.split(r',\s*', r0.headers['x-oauth-scopes'])):
raise UserError("The merge bot github token needs the user:email scope to fetch the bot's identity.")
r1 = s.get('https://api.github.com/user/emails', headers={
'Authorization': 'token %s' % project.github_token
})
if not r1.ok:
_logger.error("Failed to fetch merge bot emails for project %s: %s", project.name, r1.text or r1.content)
continue
project.github_email = next((
entry['email']
for entry in r1.json()
if entry['primary']
), None)
if not project.github_email:
raise UserError("The merge bot needs a public or accessible primary email set up.")
def _check_stagings(self, commit=False):
# check branches with an active staging
for branch in self.env['runbot_merge.branch']\
@@ -64,6 +104,8 @@ class Project(models.Model):
self.env.cr.commit()
def _create_stagings(self, commit=False):
from .stagings_create import try_staging
# look up branches which can be staged on and have no active staging
for branch in self.env['runbot_merge.branch'].search([
('active_staging_id', '=', False),
@@ -74,7 +116,7 @@
with self.env.cr.savepoint(), \
sentry_sdk.start_span(description=f'create staging {branch.name}') as span:
span.set_tag('branch', branch.name)
branch.try_staging()
try_staging(branch)
except Exception:
_logger.exception("Failed to create staging for branch %r", branch.name)
else:


@@ -1,18 +1,19 @@
import contextlib
import enum
import itertools
import json
import logging
import time
from collections import Counter
from typing import Dict
from markupsafe import Markup
from odoo import models, fields, api, Command
from odoo.addons.runbot_merge.exceptions import FastForwardError
from odoo.exceptions import UserError
from odoo.tools import drop_view_if_exists
from ... import git
from ..pull_requests import Repository
_logger = logging.getLogger(__name__)
class FreezeWizard(models.Model):
_name = 'runbot_merge.project.freeze'
@@ -211,50 +212,50 @@ class FreezeWizard(models.Model):
master_name = master.name
gh_sessions = {r: r.github() for r in self.project_id.repo_ids}
repos: Dict[Repository, git.Repo] = {
r: git.get_local(r, 'github').check(False)
for r in self.project_id.repo_ids
}
for repo, copy in repos.items():
copy.fetch(git.source_url(repo, 'github'), '+refs/heads/*:refs/heads/*')
# prep new branch (via tmp refs) on every repo
rel_heads = {}
rel_heads: Dict[Repository, str] = {}
# store for master heads as odds are high the bump pr(s) will be on the
# same repo as one of the release PRs
prevs = {}
prevs: Dict[Repository, str] = {}
for rel in self.release_pr_ids:
repo_id = rel.repository_id
gh = gh_sessions[repo_id]
try:
prev = prevs[repo_id] = gh.head(master_name)
except Exception:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
except Exception as e:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.") from e
# create the tmp branch to merge the PR into
tmp_branch = f'tmp.{self.branch_name}'
try:
gh.set_ref(tmp_branch, prev)
except Exception as err:
raise UserError(f"Unable to create branch {self.branch_name} of repository {repo_id.name}: {err}.")
commits = gh.commits(rel.pr_id.number)
except Exception as e:
raise UserError(f"Unable to fetch commits of release PR {rel.pr_id.display_name}.") from e
rel_heads[repo_id], _ = gh.rebase(rel.pr_id.number, tmp_branch)
time.sleep(1)
rel_heads[repo_id] = repos[repo_id].rebase(prev, commits)[0]
# prep bump
bump_heads = {}
bump_heads: Dict[Repository, str] = {}
for bump in self.bump_pr_ids:
repo_id = bump.repository_id
gh = gh_sessions[repo_id]
try:
prev = prevs[repo_id] = prevs.get(repo_id) or gh.head(master_name)
except Exception:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.")
except Exception as e:
raise UserError(f"Unable to resolve branch {master_name} of repository {repo_id.name} to a commit.") from e
# create the tmp branch to merge the PR into
tmp_branch = f'tmp.{master_name}'
try:
gh.set_ref(tmp_branch, prev)
except Exception as err:
raise UserError(f"Unable to create branch {master_name} of repository {repo_id.name}: {err}.")
commits = gh.commits(bump.pr_id.number)
except Exception as e:
raise UserError(f"Unable to fetch commits of bump PR {bump.pr_id.display_name}.") from e
bump_heads[repo_id], _ = gh.rebase(bump.pr_id.number, tmp_branch)
time.sleep(1)
bump_heads[repo_id] = repos[repo_id].rebase(prev, commits)[0]
deployed = {}
# at this point we've got a bunch of tmp branches with merged release
@@ -264,38 +265,39 @@ class FreezeWizard(models.Model):
failure = None
for rel in self.release_pr_ids:
repo_id = rel.repository_id
# helper API currently has no API to ensure we're just creating a
# new branch (as cheaply as possible) so do it by hand
status = None
with contextlib.suppress(Exception):
status = gh_sessions[repo_id].create_ref(self.branch_name, rel_heads[repo_id])
deployed[rel.pr_id.id] = rel_heads[repo_id]
to_delete.append(repo_id)
if status != 201:
if repos[repo_id].push(
git.source_url(repo_id, 'github'),
f'{rel_heads[repo_id]}:refs/heads/{self.branch_name}',
).returncode:
failure = ('create', repo_id.name, self.branch_name)
break
deployed[rel.pr_id.id] = rel_heads[repo_id]
to_delete.append(repo_id)
else: # all release deployments succeeded
for bump in self.bump_pr_ids:
repo_id = bump.repository_id
try:
gh_sessions[repo_id].fast_forward(master_name, bump_heads[repo_id])
deployed[bump.pr_id.id] = bump_heads[repo_id]
to_revert.append(repo_id)
except FastForwardError:
if repos[repo_id].push(
git.source_url(repo_id, 'github'),
f'{bump_heads[repo_id]}:refs/heads/{master_name}'
).returncode:
failure = ('fast-forward', repo_id.name, master_name)
break
deployed[bump.pr_id.id] = bump_heads[repo_id]
to_revert.append(repo_id)
if failure:
addendums = []
# creating the branch failed, try to delete all previous branches
failures = []
for prev_id in to_revert:
revert = gh_sessions[prev_id]('PATCH', f'git/refs/heads/{master_name}', json={
'sha': prevs[prev_id],
'force': True
}, check=False)
if not revert.ok:
if repos[prev_id].push(
'-f',
git.source_url(prev_id, 'github'),
f'{prevs[prev_id]}:refs/heads/{master_name}',
).returncode:
failures.append(prev_id.name)
if failures:
addendums.append(
@@ -305,8 +307,10 @@ class FreezeWizard(models.Model):
failures.clear()
for prev_id in to_delete:
deletion = gh_sessions[prev_id]('DELETE', f'git/refs/heads/{self.branch_name}', check=False)
if not deletion.ok:
if repos[prev_id].push(
git.source_url(prev_id, 'github'),
f':refs/heads/{self.branch_name}'
).returncode:
failures.append(prev_id.name)
if failures:
addendums.append(
@@ -468,7 +472,7 @@ class OpenPRLabels(models.Model):
def init(self):
super().init()
drop_view_if_exists(self.env.cr, "runbot_merge_freeze_labels");
drop_view_if_exists(self.env.cr, "runbot_merge_freeze_labels")
self.env.cr.execute("""
CREATE VIEW runbot_merge_freeze_labels AS (
SELECT DISTINCT ON (label)


@@ -1,37 +1,24 @@
# coding: utf-8
import ast
import base64
import collections
import contextlib
import datetime
import io
import itertools
import json
import logging
import os
import pprint
import re
import time
from typing import Optional, Union
from difflib import Differ
from itertools import takewhile
from typing import Optional
import requests
import sentry_sdk
import werkzeug
from werkzeug.datastructures import Headers
from odoo import api, fields, models, tools
from odoo.exceptions import ValidationError
from odoo.osv import expression
from odoo.tools import OrderedSet
from .. import github, exceptions, controllers, utils
WAIT_FOR_VISIBILITY = [10, 10, 10, 10]
_logger = logging.getLogger(__name__)
@@ -67,6 +54,8 @@ class Repository(models.Model):
_name = _description = 'runbot_merge.repository'
_order = 'sequence, id'
id: int
sequence = fields.Integer(default=50, group_operator=None)
name = fields.Char(required=True)
project_id = fields.Many2one('runbot_merge.project', required=True, index=True)
@@ -98,7 +87,7 @@ All substitutions are tentatively applied sequentially to the input.
vals['status_ids'] = [(5, 0, {})] + [(0, 0, {'context': c}) for c in st.split(',')]
return super().write(vals)
def github(self, token_field='github_token'):
def github(self, token_field='github_token') -> github.GH:
return github.GH(self.project_id[token_field], self.name)
def _auto_init(self):
@@ -245,6 +234,8 @@ class Branch(models.Model):
_name = _description = 'runbot_merge.branch'
_order = 'sequence, name'
id: int
name = fields.Char(required=True)
project_id = fields.Many2one('runbot_merge.project', required=True, index=True)
@@ -298,235 +289,6 @@ class Branch(models.Model):
for b in self:
b.active_staging_id = b.with_context(active_test=True).staging_ids
def _ready(self):
self.env.cr.execute("""
SELECT
min(pr.priority) as priority,
array_agg(pr.id) AS match
FROM runbot_merge_pull_requests pr
WHERE pr.target = any(%s)
-- exclude terminal states (so there's no issue when
-- deleting branches & reusing labels)
AND pr.state != 'merged'
AND pr.state != 'closed'
GROUP BY
pr.target,
CASE
WHEN pr.label SIMILAR TO '%%:patch-[[:digit:]]+'
THEN pr.id::text
ELSE pr.label
END
HAVING
bool_or(pr.state = 'ready') or bool_or(pr.priority = 0)
ORDER BY min(pr.priority), min(pr.id)
""", [self.ids])
browse = self.env['runbot_merge.pull_requests'].browse
return [(p, browse(ids)) for p, ids in self.env.cr.fetchall()]
def _stageable(self):
return [
(p, prs)
for p, prs in self._ready()
if not any(prs.mapped('blocked'))
]
def try_staging(self):
""" Tries to create a staging if the current branch does not already
have one. Returns None if the branch already has a staging or there
is nothing to stage, the newly created staging otherwise.
"""
logger = _logger.getChild('cron')
logger.info(
"Checking %s (%s) for staging: %s, skip? %s",
self, self.name,
self.active_staging_id,
bool(self.active_staging_id)
)
if self.active_staging_id:
return
rows = self._stageable()
priority = rows[0][0] if rows else -1
if priority == 0 or priority == 1:
# p=0 take precedence over all else
# p=1 allows merging a fix inside / ahead of a split (e.g. branch
# is broken or widespread false positive) without having to cancel
# the existing staging
batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)]
elif self.split_ids:
split_ids = self.split_ids[0]
logger.info("Found split of PRs %s, re-staging", split_ids.mapped('batch_ids.prs'))
batched_prs = [batch.prs for batch in split_ids.batch_ids]
split_ids.unlink()
else: # p=2
batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)]
if not batched_prs:
return
Batch = self.env['runbot_merge.batch']
staged = Batch
original_heads = {}
meta = {repo: {} for repo in self.project_id.repo_ids.having_branch(self)}
for repo, it in meta.items():
gh = it['gh'] = repo.github()
it['head'] = original_heads[repo] = gh.head(self.name)
# create tmp staging branch
gh.set_ref('tmp.{}'.format(self.name), it['head'])
batch_limit = self.project_id.batch_limit
first = True
for batch in batched_prs:
if len(staged) >= batch_limit:
break
try:
staged |= Batch.stage(meta, batch)
except exceptions.MergeError as e:
pr = e.args[0]
_logger.exception("Failed to merge %s into staging branch", pr.display_name)
if first or isinstance(e, exceptions.Unmergeable):
if len(e.args) > 1 and e.args[1]:
reason = e.args[1]
else:
reason = e.__cause__ or e.__context__
# if the reason is a json document, assume it's a github
# error and try to extract the error message to give it to
# the user
with contextlib.suppress(Exception):
reason = json.loads(str(reason))['message'].lower()
pr.state = 'error'
self.env.ref('runbot_merge.pr.merge.failed')._send(
repository=pr.repository,
pull_request=pr.number,
format_args= {'pr': pr, 'reason': reason, 'exc': e},
)
else:
first = False
if not staged:
return
heads = []
heads_map = {}
commits = []
for repo, it in meta.items():
tree = it['gh'].commit(it['head'])['tree']
# ensures staging branches are unique and always
# rebuilt
r = base64.b64encode(os.urandom(12)).decode('ascii')
trailer = ''
if heads_map:
trailer = '\n'.join(
'Runbot-dependency: %s:%s' % (repo, h)
for repo, h in heads_map.items()
)
dummy_head = {'sha': it['head']}
if it['head'] == original_heads[repo]:
# if the repo has not been updated by the staging, create a
# dummy commit to force rebuild
dummy_head = it['gh']('post', 'git/commits', json={
'message': '''force rebuild
uniquifier: %s
For-Commit-Id: %s
%s''' % (r, it['head'], trailer),
'tree': tree['sha'],
'parents': [it['head']],
}).json()
# special case if the two commits are identical because otherwise
# postgres raises error "ensure that no rows proposed for insertion
# within the same command have duplicate constrained values"
if it['head'] == dummy_head['sha']:
self.env.cr.execute(
"INSERT INTO runbot_merge_commit (sha, to_check, statuses) "
"VALUES (%s, true, '{}') "
"ON CONFLICT (sha) DO UPDATE SET to_check=true "
"RETURNING id",
[it['head']]
)
[commit] = [head] = self.env.cr.fetchone()
else:
self.env.cr.execute(
"INSERT INTO runbot_merge_commit (sha, to_check, statuses) "
"VALUES (%s, false, '{}'), (%s, true, '{}') "
"ON CONFLICT (sha) DO UPDATE SET to_check=true "
"RETURNING id",
[it['head'], dummy_head['sha']]
)
([commit], [head]) = self.env.cr.fetchall()
heads_map[repo.name] = dummy_head['sha']
heads.append(fields.Command.create({
'repository_id': repo.id,
'commit_id': head,
}))
commits.append(fields.Command.create({
'repository_id': repo.id,
'commit_id': commit,
}))
# create actual staging object
st = self.env['runbot_merge.stagings'].create({
'target': self.id,
'batch_ids': [(4, batch.id, 0) for batch in staged],
'heads': heads,
'commits': commits,
})
# create staging branch from tmp
token = self.project_id.github_token
for r in self.project_id.repo_ids.having_branch(self):
it = meta[r]
staging_head = heads_map[r.name]
_logger.info(
"%s: create staging for %s:%s at %s",
self.project_id.name, r.name, self.name,
staging_head
)
refname = 'staging.{}'.format(self.name)
it['gh'].set_ref(refname, staging_head)
i = itertools.count()
@utils.backoff(delays=WAIT_FOR_VISIBILITY, exc=TimeoutError)
def wait_for_visibility():
if self._check_visibility(r, refname, staging_head, token):
_logger.info(
"[repo] updated %s:%s to %s: ok (at %d/%d)",
r.name, refname, staging_head,
next(i), len(WAIT_FOR_VISIBILITY)
)
return
_logger.warning(
"[repo] updated %s:%s to %s: failed (at %d/%d)",
r.name, refname, staging_head,
next(i), len(WAIT_FOR_VISIBILITY)
)
raise TimeoutError("Staged head not updated after %d seconds" % sum(WAIT_FOR_VISIBILITY))
logger.info("Created staging %s (%s) to %s", st, ', '.join(
'%s[%s]' % (batch, batch.prs)
for batch in staged
), st.target.name)
return st
def _check_visibility(self, repo, branch_name, expected_head, token):
""" Checks the repository actual to see if the new / expected head is
now visible
"""
# v1 protocol provides URL for ref discovery: https://github.com/git/git/blob/6e0cc6776106079ed4efa0cc9abace4107657abf/Documentation/technical/http-protocol.txt#L187
# for more complete client this is also the capabilities discovery and
# the "entry point" for the service
url = 'https://github.com/{}.git/info/refs?service=git-upload-pack'.format(repo.name)
with requests.get(url, stream=True, auth=(token, '')) as resp:
if not resp.ok:
return False
for head, ref in parse_refs_smart(resp.raw.read):
if ref != ('refs/heads/' + branch_name):
continue
return head == expected_head
return False
ACL = collections.namedtuple('ACL', 'is_admin is_reviewer is_author')
class PullRequests(models.Model):
@@ -534,6 +296,9 @@ class PullRequests(models.Model):
_order = 'number desc'
_rec_name = 'number'
id: int
display_name: str
target = fields.Many2one('runbot_merge.branch', required=True, index=True)
repository = fields.Many2one('runbot_merge.repository', required=True)
# NB: check that target & repo have same project & provide project related?
@@ -1264,35 +1029,14 @@ class PullRequests(models.Model):
if commit:
self.env.cr.commit()
def _parse_commit_message(self, message):
""" Parses a commit message to split out the pseudo-headers (which
should be at the end) from the body, and serialises back with a
predefined pseudo-headers ordering.
"""
return Message.from_message(message)
def _is_mentioned(self, message, *, full_reference=False):
"""Returns whether ``self`` is mentioned in ``message```
:param str | PullRequest message:
:param bool full_reference: whether the repository name must be present
:rtype: bool
"""
if full_reference:
pattern = fr'\b{re.escape(self.display_name)}\b'
else:
repository = self.repository.name # .replace('/', '\\/')
pattern = fr'( |\b{repository})#{self.number}\b'
return bool(re.search(pattern, message if isinstance(message, str) else message.message))
def _build_merge_message(self, message, related_prs=()):
def _build_merge_message(self, message: Union['PullRequests', str], related_prs=()) -> 'Message':
# handle co-authored commits (https://help.github.com/articles/creating-a-commit-with-multiple-authors/)
m = self._parse_commit_message(message)
if not self._is_mentioned(message):
m.body += '\n\ncloses {pr.display_name}'.format(pr=self)
m = Message.from_message(message)
if not is_mentioned(message, self):
m.body += f'\n\ncloses {self.display_name}'
for r in related_prs:
if not r._is_mentioned(message, full_reference=True):
if not is_mentioned(message, r, full_reference=True):
m.headers.add('Related', r.display_name)
if self.reviewed_by:
@@ -1300,190 +1044,6 @@ class PullRequests(models.Model):
return m
def _add_self_references(self, commits):
"""Adds a footer reference to ``self`` to all ``commits`` if they don't
already refer to the PR.
"""
for c in (c['commit'] for c in commits):
if not self._is_mentioned(c['message']):
m = self._parse_commit_message(c['message'])
m.headers.pop('Part-Of', None)
m.headers.add('Part-Of', self.display_name)
c['message'] = str(m)
def _stage(self, gh, target, related_prs=()):
# nb: pr_commits is oldest to newest so pr.head is pr_commits[-1]
_, prdict = gh.pr(self.number)
commits = prdict['commits']
method = self.merge_method or ('rebase-ff' if commits == 1 else None)
if commits > 50 and method.startswith('rebase'):
raise exceptions.Unmergeable(self, "Rebasing 50 commits is too much.")
if commits > 250:
raise exceptions.Unmergeable(
self, "Merging PRs of 250 or more commits is not supported "
"(https://developer.github.com/v3/pulls/#list-commits-on-a-pull-request)"
)
pr_commits = gh.commits(self.number)
for c in pr_commits:
if not (c['commit']['author']['email'] and c['commit']['committer']['email']):
raise exceptions.Unmergeable(
self,
f"All commits must have author and committer email, "
f"missing email on {c['sha']} indicates the authorship is "
f"most likely incorrect."
)
# sync and signal possibly missed updates
invalid = {}
diff = []
pr_head = pr_commits[-1]['sha']
if self.head != pr_head:
invalid['head'] = pr_head
diff.append(('Head', self.head, pr_head))
if self.target.name != prdict['base']['ref']:
branch = self.env['runbot_merge.branch'].with_context(active_test=False).search([
('name', '=', prdict['base']['ref']),
('project_id', '=', self.repository.project_id.id),
])
if not branch:
self.unlink()
raise exceptions.Unmergeable(self, "While staging, found this PR had been retargeted to an un-managed branch.")
invalid['target'] = branch.id
diff.append(('Target branch', self.target.name, branch.name))
if self.squash != commits == 1:
invalid['squash'] = commits == 1
diff.append(('Single commit', self.squash, commits == 1))
msg = utils.make_message(prdict)
if self.message != msg:
invalid['message'] = msg
diff.append(('Message', self.message, msg))
if invalid:
self.write({**invalid, 'state': 'opened', 'head': pr_head})
raise exceptions.Mismatch(invalid, diff)
if self.reviewed_by and self.reviewed_by.name == self.reviewed_by.github_login:
# XXX: find other trigger(s) to sync github name?
gh_name = gh.user(self.reviewed_by.github_login)['name']
if gh_name:
self.reviewed_by.name = gh_name
# NOTE: lost merge v merge/copy distinction (head being
# a merge commit reused instead of being re-merged)
return method, getattr(self, '_stage_' + method.replace('-', '_'))(
gh, target, pr_commits, related_prs=related_prs)
def _stage_squash(self, gh, target, commits, related_prs=()):
msg = self._build_merge_message(self, related_prs=related_prs)
authorship = {}
authors = {
(c['commit']['author']['name'], c['commit']['author']['email'])
for c in commits
}
if len(authors) == 1:
name, email = authors.pop()
authorship['author'] = {'name': name, 'email': email}
else:
msg.headers.extend(sorted(
('Co-Authored-By', "%s <%s>" % author)
for author in authors
))
committers = {
(c['commit']['committer']['name'], c['commit']['committer']['email'])
for c in commits
}
if len(committers) == 1:
name, email = committers.pop()
authorship['committer'] = {'name': name, 'email': email}
# should committers also be added to co-authors?
original_head = gh.head(target)
merge_tree = gh.merge(self.head, target, 'temp merge')['tree']['sha']
head = gh('post', 'git/commits', json={
**authorship,
'message': str(msg),
'tree': merge_tree,
'parents': [original_head],
}).json()['sha']
gh.set_ref(target, head)
commits_map = {c['sha']: head for c in commits}
commits_map[''] = head
self.commits_map = json.dumps(commits_map)
return head
def _stage_rebase_ff(self, gh, target, commits, related_prs=()):
# updates head commit with PR number (if necessary) then rebases
# on top of target
msg = self._build_merge_message(commits[-1]['commit']['message'], related_prs=related_prs)
commits[-1]['commit']['message'] = str(msg)
self._add_self_references(commits[:-1])
head, mapping = gh.rebase(self.number, target, commits=commits)
self.commits_map = json.dumps({**mapping, '': head})
return head
def _stage_rebase_merge(self, gh, target, commits, related_prs=()):
self._add_self_references(commits)
h, mapping = gh.rebase(self.number, target, reset=True, commits=commits)
msg = self._build_merge_message(self, related_prs=related_prs)
merge_head = gh.merge(h, target, str(msg))['sha']
self.commits_map = json.dumps({**mapping, '': merge_head})
return merge_head
def _stage_merge(self, gh, target, commits, related_prs=()):
pr_head = commits[-1] # oldest to newest
base_commit = None
head_parents = {p['sha'] for p in pr_head['parents']}
if len(head_parents) > 1:
# look for parent(s?) of pr_head not in PR, means it's
# from target (so we merged target in pr)
merge = head_parents - {c['sha'] for c in commits}
external_parents = len(merge)
if external_parents > 1:
raise exceptions.Unmergeable(
"The PR head can only have one parent from the base branch "
"(not part of the PR itself), found %d: %s" % (
external_parents,
', '.join(merge)
))
if external_parents == 1:
[base_commit] = merge
commits_map = {c['sha']: c['sha'] for c in commits}
if base_commit:
# replicate pr_head with base_commit replaced by
# the current head
original_head = gh.head(target)
merge_tree = gh.merge(pr_head['sha'], target, 'temp merge')['tree']['sha']
new_parents = [original_head] + list(head_parents - {base_commit})
msg = self._build_merge_message(pr_head['commit']['message'], related_prs=related_prs)
copy = gh('post', 'git/commits', json={
'message': str(msg),
'tree': merge_tree,
'author': pr_head['commit']['author'],
'committer': pr_head['commit']['committer'],
'parents': new_parents,
}).json()
gh.set_ref(target, copy['sha'])
# merge commit *and old PR head* map to the pr head replica
commits_map[''] = commits_map[pr_head['sha']] = copy['sha']
self.commits_map = json.dumps(commits_map)
return copy['sha']
else:
# otherwise do a regular merge
msg = self._build_merge_message(self)
merge_head = gh.merge(self.head, target, str(msg))['sha']
# and the merge commit is the normal merge head
commits_map[''] = merge_head
self.commits_map = json.dumps(commits_map)
return merge_head
def unstage(self, reason, *args):
""" If the PR is staged, cancel the staging. If the PR is split and
waiting, remove it from the split (possibly delete the split entirely)
@@ -2255,82 +1815,6 @@ class Batch(models.Model):
raise ValidationError("All prs of a batch must have different target repositories, got a duplicate %s on %s" % (pr.repository, pr))
repos |= pr.repository
def stage(self, meta, prs):
"""
Updates meta[*][head] on success
:return: () or Batch object (if all prs successfully staged)
"""
new_heads = {}
pr_fields = self.env['runbot_merge.pull_requests']._fields
for pr in prs:
gh = meta[pr.repository]['gh']
_logger.info(
"Staging pr %s for target %s; method=%s",
pr.display_name, pr.target.name,
pr.merge_method or (pr.squash and 'single') or None
)
target = 'tmp.{}'.format(pr.target.name)
original_head = gh.head(target)
try:
try:
method, new_heads[pr] = pr._stage(gh, target, related_prs=(prs - pr))
_logger.info(
"Staged pr %s to %s by %s: %s -> %s",
pr.display_name, pr.target.name, method,
original_head, new_heads[pr]
)
except Exception:
# reset the head which failed, as rebase() may have partially
# updated it (despite later steps failing)
gh.set_ref(target, original_head)
# then reset every previous update
for to_revert in new_heads.keys():
it = meta[to_revert.repository]
it['gh'].set_ref('tmp.{}'.format(to_revert.target.name), it['head'])
raise
except github.MergeError as e:
raise exceptions.MergeError(pr) from e
except exceptions.Mismatch as e:
def format_items(items):
""" Bit of a pain in the ass because difflib really wants
all lines to be newline-terminated, but not all values are
actual lines, and also needs to split multiline values.
"""
for name, value in items:
yield name + ':\n'
if not value.endswith('\n'):
value += '\n'
yield from value.splitlines(keepends=True)
yield '\n'
old = list(format_items((n, str(v)) for n, v, _ in e.args[1]))
new = list(format_items((n, str(v)) for n, _, v in e.args[1]))
diff = ''.join(Differ().compare(old, new))
_logger.info("data mismatch on %s:\n%s", pr.display_name, diff)
self.env.ref('runbot_merge.pr.staging.mismatch')._send(
repository=pr.repository,
pull_request=pr.number,
format_args={
'pr': pr,
'mismatch': ', '.join(pr_fields[f].string for f in e.args[0]),
'diff': diff,
'unchecked': ', '.join(pr_fields[f].string for f in UNCHECKABLE)
}
)
return self.env['runbot_merge.batch']
# update meta to new heads
for pr, head in new_heads.items():
meta[pr.repository]['head'] = head
return self.create({
'target': prs[0].target.id,
'prs': [(4, pr.id, 0) for pr in prs],
})
UNCHECKABLE = ['merge_method', 'overrides', 'draft']
class FetchJob(models.Model):
_name = _description = 'runbot_merge.fetch_job'
@@ -2362,134 +1846,4 @@ class FetchJob(models.Model):
self.env.cr.commit()
refline = re.compile(rb'([\da-f]{40}) ([^\0\n]+)(\0.*)?\n?$')
ZERO_REF = b'0'*40
def parse_refs_smart(read):
""" yields pkt-line data (bytes), or None for flush lines """
def read_line():
length = int(read(4), 16)
if length == 0:
return None
return read(length - 4)
header = read_line()
assert header.rstrip() == b'# service=git-upload-pack', header
assert read_line() is None, "failed to find first flush line"
# read lines until second delimiter
for line in iter(read_line, None):
if line.startswith(ZERO_REF):
break # empty list (no refs)
m = refline.match(line)
yield m[1].decode(), m[2].decode()
BREAK = re.compile(r'''
^
[ ]{0,3} # 0-3 spaces of indentation
# followed by a sequence of three or more matching -, _, or * characters,
# each followed optionally by any number of spaces or tabs
# so needs to start with a _, - or *, then have at least 2 more such
# interspersed with any number of spaces or tabs
([*_-])
([ \t]*\1){2,}
[ \t]*
$
''', flags=re.VERBOSE)
SETEX_UNDERLINE = re.compile(r'''
^
[ ]{0,3} # no more than 3 spaces indentation
[-=]+ # a sequence of = characters or a sequence of - characters
[ ]* # any number of trailing spaces
$
# we don't care about "a line containing a single -" because we want to
# disambiguate SETEX headings from thematic breaks, and thematic breaks have
# 3+ -. Doesn't look like GH interprets `- - -` as a line so yay...
''', flags=re.VERBOSE)
HEADER = re.compile('^([A-Za-z-]+): (.*)$')
class Message:
@classmethod
def from_message(cls, msg):
in_headers = True
maybe_setex = None
# creating from PR message -> remove content following break
msg, handle_break = (msg, False) if isinstance(msg, str) else (msg.message, True)
headers = []
body = []
# don't process the title (first line) of the commit message
msg = msg.splitlines()
for line in reversed(msg[1:]):
if maybe_setex:
# NOTE: actually slightly more complicated: it's a SETEX heading
# only if preceding line(s) can be interpreted as a
# paragraph so e.g. a title followed by a line of dashes
# would indeed be a break, but this should be good enough
# for now, if we need more we'll need a full-blown
# markdown parser probably
if line: # actually a SETEX title -> add underline to body then process current
body.append(maybe_setex)
else: # actually break, remove body then process current
body = []
maybe_setex = None
if not line:
if not in_headers and body and body[-1]:
body.append(line)
continue
if handle_break and BREAK.match(line):
if SETEX_UNDERLINE.match(line):
maybe_setex = line
else:
body = []
continue
h = HEADER.match(line)
if h:
# c-a-b = special case from an existing test, not sure if actually useful?
if in_headers or h.group(1).lower() == 'co-authored-by':
headers.append(h.groups())
continue
body.append(line)
in_headers = False
# if there are non-title body lines, add a separation after the title
if body and body[-1]:
body.append('')
body.append(msg[0])
return cls('\n'.join(reversed(body)), Headers(reversed(headers)))
def __init__(self, body, headers=None):
self.body = body
self.headers = headers or Headers()
def __setattr__(self, name, value):
# make sure stored body is always stripped
if name == 'body':
value = value and value.strip()
super().__setattr__(name, value)
def __str__(self):
if not self.headers:
return self.body + '\n'
with io.StringIO(self.body) as msg:
msg.write(self.body)
msg.write('\n\n')
# https://git.wiki.kernel.org/index.php/CommitMessageConventions
# seems to mostly use capitalised names (rather than title-cased)
keys = list(OrderedSet(k.capitalize() for k in self.headers.keys()))
# c-a-b must be at the very end otherwise github doesn't see it
keys.sort(key=lambda k: k == 'Co-authored-by')
for k in keys:
for v in self.headers.getlist(k):
msg.write(k)
msg.write(': ')
msg.write(v)
msg.write('\n')
return msg.getvalue()
def sub(self, pattern, repl, *, flags):
""" Performs in-place replacements on the body
"""
self.body = re.sub(pattern, repl, self.body, flags=flags)
from .stagings_create import is_mentioned, Message

View File

@ -0,0 +1,702 @@
import base64
import contextlib
import dataclasses
import io
import json
import logging
import os
import re
from difflib import Differ
from itertools import takewhile
from operator import itemgetter
from typing import Dict, Union, Optional, Literal, Callable, Iterator, Tuple, List, TypeAlias
import requests
from werkzeug.datastructures import Headers
from odoo import api, models, fields
from odoo.tools import OrderedSet
from .pull_requests import Branch, Stagings, PullRequests, Repository, Batch
from .. import exceptions, utils, github, git
WAIT_FOR_VISIBILITY = [10, 10, 10, 10]
_logger = logging.getLogger(__name__)
class Project(models.Model):
_inherit = 'runbot_merge.project'
@dataclasses.dataclass(slots=True)
class StagingSlice:
"""Staging state for a single repository:
- gh is a cache for the github proxy object (contains a session for reusing
connection)
- head is the current staging head for the branch of that repo
- repo is the local git clone in which the staging for that repository is assembled
"""
gh: github.GH
head: str
repo: git.Repo
StagingState: TypeAlias = Dict[Repository, StagingSlice]
def try_staging(branch: Branch) -> Optional[Stagings]:
""" Tries to create a staging if the current branch does not already
have one. Returns None if the branch already has a staging or there
is nothing to stage, the newly created staging otherwise.
"""
_logger.info(
"Checking %s (%s) for staging: %s, skip? %s",
branch, branch.name,
branch.active_staging_id,
bool(branch.active_staging_id)
)
if branch.active_staging_id:
return None
rows = [
(p, prs)
for p, prs in ready_prs(for_branch=branch)
if not any(prs.mapped('blocked'))
]
if not rows:
return
priority = rows[0][0]
if priority == 0 or priority == 1:
# p=0 take precedence over all else
# p=1 allows merging a fix inside / ahead of a split (e.g. branch
# is broken or widespread false positive) without having to cancel
# the existing staging
batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)]
elif branch.split_ids:
split_ids = branch.split_ids[0]
_logger.info("Found split of PRs %s, re-staging", split_ids.mapped('batch_ids.prs'))
batched_prs = [batch.prs for batch in split_ids.batch_ids]
split_ids.unlink()
else: # p=2
batched_prs = [pr_ids for _, pr_ids in takewhile(lambda r: r[0] == priority, rows)]
original_heads, staging_state = staging_setup(branch, batched_prs)
staged = stage_batches(branch, batched_prs, staging_state)
if not staged:
return None
env = branch.env
heads = []
commits = []
for repo, it in staging_state.items():
if it.head != original_heads[repo]:
# if we staged something for that repo, just create a record for
# that commit, or flag existing one as to-recheck in case there are
# already statuses we want to propagate to the staging or something
env.cr.execute(
"INSERT INTO runbot_merge_commit (sha, to_check, statuses) "
"VALUES (%s, true, '{}') "
"ON CONFLICT (sha) DO UPDATE SET to_check=true "
"RETURNING id",
[it.head]
)
[commit] = [head] = env.cr.fetchone()
else:
# if we didn't stage anything for that repo, create a dummy commit
# (with a uniquifier to ensure we don't hit a previous version of
# the same) to ensure the staging head is new and we're building
# everything
project = branch.project_id
uniquifier = base64.b64encode(os.urandom(12)).decode('ascii')
dummy_head = it.repo.with_config(check=True).commit_tree(
# somewhat exceptionally, `commit-tree` wants an actual tree
# not a tree-ish
tree=f'{it.head}^{{tree}}',
parents=[it.head],
author=(project.github_name, project.github_email),
message=f'''\
force rebuild
uniquifier: {uniquifier}
For-Commit-Id: {it.head}
''',
).stdout.strip()
# see above, ideally we don't need to mark the real head as
# `to_check` because it's an old commit but `DO UPDATE` is necessary
# for `RETURNING` to work, and it doesn't really hurt (maybe)
env.cr.execute(
"INSERT INTO runbot_merge_commit (sha, to_check, statuses) "
"VALUES (%s, false, '{}'), (%s, true, '{}') "
"ON CONFLICT (sha) DO UPDATE SET to_check=true "
"RETURNING id",
[it.head, dummy_head]
)
([commit], [head]) = env.cr.fetchall()
it.head = dummy_head
heads.append(fields.Command.create({
'repository_id': repo.id,
'commit_id': head,
}))
commits.append(fields.Command.create({
'repository_id': repo.id,
'commit_id': commit,
}))
# create actual staging object
st: Stagings = env['runbot_merge.stagings'].create({
'target': branch.id,
'batch_ids': [(4, batch.id, 0) for batch in staged],
'heads': heads,
'commits': commits,
})
for repo, it in staging_state.items():
_logger.info(
"%s: create staging for %s:%s at %s",
branch.project_id.name, repo.name, branch.name,
it.head
)
it.repo.stdout(False).check(True).push(
'-f',
git.source_url(repo, 'github'),
f'{it.head}:refs/heads/staging.{branch.name}',
)
_logger.info("Created staging %s (%s) to %s", st, ', '.join(
'%s[%s]' % (batch, batch.prs)
for batch in staged
), st.target.name)
return st
def ready_prs(for_branch: Branch) -> List[Tuple[int, PullRequests]]:
env = for_branch.env
env.cr.execute("""
SELECT
min(pr.priority) as priority,
array_agg(pr.id) AS match
FROM runbot_merge_pull_requests pr
WHERE pr.target = any(%s)
-- exclude terminal states (so there's no issue when
-- deleting branches & reusing labels)
AND pr.state != 'merged'
AND pr.state != 'closed'
GROUP BY
pr.target,
CASE
WHEN pr.label SIMILAR TO '%%:patch-[[:digit:]]+'
THEN pr.id::text
ELSE pr.label
END
HAVING
bool_or(pr.state = 'ready') or bool_or(pr.priority = 0)
ORDER BY min(pr.priority), min(pr.id)
""", [for_branch.ids])
browse = env['runbot_merge.pull_requests'].browse
return [(p, browse(ids)) for p, ids in env.cr.fetchall()]
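# sketch of the grouping above (not part of the commit): PRs sharing a label
# across the project's repositories, e.g. `user:feature-1`, form one batch and
# stage together; `…:patch-N` labels (github's default patch branch names) are
# keyed by PR id instead, so such PRs always batch alone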
def staging_setup(
target: Branch,
batched_prs: List[PullRequests],
) -> Tuple[Dict[Repository, str], StagingState]:
"""Sets up the staging:
- stores baseline info (the original head of each repository)
- fetches the target branch and the PR heads into the local clone
- generates a working copy for each repository, at the target branch
"""
all_prs: PullRequests = target.env['runbot_merge.pull_requests'].concat(*batched_prs)
staging_state = {}
original_heads = {}
for repo in target.project_id.repo_ids.having_branch(target):
gh = repo.github()
head = gh.head(target.name)
source = git.get_local(repo, 'github')
source.fetch(
git.source_url(repo, 'github'),
# a full refspec is necessary to ensure we actually fetch the ref
# (not just the commit it points to) and update it.
# `git fetch $remote $branch` seems to work locally, but it might
# be hooked only to "proper" remote-tracking branches
# (in `refs/remotes`), it doesn't seem to work here
f'+refs/heads/{target.name}:refs/heads/{target.name}',
*(pr.head for pr in all_prs if pr.repository == repo)
)
original_heads[repo] = head
staging_state[repo] = StagingSlice(gh=gh, head=head, repo=source.stdout().with_config(text=True, check=False))
return original_heads, staging_state
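# rough shell equivalent of the fetch above (illustrative only, cache path and
# shas hypothetical): the forced full refspec updates the local branch ref
# itself, while the bare PR heads are only fetched as objects
#
#   git -C <cache>/<owner>/<repo> fetch https://github.com/<owner>/<repo> \
#       '+refs/heads/master:refs/heads/master' <pr-head-sha> [...]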
def stage_batches(branch: Branch, batched_prs: List[PullRequests], staging_state: StagingState) -> Stagings:
batch_limit = branch.project_id.batch_limit
env = branch.env
staged = env['runbot_merge.batch']
for batch in batched_prs:
if len(staged) >= batch_limit:
break
try:
staged |= stage_batch(env, batch, staging_state)
except exceptions.MergeError as e:
pr = e.args[0]
_logger.info("Failed to stage %s into %s", pr.display_name, branch.name, exc_info=True)
if not staged or isinstance(e, exceptions.Unmergeable):
if len(e.args) > 1 and e.args[1]:
reason = e.args[1]
else:
reason = e.__cause__ or e.__context__
# if the reason is a json document, assume it's a github error
# and try to extract the error message to give it to the user
with contextlib.suppress(Exception):
reason = json.loads(str(reason))['message'].lower()
pr.state = 'error'
env.ref('runbot_merge.pr.merge.failed')._send(
repository=pr.repository,
pull_request=pr.number,
format_args={'pr': pr, 'reason': reason, 'exc': e},
)
return staged
def check_visibility(repo: Repository, branch_name: str, expected_head: str, token: str):
""" Checks the repository actual to see if the new / expected head is
now visible
"""
# v1 protocol provides URL for ref discovery: https://github.com/git/git/blob/6e0cc6776106079ed4efa0cc9abace4107657abf/Documentation/technical/http-protocol.txt#L187
# for more complete client this is also the capabilities discovery and
# the "entry point" for the service
url = 'https://github.com/{}.git/info/refs?service=git-upload-pack'.format(repo.name)
with requests.get(url, stream=True, auth=(token, '')) as resp:
if not resp.ok:
return False
for head, ref in parse_refs_smart(resp.raw.read):
if ref != ('refs/heads/' + branch_name):
continue
return head == expected_head
return False
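# illustrative (hypothetical) ref advertisement for the request above, as a
# pkt-line stream: each length prefix is 4 hex digits and includes itself,
# `0000` is a flush; the first ref line may also carry \0-separated
# capabilities, matched by the `(\0.*)?` group of `refline`
#
#   001e# service=git-upload-pack\n
#   0000
#   0047<40-hex-sha> refs/heads/staging.master\n
#   0000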
refline = re.compile(rb'([\da-f]{40}) ([^\0\n]+)(\0.*)?\n?')
ZERO_REF = b'0'*40
def parse_refs_smart(read: Callable[[int], bytes]) -> Iterator[Tuple[str, str]]:
""" yields pkt-line data (bytes), or None for flush lines """
def read_line() -> Optional[bytes]:
length = int(read(4), 16)
if length == 0:
return None
return read(length - 4)
header = read_line()
assert header and header.rstrip() == b'# service=git-upload-pack', header
assert read_line() is None, "failed to find first flush line"
# read lines until second delimiter
for line in iter(read_line, None):
if line.startswith(ZERO_REF):
break # empty list (no refs)
m = refline.fullmatch(line)
assert m
yield m[1].decode(), m[2].decode()
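# minimal usage sketch (hypothetical data):
#
#   >>> buf = io.BytesIO(
#   ...     b'001e# service=git-upload-pack\n'
#   ...     b'0000'
#   ...     b'003f' + b'a' * 40 + b' refs/heads/master\n'
#   ...     b'0000')
#   >>> list(parse_refs_smart(buf.read))
#   [('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'refs/heads/master')]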
UNCHECKABLE = ['merge_method', 'overrides', 'draft']
def stage_batch(env: api.Environment, prs: PullRequests, staging: StagingState) -> Batch:
"""Stages the batch represented by the ``prs`` recordset, onto the
current corresponding staging heads.
Alongside returning the newly created batch, updates ``staging[*].head``
in-place on success. On failure, the heads should not be touched.
"""
new_heads: Dict[PullRequests, str] = {}
pr_fields = env['runbot_merge.pull_requests']._fields
for pr in prs:
info = staging[pr.repository]
_logger.info(
"Staging pr %s for target %s; method=%s",
pr.display_name, pr.target.name,
pr.merge_method or (pr.squash and 'single') or None
)
try:
method, new_heads[pr] = stage(pr, info, related_prs=(prs - pr))
_logger.info(
"Staged pr %s to %s by %s: %s -> %s",
pr.display_name, pr.target.name, method,
info.head, new_heads[pr]
)
except github.MergeError as e:
raise exceptions.MergeError(pr) from e
except exceptions.Mismatch as e:
diff = ''.join(Differ().compare(
list(format_for_difflib((n, v) for n, v, _ in e.args[1])),
list(format_for_difflib((n, v) for n, _, v in e.args[1])),
))
_logger.info("data mismatch on %s:\n%s", pr.display_name, diff)
env.ref('runbot_merge.pr.staging.mismatch')._send(
repository=pr.repository,
pull_request=pr.number,
format_args={
'pr': pr,
'mismatch': ', '.join(pr_fields[f].string for f in e.args[0]),
'diff': diff,
'unchecked': ', '.join(pr_fields[f].string for f in UNCHECKABLE)
}
)
return env['runbot_merge.batch']
# update meta to new heads
for pr, head in new_heads.items():
staging[pr.repository].head = head
return env['runbot_merge.batch'].create({
'target': prs[0].target.id,
'prs': [(4, pr.id, 0) for pr in prs],
})
def format_for_difflib(items: Iterator[Tuple[str, object]]) -> Iterator[str]:
""" Bit of a pain in the ass because difflib really wants
all lines to be newline-terminated, but not all values are
actual lines, and also needs to split multiline values.
"""
for name, value in items:
yield name + ':\n'
value = str(value)
if not value.endswith('\n'):
value += '\n'
yield from value.splitlines(keepends=True)
yield '\n'
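# e.g. (sketch)
#
#   >>> list(format_for_difflib([('Message', 'a\nb')]))
#   ['Message:\n', 'a\n', 'b\n', '\n']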
Method = Literal['merge', 'rebase-merge', 'rebase-ff', 'squash']
def stage(pr: PullRequests, info: StagingSlice, related_prs: PullRequests) -> Tuple[Method, str]:
# nb: pr_commits is oldest to newest so pr.head is pr_commits[-1]
_, prdict = info.gh.pr(pr.number)
commits = prdict['commits']
method: Method = pr.merge_method or ('rebase-ff' if commits == 1 else None)
if commits > 50 and method.startswith('rebase'):
raise exceptions.Unmergeable(pr, "Rebasing 50 commits is too much.")
if commits > 250:
raise exceptions.Unmergeable(
pr, "Merging PRs of 250 or more commits is not supported "
"(https://developer.github.com/v3/pulls/#list-commits-on-a-pull-request)"
)
pr_commits = info.gh.commits(pr.number)
for c in pr_commits:
if not (c['commit']['author']['email'] and c['commit']['committer']['email']):
raise exceptions.Unmergeable(
pr,
f"All commits must have author and committer email, "
f"missing email on {c['sha']} indicates the authorship is "
f"most likely incorrect."
)
# sync and signal possibly missed updates
invalid = {}
diff = []
pr_head = pr_commits[-1]['sha']
if pr.head != pr_head:
invalid['head'] = pr_head
diff.append(('Head', pr.head, pr_head))
if pr.target.name != prdict['base']['ref']:
branch = pr.env['runbot_merge.branch'].with_context(active_test=False).search([
('name', '=', prdict['base']['ref']),
('project_id', '=', pr.repository.project_id.id),
])
if not branch:
pr.unlink()
raise exceptions.Unmergeable(pr, "While staging, found this PR had been retargeted to an un-managed branch.")
invalid['target'] = branch.id
diff.append(('Target branch', pr.target.name, branch.name))
if pr.squash != (commits == 1):
invalid['squash'] = commits == 1
diff.append(('Single commit', pr.squash, commits == 1))
msg = utils.make_message(prdict)
if pr.message != msg:
invalid['message'] = msg
diff.append(('Message', pr.message, msg))
if invalid:
pr.write({**invalid, 'state': 'opened', 'head': pr_head})
raise exceptions.Mismatch(invalid, diff)
if pr.reviewed_by and pr.reviewed_by.name == pr.reviewed_by.github_login:
# XXX: find other trigger(s) to sync github name?
gh_name = info.gh.user(pr.reviewed_by.github_login)['name']
if gh_name:
pr.reviewed_by.name = gh_name
match method:
case 'merge':
fn = stage_merge
case 'rebase-merge':
fn = stage_rebase_merge
case 'rebase-ff':
fn = stage_rebase_ff
case 'squash':
fn = stage_squash
return method, fn(pr, info, pr_commits, related_prs=related_prs)
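# nb (sketch): a single-commit PR with no explicit merge method falls back to
# 'rebase-ff' above, while a multi-commit PR without a reviewer-provided
# method never reaches this point, it is blocked before staging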
def stage_squash(pr: PullRequests, info: StagingSlice, commits: List[github.PrCommit], related_prs: PullRequests) -> str:
msg = pr._build_merge_message(pr, related_prs=related_prs)
authors = {
(c['commit']['author']['name'], c['commit']['author']['email'])
for c in commits
}
if len(authors) == 1:
author = authors.pop()
else:
msg.headers.extend(sorted(
('Co-Authored-By', "%s <%s>" % author)
for author in authors
))
author = (pr.repository.project_id.github_name, pr.repository.project_id.github_email)
committers = {
(c['commit']['committer']['name'], c['commit']['committer']['email'])
for c in commits
}
# should committers also be added to co-authors?
committer = committers.pop() if len(committers) == 1 else None
r = info.repo.merge_tree(info.head, pr.head)
if r.returncode:
raise exceptions.MergeError(pr, r.stderr)
merge_tree = r.stdout.strip()
r = info.repo.commit_tree(
tree=merge_tree,
parents=[info.head],
message=str(msg),
author=author,
committer=committer or author,
)
if r.returncode:
raise exceptions.MergeError(pr, r.stderr)
head = r.stdout.strip()
commits_map = {c['sha']: head for c in commits}
commits_map[''] = head
pr.commits_map = json.dumps(commits_map)
return head
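# illustrative commits_map for a two-commit PR squashed into <new-sha>
# (hypothetical shas): all original commits map to the squashed commit, and
# the empty key marks the commit which ends up as the staging head
#
#   {'<sha1>': '<new-sha>', '<sha2>': '<new-sha>', '': '<new-sha>'}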
def stage_rebase_ff(pr: PullRequests, info: StagingSlice, commits: List[github.PrCommit], related_prs: PullRequests) -> str:
# updates head commit with PR number (if necessary) then rebases
# on top of target
msg = pr._build_merge_message(commits[-1]['commit']['message'], related_prs=related_prs)
commits[-1]['commit']['message'] = str(msg)
add_self_references(pr, commits[:-1])
head, mapping = info.repo.rebase(info.head, commits=commits)
pr.commits_map = json.dumps({**mapping, '': head})
return head
def stage_rebase_merge(pr: PullRequests, info: StagingSlice, commits: List[github.PrCommit], related_prs: PullRequests) -> str:
add_self_references(pr, commits)
h, mapping = info.repo.rebase(info.head, commits=commits)
msg = pr._build_merge_message(pr, related_prs=related_prs)
project = pr.repository.project_id
merge_head = info.repo.merge(
info.head, h, str(msg),
author=(project.github_name, project.github_email),
)
pr.commits_map = json.dumps({**mapping, '': merge_head})
return merge_head
def stage_merge(pr: PullRequests, info: StagingSlice, commits: List[github.PrCommit], related_prs: PullRequests) -> str:
pr_head = commits[-1] # oldest to newest
base_commit = None
head_parents = {p['sha'] for p in pr_head['parents']}
if len(head_parents) > 1:
# look for parent(s?) of pr_head not in PR, means it's
# from target (so we merged target in pr)
merge = head_parents - {c['sha'] for c in commits}
external_parents = len(merge)
if external_parents > 1:
raise exceptions.Unmergeable(
pr,
"The PR head can only have one parent from the base branch "
"(not part of the PR itself), found %d: %s" % (
external_parents,
', '.join(merge)
))
if external_parents == 1:
[base_commit] = merge
commits_map = {c['sha']: c['sha'] for c in commits}
if base_commit:
# replicate pr_head with base_commit replaced by
# the current head
t = info.repo.merge_tree(info.head, pr_head['sha'])
if t.returncode:
raise exceptions.MergeError(pr, t.stderr)
merge_tree = t.stdout.strip()
new_parents = [info.head] + list(head_parents - {base_commit})
msg = pr._build_merge_message(pr_head['commit']['message'], related_prs=related_prs)
d2t = itemgetter('name', 'email', 'date')
c = info.repo.commit_tree(
tree=merge_tree,
parents=new_parents,
message=str(msg),
author=d2t(pr_head['commit']['author']),
committer=d2t(pr_head['commit']['committer']),
)
if c.returncode:
raise exceptions.MergeError(pr, c.stderr)
copy = c.stdout.strip()
# merge commit *and old PR head* map to the pr head replica
commits_map[''] = commits_map[pr_head['sha']] = copy
pr.commits_map = json.dumps(commits_map)
return copy
else:
# otherwise do a regular merge
msg = pr._build_merge_message(pr)
project = pr.repository.project_id
merge_head = info.repo.merge(
info.head, pr.head, str(msg),
author=(project.github_name, project.github_email),
)
# and the merge commit is the normal merge head
commits_map[''] = merge_head
pr.commits_map = json.dumps(commits_map)
return merge_head
def is_mentioned(message: Union[PullRequests, str], pr: PullRequests, *, full_reference: bool = False) -> bool:
"""Returns whether ``pr`` is mentioned in ``message```
"""
if full_reference:
pattern = fr'\b{re.escape(pr.display_name)}\b'
else:
repository = pr.repository.name # .replace('/', '\\/')
pattern = fr'( |\b{repository})#{pr.number}\b'
return bool(re.search(pattern, message if isinstance(message, str) else message.message))
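# illustrative behaviour, assuming a hypothetical PR odoo/odoo#42:
#
#   >>> is_mentioned("Fixes odoo/odoo#42", pr, full_reference=True)
#   True
#   >>> is_mentioned("closes #42", pr)
#   True
#   >>> is_mentioned("see #421", pr)  # trailing \b rejects prefix matches
#   False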
def add_self_references(pr: PullRequests, commits: List[github.PrCommit]):
"""Adds a footer reference to ``self`` to all ``commits`` if they don't
already refer to the PR.
"""
for c in (c['commit'] for c in commits):
if not is_mentioned(c['message'], pr):
message = c['message']
m = Message.from_message(message)
m.headers.pop('Part-Of', None)
m.headers.add('Part-Of', pr.display_name)
c['message'] = str(m)
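# e.g. (sketch) a commit message "fix the thing" on hypothetical PR
# odoo/odoo#42 becomes
#
#   fix the thing
#
#   Part-of: odoo/odoo#42
#
# while commits already mentioning the PR are left untouched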
BREAK = re.compile(r'''
[ ]{0,3} # 0-3 spaces of indentation
# followed by a sequence of three or more matching -, _, or * characters,
# each followed optionally by any number of spaces or tabs
# so needs to start with a _, - or *, then have at least 2 more such
# interspersed with any number of spaces or tabs
([*_-])
([ \t]*\1){2,}
[ \t]*
''', flags=re.VERBOSE)
SETEX_UNDERLINE = re.compile(r'''
[ ]{0,3} # no more than 3 spaces indentation
[-=]+ # a sequence of = characters or a sequence of - characters
[ ]* # any number of trailing spaces
# we don't care about "a line containing a single -" because we want to
# disambiguate SETEX headings from thematic breaks, and thematic breaks have
# 3+ -. Doesn't look like GH interprets `- - -` as an underline so yay...
''', flags=re.VERBOSE)
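# illustrative inputs (fullmatch, as used in from_message below): '***',
# '- - -' and '___' match BREAK only; '===' matches SETEX_UNDERLINE only;
# '---' matches both, which is why the parser defers the decision until it
# has seen the preceding line (maybe_setex)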
HEADER = re.compile('([A-Za-z-]+): (.*)')
class Message:
@classmethod
def from_message(cls, msg: Union[PullRequests, str]) -> 'Message':
in_headers = True
maybe_setex = None
# creating from PR message -> remove content following break
if isinstance(msg, str):
message, handle_break = (msg, False)
else:
message, handle_break = (msg.message, True)
headers = []
body: List[str] = []
# don't process the title (first line) of the commit message
lines = message.splitlines()
for line in reversed(lines[1:]):
if maybe_setex:
# NOTE: actually slightly more complicated: it's a SETEX heading
# only if preceding line(s) can be interpreted as a
# paragraph so e.g. a title followed by a line of dashes
# would indeed be a break, but this should be good enough
# for now, if we need more we'll need a full-blown
# markdown parser probably
if line: # actually a SETEX title -> add underline to body then process current
body.append(maybe_setex)
else: # actually break, remove body then process current
body = []
maybe_setex = None
if not line:
if not in_headers and body and body[-1]:
body.append(line)
continue
if handle_break and BREAK.fullmatch(line):
if SETEX_UNDERLINE.fullmatch(line):
maybe_setex = line
else:
body = []
continue
h = HEADER.fullmatch(line)
if h:
# c-a-b = special case from an existing test, not sure if actually useful?
if in_headers or h[1].lower() == 'co-authored-by':
headers.append(h.groups())
continue
body.append(line)
in_headers = False
# if there are non-title body lines, add a separation after the title
if body and body[-1]:
body.append('')
body.append(lines[0])
return cls('\n'.join(reversed(body)), Headers(reversed(headers)))
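# minimal parse sketch:
#
#   >>> m = Message.from_message(
#   ...     "title\n\nbody\n\nSigned-off-by: A <a@example.com>")
#   >>> m.body
#   'title\n\nbody'
#   >>> m.headers['signed-off-by']  # werkzeug Headers are case-insensitive
#   'A <a@example.com>'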
def __init__(self, body: str, headers: Optional[Headers] = None):
self.body = body
self.headers = headers or Headers()
def __setattr__(self, name, value):
# make sure stored body is always stripped
if name == 'body':
value = value and value.strip()
super().__setattr__(name, value)
def __str__(self):
if not self.headers:
return self.body.rstrip() + '\n'
with io.StringIO() as msg:
msg.write(self.body.rstrip())
msg.write('\n\n')
# https://git.wiki.kernel.org/index.php/CommitMessageConventions
# seems to mostly use capitalised names (rather than title-cased)
keys = list(OrderedSet(k.capitalize() for k in self.headers.keys()))
# c-a-b must be at the very end otherwise github doesn't see it
keys.sort(key=lambda k: k == 'Co-authored-by')
for k in keys:
for v in self.headers.getlist(k):
msg.write(k)
msg.write(': ')
msg.write(v)
msg.write('\n')
return msg.getvalue()
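# and back out (sketch): header names are re-emitted capitalised, with
# Co-authored-by forced last and a terminating newline
#
#   >>> str(Message('title\n\nbody', Headers([('signed-off-by', 'A <a@example.com>')])))
#   'title\n\nbody\n\nSigned-off-by: A <a@example.com>\n'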

View File

@ -1,7 +1,6 @@
import datetime
import itertools
import json
import textwrap
import time
from unittest import mock
@ -118,7 +117,7 @@ def test_trivial_flow(env, repo, page, users, config):
'b': 'a second file',
}
assert master.message == "gibberish\n\nblahblah\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(repo=repo, reviewer=get_partner(env, users['reviewer']))
class TestCommitMessage:
@ -143,7 +142,7 @@ class TestCommitMessage:
master = repo.commit('heads/master')
assert master.message == "simple commit message\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(repo=repo, reviewer=get_partner(env, users['reviewer']))
def test_commit_existing(self, env, repo, users, config):
@ -168,7 +167,7 @@ class TestCommitMessage:
master = repo.commit('heads/master')
# closes #1 is already present, should not modify message
assert master.message == "simple commit message that closes #1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(reviewer=get_partner(env, users['reviewer']))
def test_commit_other(self, env, repo, users, config):
@ -193,7 +192,7 @@ class TestCommitMessage:
master = repo.commit('heads/master')
# closes on another repository, should modify the commit message
assert master.message == "simple commit message that closes odoo/enterprise#1\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(repo=repo, reviewer=get_partner(env, users['reviewer']))
def test_commit_wrong_number(self, env, repo, users, config):
@ -218,7 +217,7 @@ class TestCommitMessage:
master = repo.commit('heads/master')
# closes on another repository, should modify the commit message
assert master.message == "simple commit message that closes #11\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(repo=repo, reviewer=get_partner(env, users['reviewer']))
def test_commit_delegate(self, env, repo, users, config):
@ -248,7 +247,7 @@ class TestCommitMessage:
master = repo.commit('heads/master')
assert master.message == "simple commit message\n\ncloses {repo.name}#1"\
"\n\nSigned-off-by: {reviewer.formatted_email}"\
"\n\nSigned-off-by: {reviewer.formatted_email}\n"\
.format(repo=repo, reviewer=get_partner(env, users['other']))
def test_commit_coauthored(self, env, repo, users, config):
@ -286,7 +285,8 @@ Fixes a thing
closes {repo.name}#1
Signed-off-by: {reviewer.formatted_email}
Co-authored-by: Bob <bob@example.com>""".format(
Co-authored-by: Bob <bob@example.com>
""".format(
repo=repo,
reviewer=get_partner(env, users['reviewer'])
)
@ -695,9 +695,9 @@ def test_ff_failure_batch(env, repo, users, config):
reviewer = get_partner(env, users["reviewer"]).formatted_email
assert messages == {
'initial', 'NO!',
part_of('a1', pr_a), part_of('a2', pr_a), f'A\n\ncloses {pr_a.display_name}\n\nSigned-off-by: {reviewer}',
part_of('b1', pr_b), part_of('b2', pr_b), f'B\n\ncloses {pr_b.display_name}\n\nSigned-off-by: {reviewer}',
part_of('c1', pr_c), part_of('c2', pr_c), f'C\n\ncloses {pr_c.display_name}\n\nSigned-off-by: {reviewer}',
part_of('a1', pr_a), part_of('a2', pr_a), f'A\n\ncloses {pr_a.display_name}\n\nSigned-off-by: {reviewer}\n',
part_of('b1', pr_b), part_of('b2', pr_b), f'B\n\ncloses {pr_b.display_name}\n\nSigned-off-by: {reviewer}\n',
part_of('c1', pr_c), part_of('c2', pr_c), f'C\n\ncloses {pr_c.display_name}\n\nSigned-off-by: {reviewer}\n',
}
class TestPREdition:
@ -1526,7 +1526,7 @@ commits, I need to know how to merge it:
nb1 = node(part_of('B1', pr_id), node(part_of('B0', pr_id), nm2))
reviewer = get_partner(env, users["reviewer"]).formatted_email
merge_head = (
f'title\n\nbody\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}',
f'title\n\nbody\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}\n',
frozenset([nm2, nb1])
)
assert staging == merge_head
@ -1622,7 +1622,7 @@ commits, I need to know how to merge it:
# then compare to the dag version of the right graph
nm2 = node('M2', node('M1', node('M0')))
reviewer = get_partner(env, users["reviewer"]).formatted_email
nb1 = node(f'B1\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}',
nb1 = node(f'B1\n\ncloses {pr_id.display_name}\n\nSigned-off-by: {reviewer}\n',
node(part_of('B0', pr_id), nm2))
assert staging == nb1
@ -1712,7 +1712,7 @@ commits, I need to know how to merge it:
c0 = node('C0', m)
reviewer = get_partner(env, users["reviewer"]).formatted_email
expected = node('gibberish\n\nblahblah\n\ncloses {}#{}'
'\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), m, c0)
'\n\nSigned-off-by: {}\n'.format(repo.name, prx.number, reviewer), m, c0)
assert log_to_node(repo.log('heads/master')), expected
pr = env['runbot_merge.pull_requests'].search([
('repository.name', '=', repo.name),
@ -1754,7 +1754,7 @@ commits, I need to know how to merge it:
c0 = node('C0', m)
reviewer = get_partner(env, users["reviewer"]).formatted_email
expected = node('gibberish\n\ncloses {}#{}'
'\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer), m, c0)
'\n\nSigned-off-by: {}\n'.format(repo.name, prx.number, reviewer), m, c0)
assert log_to_node(repo.log('heads/master')), expected
@pytest.mark.parametrize('separator', [
@ -1784,15 +1784,15 @@ commits, I need to know how to merge it:
env.run_crons()
head = repo.commit('heads/master')
assert head.message == textwrap.dedent(f"""\
title
assert head.message == f"""\
title
first
first
closes {repo.name}#{pr.number}
closes {repo.name}#{pr.number}
Signed-off-by: {reviewer}
""").strip(), "should not contain the content which follows the thematic break"
Signed-off-by: {reviewer}
""", "should not contain the content which follows the thematic break"
def test_pr_message_setex_title(self, repo, env, users, config):
""" should not break on a proper SETEX-style title """
@ -1824,21 +1824,21 @@ removed
env.run_crons()
head = repo.commit('heads/master')
assert head.message == textwrap.dedent(f"""\
title
assert head.message == f"""\
title
Title
---
This is some text
Title
---
This is some text
Title 2
-------
This is more text
Title 2
-------
This is more text
closes {repo.name}#{pr.number}
closes {repo.name}#{pr.number}
Signed-off-by: {reviewer}
""").strip(), "should not break the SETEX titles"
Signed-off-by: {reviewer}
""", "should not break the SETEX titles"
def test_rebase_no_edit(self, repo, env, users, config):
""" Only the merge messages should be de-breaked
@ -1861,17 +1861,17 @@ removed
env.run_crons()
head = repo.commit('heads/master')
assert head.message == textwrap.dedent(f"""\
Commit
assert head.message == f"""\
Commit
first
***
second
first
***
second
closes {repo.name}#{pr.number}
closes {repo.name}#{pr.number}
Signed-off-by: {reviewer}
""").strip(), "squashed / rebased messages should not be stripped"
Signed-off-by: {reviewer}
""", "squashed / rebased messages should not be stripped"
def test_title_no_edit(self, repo, env, users, config):
"""The first line of a commit message should not be taken in account for
@ -1903,13 +1903,15 @@ thing: thong
closes {pr_id.display_name}
Signed-off-by: {reviewer}"""
Signed-off-by: {reviewer}
"""
assert repo.commit(staging_head.parents[0]).message == f"""\
Some: thing
is odd
Part-of: {pr_id.display_name}"""
Part-of: {pr_id.display_name}
"""
def test_pr_mergehead(self, repo, env, config):
""" if the head of the PR is a merge commit and one of the parents is
@ -1994,7 +1996,7 @@ Part-of: {pr_id.display_name}"""
m1 = node('M1')
reviewer = get_partner(env, users["reviewer"]).formatted_email
expected = node(
'T\n\nTT\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, prx.number, reviewer),
'T\n\nTT\n\ncloses {}#{}\n\nSigned-off-by: {}\n'.format(repo.name, prx.number, reviewer),
node('M2', m1),
node('C1', node('C0', m1), node('B0', m1))
)
@ -2070,7 +2072,7 @@ Part-of: {pr_id.display_name}"""
closes {pr1_id.display_name}
Signed-off-by: {get_partner(env, users["reviewer"]).formatted_email}\
Signed-off-by: {get_partner(env, users["reviewer"]).formatted_email}
"""
assert one['commit']['committer']['name'] == a_user['name']
assert one['commit']['committer']['email'] == a_user['email']
@ -2101,7 +2103,7 @@ closes {pr2_id.display_name}
Signed-off-by: {get_partner(env, users["reviewer"]).formatted_email}
Co-authored-by: {a_user['name']} <{a_user['email']}>
Co-authored-by: {other_user['name']} <{other_user['email']}>\
Co-authored-by: {other_user['name']} <{other_user['email']}>
"""
assert repo.read_tree(repo.commit(two['sha'])) == {
'a': '0',
@ -2661,12 +2663,12 @@ class TestBatching(object):
staging = log_to_node(log)
reviewer = get_partner(env, users["reviewer"]).formatted_email
p1 = node(
'title PR1\n\nbody PR1\n\ncloses {}\n\nSigned-off-by: {}'.format(pr1.display_name, reviewer),
'title PR1\n\nbody PR1\n\ncloses {}\n\nSigned-off-by: {}\n'.format(pr1.display_name, reviewer),
node('initial'),
node(part_of('commit_PR1_01', pr1), node(part_of('commit_PR1_00', pr1), node('initial')))
)
p2 = node(
'title PR2\n\nbody PR2\n\ncloses {}\n\nSigned-off-by: {}'.format(pr2.display_name, reviewer),
'title PR2\n\nbody PR2\n\ncloses {}\n\nSigned-off-by: {}\n'.format(pr2.display_name, reviewer),
p1,
node(part_of('commit_PR2_01', pr2), node(part_of('commit_PR2_00', pr2), p1))
)
@ -2701,12 +2703,12 @@ class TestBatching(object):
reviewer = get_partner(env, users["reviewer"]).formatted_email
p1 = node(
'title PR1\n\nbody PR1\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr1.number, reviewer),
'title PR1\n\nbody PR1\n\ncloses {}#{}\n\nSigned-off-by: {}\n'.format(repo.name, pr1.number, reviewer),
node('initial'),
node('commit_PR1_01', node('commit_PR1_00', node('initial')))
)
p2 = node(
'title PR2\n\nbody PR2\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr2.number, reviewer),
'title PR2\n\nbody PR2\n\ncloses {}#{}\n\nSigned-off-by: {}\n'.format(repo.name, pr2.number, reviewer),
p1,
node('commit_PR2_01', node('commit_PR2_00', node('initial')))
)
@ -2735,8 +2737,8 @@ class TestBatching(object):
staging = log_to_node(log)
reviewer = get_partner(env, users["reviewer"]).formatted_email
expected = node('commit_PR2_00\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr2.number, reviewer),
node('commit_PR1_00\n\ncloses {}#{}\n\nSigned-off-by: {}'.format(repo.name, pr1.number, reviewer),
expected = node('commit_PR2_00\n\ncloses {}#{}\n\nSigned-off-by: {}\n'.format(repo.name, pr2.number, reviewer),
node('commit_PR1_00\n\ncloses {}#{}\n\nSigned-off-by: {}\n'.format(repo.name, pr1.number, reviewer),
node('initial')))
assert staging == expected

View File

@ -5,7 +5,6 @@ source branches).
When preparing a staging, we simply want to ensure branch-matched PRs
are staged concurrently in all repos
"""
import json
import time
import xmlrpc.client
@ -182,7 +181,6 @@ def test_stage_match(env, project, repo_a, repo_b, config, page):
assert 'Related: {}'.format(pr_b.display_name) in repo_a.commit('master').message
assert 'Related: {}'.format(pr_a.display_name) in repo_b.commit('master').message
print(pr_a.batch_ids.read(['staging_id', 'prs']))
# check that related PRs *still* link to one another after merge
assert get_related_pr_labels(pr_page(page, prx_a)) == [pr_b.display_name]
assert get_related_pr_labels(pr_page(page, prx_b)) == [pr_a.display_name]
@ -436,7 +434,7 @@ def test_merge_fail(env, project, repo_a, repo_b, users, config):
)
env.run_crons()
s2 = to_pr(env, pr2a) | to_pr(env, pr2b)
pr2a_id, pr2b_id = s2 = to_pr(env, pr2a) | to_pr(env, pr2b)
st = env['runbot_merge.stagings'].search([])
assert set(st.batch_ids.prs.ids) == set(s2.ids)
@ -454,12 +452,14 @@ def test_merge_fail(env, project, repo_a, repo_b, users, config):
c['commit']['message']
for c in repo_a.log('heads/staging.master')
] == [
"""commit_do-b-thing_00
f"""\
commit_do-b-thing_00
closes %s
closes {pr2a_id.display_name}
Related: %s
Signed-off-by: %s""" % (s2[0].display_name, s2[1].display_name, reviewer),
Related: {pr2b_id.display_name}
Signed-off-by: {reviewer}
""",
'initial'
], "dummy commit + squash-merged PR commit + root commit"
@ -1093,13 +1093,6 @@ def test_multi_project(env, make_repo, setreviewers, users, config,
pr1_id = to_pr(env, pr1)
pr2_id = to_pr(env, pr2)
print(
pr1.repo.name, pr1.number, pr1_id.display_name, pr1_id.label,
'\n',
pr2.repo.name, pr2.number, pr2_id.display_name, pr2_id.label,
flush=True,
)
assert pr1_id.state == 'ready' and not pr1_id.blocked
assert pr2_id.state == 'validated'
@ -1262,12 +1255,12 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
c_b = repo_b.commit('1.1')
assert c_b.message.startswith('Release 1.1 (B)')
assert repo_b.read_tree(c_b) == {'f': '1', 'version': ''}
assert repo_b.read_tree(c_b) == {'f': '1', 'version': '1.1'}
assert c_b.parents[0] == master_head_b
c_c = repo_c.commit('1.1')
assert c_c.message.startswith('Release 1.1 (C)')
assert repo_c.read_tree(c_c) == {'f': '2', 'version': ''}
assert repo_c.read_tree(c_c) == {'f': '2', 'version': '1.1'}
assert repo_c.commit(c_c.parents[0]).parents[0] == master_head_c
@ -1278,7 +1271,7 @@ def setup_mess(repo_a, repo_b, repo_c):
[root, _] = r.make_commits(
None,
Commit('base', tree={'version': '', 'f': '0'}),
Commit('release 1.0', tree={'version': '1.0'} if r is repo_a else None),
Commit('release 1.0', tree={'version': '1.0'}),
ref='heads/1.0'
)
master_heads.extend(r.make_commits(root, Commit('other', tree={'f': '1'}), ref='heads/master'))
@ -1300,7 +1293,7 @@ def setup_mess(repo_a, repo_b, repo_c):
with repo_b:
repo_b.make_commits(
master_heads[1],
Commit('Release 1.1 (B)', tree=None),
Commit('Release 1.1 (B)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_b = repo_b.make_pr(target='master', head='release-1.1')
@ -1309,7 +1302,7 @@ def setup_mess(repo_a, repo_b, repo_c):
pr_other = repo_c.make_pr(target='master', head='whocares')
repo_c.make_commits(
master_heads[2],
Commit('Release 1.1 (C)', tree=None),
Commit('Release 1.1 (C)', tree={'version': '1.1'}),
ref='heads/release-1.1'
)
pr_rel_c = repo_c.make_pr(target='master', head='release-1.1')
@ -1437,7 +1430,8 @@ def test_freeze_conflict(env, project, repo_a, repo_b, repo_c, users, config):
# create conflicting branch
with repo_c:
repo_c.make_ref('heads/1.1', heads[2])
[c] = repo_c.make_commits(heads[2], Commit("exists", tree={'version': ''}))
repo_c.make_ref('heads/1.1', c)
# actually perform the freeze
with pytest.raises(xmlrpc.client.Fault) as e:

View File

@ -25,6 +25,10 @@
<group>
<group>
<field name="github_token"/>
<field name="github_name" readonly="0"
help="Identity when creating new commits, defaults to github name, falls back to login."/>
<field name="github_email" readonly="0"
help="Identity when creating new commits, defaults to public email, falls back to primary email."/>
<field name="secret"/>
</group>
<group>