[FIX] runbot_merge: ensure all staging branches are built/tested

Before this change, when staging a batch affecting only one repository
(of n), the unaffected repositories would get a staging branch exactly
matching the target.

As a result, either runbot_merge or runbot would simply return the
result of an unrelated build, potentially providing incorrect
information: either failing a staging which should have succeeded
(e.g. a change in repo A broke B, the PR changes repo A in a way that
fixes B, but B's state is reported from the previous, broken build) or
succeeding a staging which should have failed (a change in repo A
breaks B, but a previous build of the exact same B with a different A
succeeded and is returned).

To fix this issue, create a dummy commit at the head of each staging
branch. Because commit dates are included in the hash and have second
precision, duplicate builds are already unlikely, but to be completely
sure some random bits are added to the commit message as well.
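
As an illustration, the dummy commit is created through GitHub's
"create a commit" API (POST /repos/:repo/git/commits), reusing the tree
of the real staging head. The sketch below is illustrative only: the
helper name and the raw `requests` session are assumptions, and the
actual implementation goes through the project's `gh` wrapper (see the
diff below).

    import base64
    import os
    import requests

    def create_rebuild_commit(session, repo, head_sha, tree_sha):
        # random bits make every staging head unique, so CI can never
        # reuse the result of an unrelated build
        uniquifier = base64.b64encode(os.urandom(12)).decode('ascii')
        r = session.post(
            'https://api.github.com/repos/%s/git/commits' % repo,
            json={
                'message': 'force rebuild\n\nuniquifier: %s' % uniquifier,
                'tree': tree_sha,
                'parents': [head_sha],
            })
        r.raise_for_status()
        # the returned sha becomes the staging branch's new head
        return r.json()['sha']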

Various tests were fixed to correctly handle the extra dummy commit on
staging branches.

fixes #35
Xavier Morel 2018-09-10 16:00:26 +02:00
parent 2a17bbec82
commit e98a8caffb
4 changed files with 96 additions and 21 deletions
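
With this change a staging records two heads per repository in its
`heads` mapping: `$repo` is the dummy commit whose statuses are
checked, and `$repo^` is the real staged head which gets fast-forwarded
into the target on success. Illustrative shape only (repository names
and SHAs are made up):

    heads = {
        'owner/repo_a':  '4f1e...',  # dummy "force rebuild" commit, CI'd
        'owner/repo_a^': '9c2d...',  # actual staged head, fast-forwarded on success
    }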

View File

@@ -1,7 +1,9 @@
+import base64
 import collections
 import datetime
 import json
 import logging
+import os
 import pprint
 import re
@@ -77,9 +79,14 @@ class Project(models.Model):
             updated = []
             try:
                 for repo_name, head in staging_heads.items():
+                    if repo_name.endswith('^'):
+                        continue
+
+                    # if the staging has a $repo^ head, merge that,
+                    # otherwise merge the regular (CI'd) head
                     gh[repo_name].fast_forward(
                         staging.target.name,
-                        head
+                        staging_heads.get(repo_name + '^') or head
                     )
                     updated.append(repo_name)
             except exceptions.FastForwardError:
@@ -171,18 +178,31 @@ class Project(models.Model):
                 staged |= Batch.stage(meta, batch)

             if staged:
+                heads = {}
+                for repo, it in meta.items():
+                    tree = it['gh'].commit(it['head'])['tree']
+                    # ensures staging branches are unique and always
+                    # rebuilt
+                    r = base64.b64encode(os.urandom(12)).decode('ascii')
+                    dummy_head = it['gh']('post', 'git/commits', json={
+                        'message': 'force rebuild\n\nuniquifier: %s' % r,
+                        'tree': tree['sha'],
+                        'parents': [it['head']],
+                    }).json()
+                    # $repo is the head to check, $repo^ is the head to merge
+                    heads[repo.name + '^'] = it['head']
+                    heads[repo.name] = dummy_head['sha']
+
                 # create actual staging object
                 st = self.env['runbot_merge.stagings'].create({
                     'target': branch.id,
                     'batch_ids': [(4, batch.id, 0) for batch in staged],
-                    'heads': json.dumps({
-                        repo.name: it['head']
-                        for repo, it in meta.items()
-                    })
+                    'heads': json.dumps(heads)
                 })
                 # create staging branch from tmp
                 for r, it in meta.items():
-                    it['gh'].set_ref('staging.{}'.format(branch.name), it['head'])
+                    it['gh'].set_ref('staging.{}'.format(branch.name), heads[r.name])

                 # creating the staging doesn't trigger a write on the prs
                 # and thus the ->staging taggings, so do that by hand
@@ -757,13 +777,13 @@ class Stagings(models.Model):
     def _validate(self):
         Commits = self.env['runbot_merge.commit']
         for s in self:
-            heads = list(json.loads(s.heads).values())
+            heads = [
+                head for repo, head in json.loads(s.heads).items()
+                if not repo.endswith('^')
+            ]
             commits = Commits.search([
                 ('sha', 'in', heads)
            ])
-            if len(commits) < len(heads):
-                s.state = 'pending'
-                continue

             reqs = [r.strip() for r in s.target.project_id.required_statuses.split(',')]
             st = 'success'
@@ -776,6 +796,13 @@ class Stagings(models.Model):
                     st = 'pending'
                 else:
                     assert v == 'success'
+
+            # mark failure as soon as we find a failed status, but wait until
+            # all commits are known & not pending to mark a success
+            if st == 'success' and len(commits) < len(heads):
+                s.state = 'pending'
+                continue
+
             s.state = st

     def cancel(self, reason, *args):
@@ -825,6 +852,9 @@ class Stagings(models.Model):
         # try inferring which PR failed and only mark that one
         for repo, head in json.loads(self.heads).items():
+            if repo.endswith('^'):
+                continue
+
             commit = self.env['runbot_merge.commit'].search([
                 ('sha', '=', head)
             ])
@@ -959,8 +989,6 @@ class Batch(models.Model):
         # update meta to new heads
         for pr, head in new_heads.items():
             meta[pr.repository]['head'] = head
-            if not self.env['runbot_merge.commit'].search([('sha', '=', head)]):
-                self.env['runbot_merge.commit'].create({'sha': head})
         return self.create({
             'target': prs[0].target.id,
             'prs': [(4, pr.id, 0) for pr in prs],

View File

@@ -1,9 +1,12 @@
 import datetime
+import re

 import pytest

 import odoo

+from test_utils import re_matches
+
 @pytest.fixture
 def repo(make_repo):
     return make_repo('repo')
@@ -46,6 +49,7 @@ def test_trivial_flow(env, repo):
     staging_head = repo.commit('heads/staging.master')
     repo.post_status(staging_head.id, 'success', 'ci/runbot')
     repo.post_status(staging_head.id, 'success', 'legal/cla')
+    assert re.match('^force rebuild', staging_head.message)
     env['runbot_merge.project']._check_progress()

     assert pr.state == 'merged'
@@ -539,11 +543,14 @@ class TestMergeMethod:
         staging = repo.commit('heads/staging.master')
         assert not repo.is_ancestor(prx.head, of=staging.id),\
             "the pr head should not be an ancestor of the staging branch in a squash merge"
-        assert staging.parents == [m2],\
-            "the previous master's tip should be the sole parent of the staging commit"
+        assert re.match('^force rebuild', staging.message)
         assert repo.read_tree(staging) == {
             'm': b'c1', 'm2': b'm2',
         }, "the tree should still be correctly merged"
+        [actual_sha] = staging.parents
+        actual = repo.commit(actual_sha)
+        assert actual.parents == [m2],\
+            "dummy commit aside, the previous master's tip should be the sole parent of the staging commit"

         repo.post_status(staging.id, 'success', 'legal/cla')
         repo.post_status(staging.id, 'success', 'ci/runbot')
@@ -641,13 +648,26 @@ class TestMergeMethod:
         # then compare to the dag version of the right graph
         nm2 = node('M2', node('M1', node('M0')))
         nb1 = node('B1', node('B0', nm2))
-        expected = (
+        merge_head = (
             'title\n\nbody\n\ncloses {}#{}'.format(repo.name, prx.number),
             frozenset([nm2, nb1])
         )
+        expected = (re_matches('^force rebuild'), frozenset([merge_head]))
         assert staging == expected

-        final_tree = repo.read_tree(repo.commit('heads/staging.master'))
+        repo.post_status('heads/staging.master', 'success', 'legal/cla')
+        repo.post_status('heads/staging.master', 'success', 'ci/runbot')
+        env['runbot_merge.project']._check_progress()
+
+        assert env['runbot_merge.pull_requests'].search([
+            ('repository.name', '=', repo.name),
+            ('number', '=', prx.number),
+        ]).state == 'merged'
+
+        # check that the dummy commit is not in the final master
+        master = log_to_node(repo.log('heads/master'))
+        assert master == merge_head
+        final_tree = repo.read_tree(repo.commit('heads/master'))
         assert final_tree == {'m': b'2', 'b': b'1'}, "sanity check of final tree"

     @pytest.mark.skip(reason="what do if the PR contains merge commits???")

View File

@@ -9,6 +9,8 @@ import json
 import pytest

+from test_utils import re_matches
+
 @pytest.fixture
 def repo_a(make_repo):
     return make_repo('a')
@@ -138,11 +140,17 @@ def test_sub_match(env, project, repo_a, repo_b, repo_c):
     # should be part of the same staging
     assert pr_c.staging_id == pr_b.staging_id, \
         "branch-matched PRs should be part of the same staging"
     st = pr_b.staging_id
+    b_staging = repo_b.commit('heads/staging.master')
+    c_staging = repo_c.commit('heads/staging.master')
     assert json.loads(st.heads) == {
-        repo_a.name: repo_a.commit('heads/master').id,
-        repo_b.name: repo_b.commit('heads/staging.master').id,
-        repo_c.name: repo_c.commit('heads/staging.master').id,
+        repo_a.name: repo_a.commit('heads/staging.master').id,
+        repo_a.name + '^': repo_a.commit('heads/master').id,
+        repo_b.name: b_staging.id,
+        repo_b.name + '^': b_staging.parents[0],
+        repo_c.name: c_staging.id,
+        repo_c.name + '^': c_staging.parents[0],
     }

 def test_merge_fail(env, project, repo_a, repo_b, users):
@@ -182,8 +190,14 @@ def test_merge_fail(env, project, repo_a, repo_b, users):
     ]
     other = to_pr(env, pr1a)
     assert not other.staging_id
-    assert len(list(repo_a.log('heads/staging.master'))) == 2,\
-        "root commit + squash-merged PR commit"
+    assert [
+        c['commit']['message']
+        for c in repo_a.log('heads/staging.master')
+    ] == [
+        re_matches('^force rebuild'),
+        'commit_A2_00\n\ncloses %s#2' % repo_a.name,
+        'initial'
+    ], "dummy commit + squash-merged PR commit + root commit"

 def test_ff_fail(env, project, repo_a, repo_b):
     """ In a matched-branch scenario, fast-forwarding one of the repos fails

View File

@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+import re
+
+
+class re_matches:
+    def __init__(self, pattern, flags=0):
+        self._r = re.compile(pattern, flags)
+
+    def __eq__(self, text):
+        return self._r.match(text)
+
+    def __repr__(self):
+        return '~' + self._r.pattern + '~'
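
For context, `re_matches` compares equal to any string its pattern
matches, which is what lets the updated tests assert on the dummy
commit's message without knowing the random uniquifier. A small usage
sketch (the messages are made up for illustration):

    from test_utils import re_matches

    # __eq__ delegates to re.match, so a truthy match object is returned
    assert re_matches('^force rebuild') == 'force rebuild\n\nuniquifier: SGVsbG8='

    # inside container comparisons Python falls back to the reflected
    # __eq__, which is how the staging-log assertion in test_merge_fail works
    assert ['force rebuild\n\nuniquifier: abc', 'initial'] == [
        re_matches('^force rebuild'),
        'initial',
    ]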