mirror of https://github.com/odoo/runbot.git
[ADD] runbot_merge: synthetic batches & stagings to freeze wizard
Merged PRs should have a batch, which should in turn have a staging; this makes treatment uniform across the board and avoids funky data that is hard to place, or issues when reconstructing history. Also create synthetic batches & stagings for older freezes (and bumps).
This commit is contained in:
parent
bbce5f8f46
commit
3191c44459
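
For context, the invariant this commit enforces ("merged PRs should have a batch which should have a staging") can be spot-checked with a query along these lines. This is a sketch only: it assumes direct psycopg2 access to the mergebot database, and the table names are taken from the migration in the diff below.

import psycopg2

# Count merged PRs with no batch at all (and therefore no staging); the
# migration below creates synthetic batches/stagings so this reaches zero.
with psycopg2.connect("dbname=mergebot") as conn, conn.cursor() as cr:
    cr.execute("""
        SELECT count(*)
          FROM runbot_merge_pull_requests p
          LEFT JOIN runbot_merge_batch_runbot_merge_pull_requests_rel rel
                 ON rel.runbot_merge_pull_requests_id = p.id
         WHERE p.state = 'merged'
           AND rel.runbot_merge_batch_id IS NULL
    """)
    [orphans] = cr.fetchone()
    print(f"{orphans} merged PRs without a batch")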
@@ -70,8 +70,114 @@ def hlink(url):
 def link(label, url):
     return f"{hlink(url)}{label}{hlink('')}"
 
 
+def batch_freezes(cr):
+    """Old freezes were created batch-less but marked as merged; to make
+    things more consistent and avoid losing them for e.g. synthetic git
+    histories, associate them with synthetic successful stagings.
+    """
+    cr.execute("SELECT id FROM res_users WHERE login = 'moc@odoo.com'")
+    [uid] = cr.fetchone()
+    cr.execute("""
+    SELECT
+        array_agg(DISTINCT p.target) AS target,
+        array_agg(DISTINCT p.merge_date) AS merge_date,
+        json_object_agg(r.id, json_build_object(
+            'id', p.id,
+            'head', p.commits_map::json->''
+        )) AS prs
+
+    FROM runbot_merge_pull_requests p
+    JOIN runbot_merge_repository r ON (r.id = p.repository)
+    JOIN runbot_merge_branch t ON (t.id = p.target)
+
+    LEFT JOIN runbot_merge_batch_runbot_merge_pull_requests_rel bp ON (runbot_merge_pull_requests_id = p.id)
+    LEFT JOIN runbot_merge_batch b ON (runbot_merge_batch_id = b.id)
+    LEFT JOIN runbot_merge_stagings s ON (b.staging_id = s.id)
+
+    WHERE p.state = 'merged'
+      AND runbot_merge_pull_requests_id IS NULL
+      AND p.id != 1
+
+    GROUP BY label;
+    """)
+    freeze_batches = [
+        (target, merge_date, {int(r): p for r, p in prs.items()})
+        for [target], [merge_date], prs in cr._obj
+    ]
+
+    stagings = []
+    for t, m, prs in freeze_batches:
+        # fetch the preceding successful staging on master
+        cr.execute("""
+        SELECT id
+        FROM runbot_merge_stagings
+        -- target 1 = master (so we want the last successful master staging before the freeze)
+        WHERE state = 'success' AND staged_at < %s AND target = 1
+        ORDER BY staged_at DESC
+        LIMIT 1
+        """, [m])
+        cr.execute("""
+        SELECT repository_id, commit_id
+        FROM runbot_merge_stagings_commits
+        WHERE staging_id = %s
+        """, cr.fetchone())
+        commits = dict(cr._obj)
+
+        cr.execute("""
+        INSERT INTO runbot_merge_stagings
+            (state, active, create_uid, write_uid, target, staged_at, create_date, write_date)
+        VALUES ('success', false, %s, %s, %s, %s, %s, %s)
+        RETURNING id
+        """, [uid, uid, t, m, m, m])
+        [[staging]] = cr.fetchall()
+        stagings.append(staging)
+
+        for repo, pr in prs.items():
+            if repo not in commits:
+                cr.execute("""
+                INSERT INTO runbot_merge_commit (sha) VALUES (%s)
+                ON CONFLICT (sha) DO UPDATE
+                    SET to_check = runbot_merge.to_check
+                RETURNING id
+                """, [pr['head']])
+                [cid] = cr.fetchone()
+                commits[repo] = cid
+
+        for repo, commit in commits.items():
+            cr.execute("""
+            INSERT INTO runbot_merge_stagings_commits
+                (staging_id, repository_id, commit_id)
+            VALUES (%s, %s, %s)
+            """, [staging, repo, commit])
+            cr.execute("""
+            INSERT INTO runbot_merge_stagings_heads
+                (staging_id, repository_id, commit_id)
+            VALUES (%s, %s, %s)
+            """, [staging, repo, commit])
+
+    batches = []
+    for staging, (_, date, _) in zip(stagings, freeze_batches):
+        cr.execute("""
+        INSERT INTO runbot_merge_batch
+            (create_uid, write_uid, staging_id, create_date, write_date)
+        VALUES (%s, %s, %s, %s, %s)
+        RETURNING id
+        """, [uid, uid, staging, date, date])
+        [[batch]] = cr.fetchall()
+        batches.append(batch)
+
+    for batch, (_, _, prs) in zip(batches, freeze_batches):
+        for pr in prs.values():
+            cr.execute("""
+            INSERT INTO runbot_merge_batch_runbot_merge_pull_requests_rel
+                (runbot_merge_batch_id, runbot_merge_pull_requests_id)
+            VALUES (%s, %s)
+            """, [batch, pr['id']])
+
+
 def migrate(cr, version):
-    cr.execute("select 1 from forwardport_batches")
+    cr.execute("select from forwardport_batches")
     assert not cr.rowcount, f"can't migrate the mergebot with enqueued forward ports (found {cr.rowcount})"
     # avoid SQL taking absolutely ungodly amounts of time
     cr.execute("SET statement_timeout = '60s'")
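
A note on the unpacking patterns in the migration above: `[uid] = cr.fetchone()` and `[[staging]] = cr.fetchall()` destructure the query result and fail loudly if it doesn't have exactly the expected shape. A standalone sketch (values invented):

# fetchone() returns one row as a tuple; single-element unpacking asserts
# the row has exactly one column and raises ValueError otherwise
row = (42,)
[uid] = row
assert uid == 42

# fetchall() returns a list of rows; the nested pattern asserts exactly
# one row with exactly one column
rows = [(7,)]
[[staging_id]] = rows
assert staging_id == 7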
@@ -83,6 +189,7 @@ def migrate(cr, version):
     """)
 
     cleanup(cr)
+    batch_freezes(cr)
 
     cr.execute("""
     SELECT
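
For orientation, `migrate(cr, version)` is the hook signature Odoo looks for in a module's migration scripts (files under `<module>/migrations/<version>/`, named `pre-*.py`, `post-*.py`, or `end-*.py`). A minimal sketch of the convention; the path and query are illustrative, not from this commit:

# runbot_merge/migrations/<version>/pre-migrate.py (path illustrative)
# Odoo calls migrate(cr, version) with a raw database cursor, before
# (pre-) or after (post-) loading the module's updated code.
def migrate(cr, version):
    cr.execute("SELECT count(*) FROM runbot_merge_pull_requests")
    [total] = cr.fetchone()
    # `version` is the currently installed module version being upgraded
    print(f"migrating from {version} with {total} PRs in the database")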
@@ -218,17 +325,13 @@ def migrate(cr, version):
         cr.rowcount,
     )
 
-    # FIXME: fixup PRs marked as merged which don't actually have a batch / staging?
-
     # the relinked batches are those from stagings, but that means merged PRs
     # (or at least PRs we tried to merge), we also need batches for non-closed
     # non-merged PRs
-    logger.info("collect unbatched PRs PRs...")
+    logger.info("collect unbatched PRs...")
     cr.execute("""
     SELECT
         CASE
-            -- FIXME: should closed PRs w/o a batch be split out or matched with
-            -- one another?
             WHEN label SIMILAR TO '%%:patch-[[:digit:]]+'
             THEN id::text
             ELSE label
@@ -272,8 +375,6 @@ def migrate(cr, version):
         ),
     )
 
-    # FIXME: leverage WEIRD_SEQUENCES
-
     logger.info("move pr data to batches...")
     cr.execute("""
     UPDATE runbot_merge_batch b
@@ -267,6 +267,11 @@ class FreezeWizard(models.Model):
                 bump.pr_id.display_name, prev, len(commits))
             bump_heads[repo_id] = repos[repo_id].rebase(prev, commits)[0]
 
+        # prevent concurrent updates to the commits table so we control the
+        # creation of commit objects from rebasing the release & bump PRs, do it
+        # only just before *pushing*
+        self.env.cr.execute("LOCK runbot_merge_commit IN ACCESS EXCLUSIVE MODE NOWAIT")
+
         deployed = {}
         # at this point we've got a bunch of tmp branches with merged release
         # and bump PRs, it's time to update the corresponding targets
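
The `NOWAIT` qualifier above makes the `LOCK` fail immediately with a lock-not-available error instead of queueing behind other transactions. A hedged sketch of what that failure mode looks like outside Odoo, using raw psycopg2 (the error handling and message are illustrative, not part of the commit):

import psycopg2
from psycopg2 import errors

with psycopg2.connect("dbname=mergebot") as conn, conn.cursor() as cr:
    try:
        cr.execute("LOCK runbot_merge_commit IN ACCESS EXCLUSIVE MODE NOWAIT")
    except errors.LockNotAvailable:
        # a concurrent transaction is using the commits table: give up
        # rather than stall the freeze midway through pushing branches
        raise RuntimeError("commits table busy, retry the freeze later")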
@@ -350,6 +355,71 @@ class FreezeWizard(models.Model):
             )
         all_prs.batch_id.merge_date = fields.Datetime.now()
         all_prs.reviewed_by = self.env.user.partner_id.id
+        for p in all_prs:
+            p.commits_map = json.dumps({
+                '': deployed[p.id],
+                p.head: deployed[p.id]
+            })
+
+        # stagings have to be created conditionally as otherwise we might not
+        # have a `target` to set and it's mandatory
+        laster = self.env['runbot_merge.stagings'].search(
+            [('target', '=', master.id), ('state', '=', 'success')],
+            order='id desc',
+            limit=1,
+        ).commits.mapped(lambda c: (c.repository_id, c.commit_id))
+        if self.release_pr_ids:
+            rel_items = [(0, 0, {
+                'repository_id': repo.id,
+                'commit_id': self.env['runbot_merge.commit'].create({
+                    'sha': sha,
+                    'to_check': False,
+                }).id,
+            } if (sha := rel_heads.get(repo)) else {
+                'repository_id': repo.id,
+                'commit_id': commit.id,
+            })
+                for repo, commit in laster
+            ]
+            self.env['runbot_merge.stagings'].create([{
+                'state': 'success',
+                'reason': 'release freeze staging',
+                'active': False,
+                'target': b.id,
+                'staging_batch_ids': [
+                    (0, 0, {'runbot_merge_batch_id': batch.id})
+                    for batch in self.release_pr_ids.pr_id.batch_id
+                ],
+                'heads': rel_items,
+                'commits': rel_items,
+            }])
+
+        if self.bump_pr_ids:
+            bump_items = [(0, 0, {
+                'repository_id': repo.id,
+                'commit_id': self.env['runbot_merge.commit'].create({
+                    'sha': sha,
+                    'to_check': False,
+                }).id,
+            } if (sha := bump_heads.get(repo)) else {
+                'repository_id': repo.id,
+                'commit_id': commit.id,
+            })
+                for repo, commit in laster
+            ]
+            self.env['runbot_merge.stagings'].create([{
+                'state': 'success',
+                'reason': 'bump freeze staging',
+                'active': False,
+                'target': master.id,
+                'staging_batch_ids': [
+                    (0, 0, {'runbot_merge_batch_id': batch.id})
+                    for batch in self.bump_pr_ids.pr_id.batch_id
+                ],
+                'heads': bump_items,
+                'commits': bump_items,
+            }])
+
         self.env['runbot_merge.pull_requests.feedback'].create([{
             'repository': pr.repository.id,
             'pull_request': pr.number,
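
The `(0, 0, {...})` triples used above for `staging_batch_ids`, `heads`, and `commits` are Odoo's x2many command format: command 0 creates a new record from the values dict and links it to the field being written, all in one create/write call. A minimal sketch, assuming an Odoo shell where `env` is available (`res.partner` and its `child_ids` field are just a stock model to demonstrate on):

partner = env['res.partner'].create({
    'name': 'Freeze Demo',
    'child_ids': [
        (0, 0, {'name': 'child one'}),  # create a record and link it
        (0, 0, {'name': 'child two'}),
    ],
})
assert len(partner.child_ids) == 2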
@@ -1223,6 +1223,16 @@ def test_freeze_complete(env, project, repo_a, repo_b, repo_c, users, config):
     # stuff that's done directly
     assert all(pr_id.state == 'merged' for pr_id in release_pr_ids)
     assert pr_bump_id.state == 'merged'
+    assert pr_bump_id.commits_map != '{}'
+
+    assert len(release_pr_ids.batch_id) == 1
+    assert release_pr_ids.batch_id.merge_date
+    assert release_pr_ids.batch_id.staging_ids.target.name == '1.1'
+    assert release_pr_ids.batch_id.staging_ids.state == 'success'
+
+    assert pr_bump_id.batch_id.merge_date
+    assert pr_bump_id.batch_id.staging_ids.target.name == 'master'
+    assert pr_bump_id.batch_id.staging_ids.state == 'success'
 
     # stuff that's behind a cron
     env.run_crons()