Mirror of https://github.com/odoo/runbot.git, synced 2025-03-15 15:35:46 +07:00
[ADD] forwardport: automatic branch deleter
If a PR is *merged*, enqueue it for deletion (with a 2-week delay). This is mainly to avoid forward-port branches staying around long after they've been merged (and possibly eventually closed?); it will also clean up regular merged branches, including historical merges forgotten by their author. Fixes #230
parent ac578d430d, commit ea410ab6d1
28 conftest.py
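
The 2-week delay described in the commit message ends up as a write_date cutoff in the remover's _search_domain further down. As a minimal, self-contained illustration of that cutoff computation (same dateutil relativedelta as the diff; the print is only for demonstration, not part of the commit):

# illustration only, not part of the commit
from datetime import datetime
from dateutil import relativedelta

MERGE_AGE = relativedelta.relativedelta(weeks=2)

# PRs whose last write is older than this cutoff become eligible for branch deletion
cutoff = datetime.now() - MERGE_AGE
print(cutoff.strftime('%Y-%m-%d %H:%M:%S'))
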
@@ -385,6 +385,10 @@ class Repo:
        # unwatch repo
        self.unsubscribe()

    @property
    def owner(self):
        return self.name.split('/')[0]

    def unsubscribe(self, token=None):
        self._get_session(token).put('https://api.github.com/repos/{}/subscription'.format(self.name), json={
            'subscribed': False,

@@ -417,7 +421,27 @@ class Repo:
        assert 200 <= r.status_code < 300, r.json()

    def get_ref(self, ref):
        return self.commit(ref).id
        # differs from .commit(ref).id for the sake of assertion error messages
        # apparently commits/{ref} returns 422 or some other fool thing when the
        # ref' does not exist which sucks for asserting "the ref' has been
        # deleted"
        # FIXME: avoid calling get_ref on a hash & remove this code
        if re.match(r'[0-9a-f]{40}', ref):
            # just check that the commit exists
            r = self._session.get('https://api.github.com/repos/{}/git/commits/{}'.format(self.name, ref))
            assert 200 <= r.status_code < 300, r.reason
            return r.json()['sha']

        if ref.startswith('refs/'):
            ref = ref[5:]
        if not ref.startswith('heads'):
            ref = 'heads/' + ref

        r = self._session.get('https://api.github.com/repos/{}/git/ref/{}'.format(self.name, ref))
        assert 200 <= r.status_code < 300, r.reason
        res = r.json()
        assert res['object']['type'] == 'commit'
        return res['object']['sha']

    def commit(self, ref):
        if not re.match(r'[0-9a-f]{40}', ref):

@@ -739,7 +763,7 @@ class PR:

    @property
    def ref(self):
        return 'heads/' + self.branch[1]
        return 'heads/' + self.branch.branch

    def post_comment(self, body, token=None):
        assert self.repo.hook
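
The reworked get_ref above accepts a 40-character sha, a bare branch name, a heads/... ref, or a fully qualified refs/heads/... ref. A standalone sketch of just the normalization step (the GitHub lookups are omitted; normalize_ref is a hypothetical helper, not part of the diff):

# hypothetical helper mirroring the normalization in get_ref above
import re

def normalize_ref(ref):
    if re.match(r'[0-9a-f]{40}', ref):
        return ref  # a commit hash is looked up directly in get_ref
    if ref.startswith('refs/'):
        ref = ref[5:]
    if not ref.startswith('heads'):
        ref = 'heads/' + ref
    return ref

assert normalize_ref('master') == 'heads/master'
assert normalize_ref('heads/master') == 'heads/master'
assert normalize_ref('refs/heads/master') == 'heads/master'
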
@@ -31,4 +31,15 @@
        <field name="numbercall">-1</field>
        <field name="doall" eval="False"/>
    </record>

    <record model="ir.cron" id="remover">
        <field name="name">Remove branches of merged PRs</field>
        <field name="model_id" ref="model_forwardport_branch_remover"/>
        <field name="state">code</field>
        <field name="code">model._process()</field>
        <field name="interval_number">1</field>
        <field name="interval_type">hours</field>
        <field name="numbercall">-1</field>
        <field name="doall" eval="False"/>
    </record>
</odoo>
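
The cron above merely calls _process() on the remover model every hour. A roughly equivalent manual trigger from an Odoo shell might look like this (hypothetical session; the context key is the same override the tests below use):

# hypothetical odoo shell session, assuming the forwardport module is installed
remover = env['forwardport.branch_remover']
# optionally move the two-week cutoff forward, as the tests do via FAKE_PREV_WEEK
remover.with_context(forwardport_merged_before='2019-01-01 00:00:00')._process()
env.cr.commit()
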
@@ -1,18 +1,26 @@
# -*- coding: utf-8 -*-
import logging
from contextlib import ExitStack
from datetime import datetime

from dateutil import relativedelta

from odoo import fields, models
from odoo.addons.runbot_merge.github import GH

# how long a merged PR survives
MERGE_AGE = relativedelta.relativedelta(weeks=2)

_logger = logging.getLogger(__name__)

class Queue:
    limit = 100

    def _process_item(self):
        raise NotImplementedError

    def _process(self):
        for b in self.search([]):
        for b in self.search(self._search_domain(), order='create_date, id', limit=self.limit):
            try:
                b._process_item()
                b.unlink()

@@ -22,10 +30,15 @@ class Queue:
                self.env.cr.rollback()
                self.clear_caches()

    def _search_domain(self):
        return []

class BatchQueue(models.Model, Queue):
    _name = 'forwardport.batches'
    _description = 'batches which got merged and are candidates for forward-porting'

    limit = 10

    batch_id = fields.Many2one('runbot_merge.batch', required=True)
    source = fields.Selection([
        ('merge', 'Merge'),
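
The Queue mixin above defines the contract shared by these cron-driven queues: subclasses provide _process_item, and may override limit and _search_domain. A minimal hypothetical subclass, purely for illustration (not part of the diff; it relies on the module's models, fields and _logger shown above):

# hypothetical subclass showing the Queue contract
class DemoQueue(models.Model, Queue):
    _name = 'forwardport.demo_queue'
    _description = 'illustration of the Queue mixin'

    limit = 10
    payload = fields.Char()

    def _search_domain(self):
        # only pick up records that still have something to do
        return [('payload', '!=', False)]

    def _process_item(self):
        _logger.info("processing %s", self.payload)
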
@@ -56,6 +69,8 @@ class UpdateQueue(models.Model, Queue):
    _name = 'forwardport.updates'
    _description = 'if a forward-port PR gets updated & has followups (cherrypick succeeded) the followups need to be updated as well'

    limit = 10

    original_root = fields.Many2one('runbot_merge.pull_requests')
    new_root = fields.Many2one('runbot_merge.pull_requests')

@@ -81,3 +96,63 @@ class UpdateQueue(models.Model, Queue):
                self.env.cr.commit()

                previous = child

_deleter = _logger.getChild('deleter')
class DeleteBranches(models.Model, Queue):
    _name = 'forwardport.branch_remover'
    _description = "Removes branches of merged PRs"

    pr_id = fields.Many2one('runbot_merge.pull_requests')

    def _search_domain(self):
        cutoff = self.env.context.get('forwardport_merged_before') \
              or fields.Datetime.to_string(datetime.now() - MERGE_AGE)
        return [('pr_id.write_date', '<', cutoff)]

    def _process_item(self):
        _deleter.info(
            "PR %s: checking deletion of linked branch %s",
            self.pr_id.display_name,
            self.pr_id.label
        )

        if self.pr_id.state != 'merged':
            _deleter.info('✘ PR is not "merged" (got %s)', self.pr_id.state)
            return

        repository = self.pr_id.repository
        fp_remote = repository.fp_remote_target
        if not fp_remote:
            _deleter.info('✘ no forward-port target')
            return

        repo_owner, repo_name = fp_remote.split('/')
        owner, branch = self.pr_id.label.split(':')
        if repo_owner != owner:
            _deleter.info('✘ PR owner != FP target owner (%s)', repo_owner)
            return # probably don't have access to arbitrary repos

        github = GH(token=repository.project_id.fp_github_token, repo=fp_remote)
        refurl = 'git/refs/heads/' + branch
        ref = github('get', refurl)
        if ref.status_code != 200:
            _deleter.info("✘ branch already deleted (%s)", ref.json())
            return

        ref = ref.json()
        if ref['object']['sha'] != self.pr_id.head:
            _deleter.info(
                "✘ branch %s head mismatch, expected %s, got %s",
                self.pr_id.label,
                self.pr_id.head,
                ref['object']['sha']
            )
            return

        r = github('delete', refurl)
        assert r.status_code == 204, \
            "Tried to delete branch %s of %s, got %s" % (
                branch, self.pr_id.display_name,
                r.json()
            )
        _deleter.info('✔ deleted branch %s of PR %s', self.pr_id.label, self.pr_id.display_name)
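
For reference, the github('get', ...) and github('delete', ...) calls above go through the mergebot's GH wrapper to GitHub's git refs API. The same check-then-delete can be sketched with plain requests (a rough sketch only; owner, repo, branch, expected_head and token are placeholders, not names from the diff):

# rough requests-based sketch of the check-then-delete above (placeholders throughout)
import requests

def delete_branch_if_head_matches(owner, repo, branch, expected_head, token):
    session = requests.Session()
    session.headers['Authorization'] = 'token ' + token
    url = 'https://api.github.com/repos/{}/{}/git/refs/heads/{}'.format(owner, repo, branch)

    r = session.get(url)
    if r.status_code != 200:
        return False  # branch already gone
    if r.json()['object']['sha'] != expected_head:
        return False  # branch moved since the merge, leave it alone
    return session.delete(url).status_code == 204
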
@@ -202,6 +202,11 @@ class PullRequests(models.Model):
                        "It should be merged the normal way (via @%s)" % p.repository.project_id.github_prefix,
                    'token_field': 'fp_github_token',
                })
        if vals.get('state') == 'merged':
            for p in self:
                self.env['forwardport.branch_remover'].create({
                    'pr_id': p.id,
                })
        return r

    def _try_closing(self, by):
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import collections
from datetime import datetime, timedelta
import time
from datetime import datetime, timedelta
from operator import itemgetter

import pytest

@@ -203,6 +203,22 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
    # check that we didn't just smash the original trees
    assert prod.read_tree(prod.commit('a')) != b_tree != c_tree

    prs = env['forwardport.branch_remover'].search([]).mapped('pr_id')
    assert prs == pr0 | pr1 | pr2, "pr1 and pr2 should be slated for branch deletion"
    env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK})

    # should not have deleted the base branch (wrong repo)
    assert other_user_repo.get_ref(pr.ref) == p_1

    # should have deleted all PR branches
    pr1_ref = prod.get_pr(pr1.number).ref
    with pytest.raises(AssertionError, match='Not Found'):
        other.get_ref(pr1_ref)

    pr2_ref = prod.get_pr(pr2.number).ref
    with pytest.raises(AssertionError, match="Not Found"):
        other.get_ref(pr2_ref)

def test_update_pr(env, config, make_repo, users):
    """ Even for successful cherrypicks, it's possible that e.g. CI doesn't
    pass or the reviewer finds out they need to update the code.

@@ -964,6 +980,86 @@ More info at https://github.com/odoo/odoo/wiki/Mergebot#forward-port
    assert not pr1_1.parent_id
    assert pr2_1.state == 'opened'

class TestBranchDeletion:
    def test_delete_normal(self, env, config, make_repo):
        """ Regular PRs should get their branch deleted as long as they're
        created in the fp repository
        """
        prod, other = make_basic(env, config, make_repo)
        with prod, other:
            [c] = other.make_commits('a', Commit('c', tree={'0': '0'}), ref='heads/abranch')
            pr = prod.make_pr(
                target='a', head='%s:abranch' % other.owner,
                title="a pr",
            )
            prod.post_status(c, 'success', 'legal/cla')
            prod.post_status(c, 'success', 'ci/runbot')
            pr.post_comment('hansen r+', config['role_reviewer']['token'])
        env.run_crons()

        with prod:
            prod.post_status('staging.a', 'success', 'legal/cla')
            prod.post_status('staging.a', 'success', 'ci/runbot')
        env.run_crons()

        pr_id = env['runbot_merge.pull_requests'].search([
            ('repository.name', '=', prod.name),
            ('number', '=', pr.number)
        ])
        assert pr_id.state == 'merged'
        removers = env['forwardport.branch_remover'].search([])
        to_delete_branch = removers.mapped('pr_id')
        assert to_delete_branch == pr_id

        env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK})
        with pytest.raises(AssertionError, match="Not Found"):
            other.get_ref('heads/abranch')

    def test_not_merged(self, env, config, make_repo):
        """ The branches of PRs which are still open or have been closed (rather
        than merged) should not get deleted
        """
        prod, other = make_basic(env, config, make_repo)
        with prod, other:
            [c] = other.make_commits('a', Commit('c1', tree={'1': '0'}), ref='heads/abranch')
            pr1 = prod.make_pr(target='a', head='%s:abranch' % other.owner, title='a')
            prod.post_status(c, 'success', 'legal/cla')
            prod.post_status(c, 'success', 'ci/runbot')
            pr1.post_comment('hansen r+', config['role_reviewer']['token'])

            [c] = other.make_commits('a', Commit('c2', tree={'2': '0'}), ref='heads/bbranch')
            pr2 = prod.make_pr(target='a', head='%s:bbranch' % other.owner, title='b')
            pr2.close()

            [c] = other.make_commits('a', Commit('c3', tree={'3': '0'}), ref='heads/cbranch')
            pr3 = prod.make_pr(target='a', head='%s:cbranch' % other.owner, title='c')
            prod.post_status(c, 'success', 'legal/cla')
            prod.post_status(c, 'success', 'ci/runbot')

            other.make_commits('a', Commit('c3', tree={'4': '0'}), ref='heads/dbranch')
            pr4 = prod.make_pr(target='a', head='%s:dbranch' % other.owner, title='d')
            pr4.post_comment('hansen r+', config['role_reviewer']['token'])
        env.run_crons()

        PR = env['runbot_merge.pull_requests']
        # check PRs are in states we expect
        pr_heads = []
        for p, st in [(pr1, 'ready'), (pr2, 'closed'), (pr3, 'validated'), (pr4, 'approved')]:
            p_id = PR.search([
                ('repository.name', '=', prod.name),
                ('number', '=', p.number),
            ])
            assert p_id.state == st
            pr_heads.append(p_id.head)

        env.run_crons('forwardport.remover', context={'forwardport_merged_before': FAKE_PREV_WEEK})

        # check that the branches still exist
        assert other.get_ref('heads/abranch') == pr_heads[0]
        assert other.get_ref('heads/bbranch') == pr_heads[1]
        assert other.get_ref('heads/cbranch') == pr_heads[2]
        assert other.get_ref('heads/dbranch') == pr_heads[3]

def sPeNgBaB(s):
    return ''.join(
        l if i % 2 == 0 else l.upper()
        for i, l in enumerate(s)
    )