From bbce5f8f46f270099e54b4a39c4e548798cabfd0 Mon Sep 17 00:00:00 2001
From: Xavier Morel
Date: Wed, 3 Apr 2024 12:08:04 +0200
Subject: [PATCH] [IMP] *: don't remove PRs from batches on close

Initially wanted to skip this only for FW PRs, but after some thinking
I feel this info could still be valuable even for non-fw PRs which were
never merged in the first place.

Requires a few adjustments to not break *everything*: `batch.prs`
excludes closed PRs by default, as most processes don't expect to be
faced with a closed PR inside a batch, and we *especially* want to
avoid that before the batch is merged (as we'd risk staging a closed
PR).

However, since PRs don't get removed from batches anymore (and batches
don't get deleted when they have no PRs), we may now have a bunch of
batches whose PRs (usually a single one) are all closed. This has two
major side-effects:

- a new PR may get attached to an old batch full of closed PRs (as
  batches are only filtered out on being *merged*), which is weird
- the eventual list of batches gets polluted with a bunch of
  irrelevant batches which are hard to filter out

The solution is to reintroduce an `active` field, as a stored computed
field based on the state of the batch's PRs. This way, if all the PRs
of a batch are closed, the batch switches to inactive and is
automatically filtered out by search, which solves both issues.
---
 forwardport/tests/test_batches.py             |  21 +++-
 forwardport/tests/test_weird.py               |  12 +-
 .../migrations/15.0.1.12/pre-migration.py     |   6 +-
 runbot_merge/models/batch.py                  |  37 +++++--
 runbot_merge/models/pull_requests.py          |  22 ++--
 runbot_merge/tests/test_batch_consistency.py  | 104 ++++++++++++++++++
 runbot_merge/tests/test_multirepo.py          |  38 ++++++-
 7 files changed, 208 insertions(+), 32 deletions(-)
 create mode 100644 runbot_merge/tests/test_batch_consistency.py

diff --git a/forwardport/tests/test_batches.py b/forwardport/tests/test_batches.py
index 0ada435c..ad1c6f8b 100644
--- a/forwardport/tests/test_batches.py
+++ b/forwardport/tests/test_batches.py
@@ -1,7 +1,5 @@
 import re
 
-import pytest
-
 from utils import Commit, make_basic, to_pr, seen, re_matches
 
 
@@ -133,6 +131,25 @@ def test_closing_during_fp(env, config, make_repo, users):
         "only one of the forward ports should be ported"
     assert not env['runbot_merge.pull_requests'].search([('parent_id', '=', pr1_1_id.id)]),\
         "the closed PR should not be ported"
+    assert env['runbot_merge.pull_requests'].search([('source_id', '=', pr1_id.id)]) == pr1_1_id,\
+        "the closed PR should not be ported"
+
+    r1_b_head = r1.commit("b")
+    with r2:
+        r2.get_pr(pr2_1_id.number).post_comment('hansen r+', config['role_reviewer']['token'])
+    env.run_crons()
+    assert not pr2_1_id.blocked
+    assert not pr2_1_id.batch_id.blocked
+    st = pr2_1_id.staging_id
+    assert st
+    with r1, r2:
+        r1.post_status('staging.b', 'success')
+        r2.post_status('staging.b', 'success')
+    env.run_crons()
+    assert st.state == 'success'
+
+    assert r1_b_head.id == r1.commit("b").id, \
+        "r1:b's head should not have been touched"
 
 def test_add_pr_during_fp(env, config, make_repo, users):
     """ It should be possible to add new PRs to an FP batch
diff --git a/forwardport/tests/test_weird.py b/forwardport/tests/test_weird.py
index 704d5c2a..74862760 100644
--- a/forwardport/tests/test_weird.py
+++ b/forwardport/tests/test_weird.py
@@ -1085,6 +1085,7 @@ def test_maintain_batch_history(env, config, make_repo, users):
     assert pr1_c_id.batch_id.parent_id == pr1_b_id.batch_id
     assert pr1_b_id.batch_id.parent_id == pr1_a_id.batch_id
     b_batch = pr1_b_id.batch_id
+    assert b_batch
     # endregion
 
     pr1_b = repo.get_pr(pr1_b_id.number)
@@ -1094,10 +1095,11 @@
     assert pr1_b_id.state == 'closed'
 
     # region check that all the batches are *still* set up correctly
-    assert pr1_a_id.batch_id
-    assert not pr1_b_id.batch_id, "the PR still gets detached from the batch"
-    assert b_batch.exists(), "the batch is not deleted tho"
-    assert pr1_c_id.batch_id
+    assert b_batch.exists()
+    assert pr1_a_id.batch_id == b_batch.parent_id
+    assert pr1_b_id.batch_id == b_batch
     assert pr1_c_id.batch_id.parent_id == b_batch
-    assert b_batch.parent_id == pr1_a_id.batch_id
+
+    assert pr1_b_id in b_batch.all_prs, "the PR is still in the batch"
+    assert pr1_b_id not in b_batch.prs, "the PR is not in the open/active batch PRs"
     # endregion
diff --git a/runbot_merge/migrations/15.0.1.12/pre-migration.py b/runbot_merge/migrations/15.0.1.12/pre-migration.py
index 12762afe..107c624b 100644
--- a/runbot_merge/migrations/15.0.1.12/pre-migration.py
+++ b/runbot_merge/migrations/15.0.1.12/pre-migration.py
@@ -76,7 +76,11 @@ def migrate(cr, version):
     # avoid SQL taking absolutely ungodly amounts of time
     cr.execute("SET statement_timeout = '60s'")
     # will be recreated & computed on the fly
-    cr.execute("ALTER TABLE runbot_merge_batch DROP COLUMN target")
+    cr.execute("""
+        ALTER TABLE runbot_merge_batch
+        DROP COLUMN target,
+        DROP COLUMN active
+    """)
 
     cleanup(cr)
 
diff --git a/runbot_merge/models/batch.py b/runbot_merge/models/batch.py
index e48f0541..1472ccd1 100644
--- a/runbot_merge/models/batch.py
+++ b/runbot_merge/models/batch.py
@@ -62,7 +62,9 @@ class Batch(models.Model):
     )
     split_id = fields.Many2one('runbot_merge.split', index=True)
 
-    prs = fields.One2many('runbot_merge.pull_requests', 'batch_id')
+    all_prs = fields.One2many('runbot_merge.pull_requests', 'batch_id')
+    prs = fields.One2many('runbot_merge.pull_requests', compute='_compute_open_prs', search='_search_open_prs')
+    active = fields.Boolean(compute='_compute_active', store=True, help="closed batches (batches containing only closed PRs)")
 
     fw_policy = fields.Selection([
         ('default', "Default"),
@@ -121,14 +123,20 @@ class Batch(models.Model):
         else:
             pattern = self.parent_path + '_%'
 
-        return self.search([("parent_path", '=like', pattern)], order="parent_path")
+        act = self.env.context.get('active_test', True)
+        return self\
+            .with_context(active_test=False)\
+            .search([("parent_path", '=like', pattern)], order="parent_path")\
+            .with_context(active_test=act)
 
     # also depends on all the descendants of the source or sth
     @api.depends('parent_path')
     def _compute_genealogy(self):
         for batch in self:
             sid = next(iter(batch.parent_path.split('/', 1)))
-            batch.genealogy_ids = self.search([("parent_path", "=like", f"{sid}/%")], order="parent_path")
+            batch.genealogy_ids = self \
+                .with_context(active_test=False)\
+                .search([("parent_path", "=like", f"{sid}/%")], order="parent_path")
 
     def _auto_init(self):
         for field in self._fields.values():
@@ -155,15 +163,28 @@ class Batch(models.Model):
             WHERE parent_id IS NOT NULL;
         """)
 
-    @api.depends("prs.target")
+    @api.depends('all_prs.closed')
+    def _compute_active(self):
+        for b in self:
+            b.active = not all(p.closed for p in b.all_prs)
+
+    @api.depends('all_prs.closed')
+    def _compute_open_prs(self):
+        for b in self:
+            b.prs = b.all_prs.filtered(lambda p: not p.closed)
+
+    def _search_open_prs(self, operator, value):
+        return [('all_prs', operator, value), ('active', '=', True)]
+
+    @api.depends("all_prs.target")
     def _compute_target(self):
         for batch in self:
             if len(batch.prs) == 1:
-                batch.target = batch.prs.target
+                batch.target = batch.all_prs.target
             else:
-                targets = set(batch.prs.mapped('target'))
+                targets = set(batch.all_prs.mapped('target'))
                 if not targets:
-                    targets = set(batch.prs.mapped('target'))
+                    targets = set(batch.all_prs.mapped('target'))
                 if len(targets) == 1:
                     batch.target = targets.pop()
                 else:
@@ -180,6 +201,8 @@ class Batch(models.Model):
         for batch in self:
             if batch.merge_date:
                 batch.blocked = "Merged."
+            elif not batch.active:
+                batch.blocked = "all prs are closed"
             elif blocking := batch.prs.filtered(
                 lambda p: p.error or p.draft or not (p.squash or p.merge_method)
             ):
diff --git a/runbot_merge/models/pull_requests.py b/runbot_merge/models/pull_requests.py
index b07b031c..3e706268 100644
--- a/runbot_merge/models/pull_requests.py
+++ b/runbot_merge/models/pull_requests.py
@@ -563,11 +563,6 @@ class PullRequests(models.Model):
             return json.loads(self.overrides)
         return {}
 
-    @api.depends('batch_ids.active')
-    def _compute_active_batch(self):
-        for r in self:
-            r.batch_id = r.batch_ids.filtered(lambda b: b.active)[:1]
-
     def _get_or_schedule(self, repo_name, number, *, target=None, closing=False):
         repo = self.env['runbot_merge.repository'].search([('name', '=', repo_name)])
         if not repo:
@@ -1255,9 +1250,7 @@ class PullRequests(models.Model):
         match vals.get('closed'):
             case True if not self.closed:
                 vals['reviewed_by'] = False
-                vals['batch_id'] = False
-                self.env.cr.precommit.add(self.batch_id.unlink)
-            case False if self.closed:
+            case False if self.closed and not self.batch_id:
                 vals['batch_id'] = self._get_batch(
                     target=vals.get('target') or self.target.id,
                     label=vals.get('label') or self.label,
@@ -1382,8 +1375,7 @@ class PullRequests(models.Model):
             return False
 
         self.unstage("closed by %s", by)
-        # `write` automatically unsets reviewer & batch when closing but...
-        self.write({'closed': True, 'reviewed_by': False, 'batch_id': False})
+        self.write({'closed': True, 'reviewed_by': False})
 
         return True
 
@@ -1764,7 +1756,12 @@ class Stagings(models.Model):
 
     target = fields.Many2one('runbot_merge.branch', required=True, index=True)
     staging_batch_ids = fields.One2many('runbot_merge.staging.batch', 'runbot_merge_stagings_id')
-    batch_ids = fields.Many2many('runbot_merge.batch', context={'active_test': False}, compute="_compute_batch_ids")
+    batch_ids = fields.Many2many(
+        'runbot_merge.batch',
+        context={'active_test': False},
+        compute="_compute_batch_ids",
+        search="_search_batch_ids",
+    )
     pr_ids = fields.One2many('runbot_merge.pull_requests', compute='_compute_prs')
     state = fields.Selection([
         ('success', 'Success'),
@@ -1820,6 +1817,9 @@ class Stagings(models.Model):
         for staging in self:
             staging.batch_ids = staging.staging_batch_ids.runbot_merge_batch_id
 
+    def _search_batch_ids(self, operator, value):
+        return [('staging_batch_ids.runbot_merge_batch_id', operator, value)]
+
     @api.depends('heads')
     def _compute_statuses(self):
         """ Fetches statuses associated with the various heads, returned as
diff --git a/runbot_merge/tests/test_batch_consistency.py b/runbot_merge/tests/test_batch_consistency.py
new file mode 100644
index 00000000..ef3c88f6
--- /dev/null
+++ b/runbot_merge/tests/test_batch_consistency.py
@@ -0,0 +1,104 @@
+"""This module tests edge cases specific to the batch objects themselves,
+without wider relevance and thus no better location.
+""" +from utils import Commit, to_pr + + +def test_close_single(env, project, make_repo, setreviewers): + """If a batch has a single PR and that PR gets closed, the batch should be + inactive *and* blocked. + """ + repo = make_repo('wheee') + r = env['runbot_merge.repository'].create({ + 'project_id': project.id, + 'name': repo.name, + 'required_statuses': 'default', + 'group_id': False, + }) + setreviewers(r) + + with repo: + repo.make_commits(None, Commit("a", tree={"a": "a"}), ref='heads/master') + [c] = repo.make_commits('master', Commit('b', tree={"b": "b"})) + pr = repo.make_pr(head=c, target='master') + env.run_crons() + + pr_id = to_pr(env, pr) + batch_id = pr_id.batch_id + assert pr_id.state == 'opened' + assert batch_id.blocked + Batches = env['runbot_merge.batch'] + assert Batches.search_count([]) == 1 + + with repo: + pr.close() + + assert pr_id.state == 'closed' + assert batch_id.all_prs == pr_id + assert batch_id.prs == pr_id.browse(()) + assert batch_id.blocked == "all prs are closed" + assert not batch_id.active + + assert Batches.search_count([]) == 0 + +def test_close_multiple(env, project, make_repo, setreviewers): + """If a batch has a single PR and that PR gets closed, the batch should be + inactive *and* blocked. + """ + Batches = env['runbot_merge.batch'] + repo1 = make_repo('wheee') + repo2 = make_repo('wheeee') + project.write({ + 'repo_ids': [(0, 0, { + 'name': repo1.name, + 'required_statuses': 'default', + 'group_id': False, + }), (0, 0, { + 'name': repo2.name, + 'required_statuses': 'default', + 'group_id': False, + })] + }) + setreviewers(*project.repo_ids) + + with repo1: + repo1.make_commits(None, Commit("a", tree={"a": "a"}), ref='heads/master') + repo1.make_commits('master', Commit('b', tree={"b": "b"}), ref='heads/a_pr') + pr1 = repo1.make_pr(head='a_pr', target='master') + + with repo2: + repo2.make_commits(None, Commit("a", tree={"a": "a"}), ref='heads/master') + repo2.make_commits('master', Commit('b', tree={"b": "b"}), ref='heads/a_pr') + pr2 = repo2.make_pr(head='a_pr', target='master') + + pr1_id = to_pr(env, pr1) + pr2_id = to_pr(env, pr2) + batch_id = pr1_id.batch_id + assert pr2_id.batch_id == batch_id + + assert pr1_id.state == 'opened' + assert pr2_id.state == 'opened' + assert batch_id.all_prs == pr1_id | pr2_id + assert batch_id.prs == pr1_id | pr2_id + assert batch_id.active + assert Batches.search_count([]) == 1 + + with repo1: + pr1.close() + + assert pr1_id.state == 'closed' + assert pr2_id.state == 'opened' + assert batch_id.all_prs == pr1_id | pr2_id + assert batch_id.prs == pr2_id + assert batch_id.active + assert Batches.search_count([]) == 1 + + with repo2: + pr2.close() + + assert pr1_id.state == 'closed' + assert pr2_id.state == 'closed' + assert batch_id.all_prs == pr1_id | pr2_id + assert batch_id.prs == env['runbot_merge.pull_requests'].browse(()) + assert not batch_id.active + assert Batches.search_count([]) == 0 diff --git a/runbot_merge/tests/test_multirepo.py b/runbot_merge/tests/test_multirepo.py index 1f09b817..873986dc 100644 --- a/runbot_merge/tests/test_multirepo.py +++ b/runbot_merge/tests/test_multirepo.py @@ -811,14 +811,40 @@ class TestBlocked: repo_a.make_commits(None, Commit('initial', tree={'a0': 'a'}), ref='heads/master') repo_b.make_commits(None, Commit('initial', tree={'b0': 'b'}), ref='heads/master') - pr = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) - b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], 
reviewer=config['role_reviewer']['token'], statuses=[]) + pr1_a = make_pr(repo_a, 'xxx', [{'a1': 'a'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'],) + pr1_b = make_pr(repo_b, 'xxx', [{'b1': 'b'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token'], statuses=[]) env.run_crons() - p = to_pr(env, pr) - assert p.blocked - with repo_b: b.close() - assert not p.blocked + head_a = repo_a.commit('master').id + head_b = repo_b.commit('master').id + pr1_a_id = to_pr(env, pr1_a) + pr1_b_id = to_pr(env, pr1_b) + assert pr1_a_id.blocked + with repo_b: pr1_b.close() + assert not pr1_a_id.blocked + assert len(pr1_a_id.batch_id.all_prs) == 2 + assert pr1_a_id.state == 'ready' + assert pr1_b_id.state == 'closed' + env.run_crons() + assert pr1_a_id.staging_id + with repo_a, repo_b: + repo_a.post_status('staging.master', 'success') + repo_b.post_status('staging.master', 'success') + env.run_crons() + assert pr1_a_id.state == 'merged' + assert pr1_a_id.batch_id.merge_date + assert repo_a.commit('master').id != head_a, \ + "the master of repo A should be updated" + assert repo_b.commit('master').id == head_b, \ + "the master of repo B should not be updated" + + with repo_a: + pr2_a = make_pr(repo_a, "xxx", [{'x': 'x'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + env.run_crons() + pr2_a_id = to_pr(env, pr2_a) + assert pr2_a_id.batch_id != pr1_a_id.batch_id + assert pr2_a_id.label == pr1_a_id.label + assert len(pr2_a_id.batch_id.all_prs) == 1 def test_linked_merged(self, env, repo_a, repo_b, config): with repo_a, repo_b:
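
For readers unfamiliar with the pattern the commit message relies on, here is a minimal sketch of a stored `active` compute driven by the closed state of related records. It is not the actual runbot_merge code: the `x.batch`/`x.pr` model names and the reduced field set are illustrative assumptions only.

# Illustrative sketch only -- simplified stand-ins for the real models.
from odoo import api, fields, models


class Batch(models.Model):
    _name = 'x.batch'
    _description = "illustrative batch"

    # every PR ever attached to the batch, closed ones included
    all_prs = fields.One2many('x.pr', 'batch_id')
    # convenience view over the PRs which are still open
    prs = fields.One2many('x.pr', compute='_compute_open_prs')
    # stored, so the ORM's default `active_test` filtering applies on search():
    # a batch whose PRs are all closed drops out of regular searches
    active = fields.Boolean(compute='_compute_active', store=True)

    @api.depends('all_prs.closed')
    def _compute_active(self):
        for batch in self:
            batch.active = any(not pr.closed for pr in batch.all_prs)

    @api.depends('all_prs.closed')
    def _compute_open_prs(self):
        for batch in self:
            batch.prs = batch.all_prs.filtered(lambda pr: not pr.closed)


class PullRequest(models.Model):
    _name = 'x.pr'
    _description = "illustrative pull request"

    closed = fields.Boolean(default=False)
    batch_id = fields.Many2one('x.batch')

Because `active` is stored, a plain `env['x.batch'].search([...])` silently excludes inactive batches; code that needs the full history (such as the genealogy traversal patched above) opts back in with `.with_context(active_test=False)`.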