From 461db6a3e72574662ad9336230a447755d7549dc Mon Sep 17 00:00:00 2001
From: Xavier Morel
Date: Fri, 6 Dec 2024 13:44:08 +0100
Subject: [PATCH] [ADD] runbot_merge: closing linked issues on merge

Requires parsing the commit messages, as github just plain doesn't
collate the info from there, but also the descriptions: apparently
github only adds those to the references if the PR targets the default
branch. That's not a limitation we want.

Meaning at the end of the day, the only thing we're calling graphql for
is explicit manual linking, which can't be tested without extending the
github API (and then would only be testable on DC), and which I'm not
sure anyone bothers with...

Limitations
-----------

- Links are retrieved on staging (so if one is added later it won't be
  taken into account).
- Only repository-local links are handled, not cross-repository.

Fixes #777
---
 conftest.py                                 | 40 ++++++++++---
 runbot_merge/__manifest__.py                |  1 +
 runbot_merge/github.py                      |  3 +-
 runbot_merge/models/crons/__init__.py       |  1 +
 runbot_merge/models/crons/issues_closer.py  | 21 +++++++
 runbot_merge/models/crons/issues_closer.xml | 23 ++++++++
 runbot_merge/models/pull_requests.py        |  6 ++
 runbot_merge/models/stagings_create.py      | 60 +++++++++++++++++++-
 runbot_merge/tests/test_oddities.py         | 63 +++++++++++++++++++++
 9 files changed, 209 insertions(+), 9 deletions(-)
 create mode 100644 runbot_merge/models/crons/issues_closer.py
 create mode 100644 runbot_merge/models/crons/issues_closer.xml

diff --git a/conftest.py b/conftest.py
index de4848e9..7b42dd87 100644
--- a/conftest.py
+++ b/conftest.py
@@ -5,6 +5,8 @@ import errno
 import select
 import shutil
 import threading
+import typing
+from dataclasses import dataclass
 from typing import Optional
 
 """
@@ -637,7 +639,7 @@ def make_repo(capsys, request, config, tunnel, users):
         # create repo
         r = check(github.post(endpoint, json={
             'name': name,
-            'has_issues': False,
+            'has_issues': True,
             'has_projects': False,
             'has_wiki': False,
             'auto_init': False,
@@ -696,6 +698,24 @@ def _rate_limited(req):
 
 Commit = collections.namedtuple('Commit', 'id tree message author committer parents')
 
+
+@dataclass
+class Issue:
+    repo: Repo
+    token: str | None
+    number: int
+
+    @property
+    def state(self) -> typing.Literal['open', 'closed']:
+        r = self.repo._get_session(self.token)\
+            .get(f'https://api.github.com/repos/{self.repo.name}/issues/{self.number}')
+        assert r.ok, r.text
+        state = r.json()['state']
+        assert state in ('open', 'closed'), f"Invalid {state}, expected 'open' or 'closed'"
+        return state
+
+
 class Repo:
     def __init__(self, session, fullname, repos):
         self._session = session
@@ -975,17 +995,13 @@ class Repo:
         title = next(parts)
         body = next(parts, None)
 
-        headers = {}
-        if token:
-            headers['Authorization'] = 'token {}'.format(token)
-
         # FIXME: change tests which pass a commit id to make_pr & remove this
         if re.match(r'[0-9a-f]{40}', head):
            ref = "temp_trash_because_head_must_be_a_ref_%d" % next(ct)
            self.make_ref('heads/' + ref, head)
            head = ref
 
-        r = self._session.post(
+        r = self._get_session(token).post(
             'https://api.github.com/repos/{}/pulls'.format(self.name),
             json={
                 'title': title,
@@ -994,7 +1010,6 @@ class Repo:
                 'base': target,
                 'draft': draft,
             },
-            headers=headers,
         )
         assert 200 <= r.status_code < 300, r.text
 
@@ -1025,6 +1040,17 @@ class Repo:
             if not r.links.get('next'):
                 return
 
+    def make_issue(self, title, *, body=None, token=None) -> Issue:
+        assert self.hook
+
+        r = self._get_session(token).post(
+            f"https://api.github.com/repos/{self.name}/issues",
+            json={'title': title, 'body': body}
+        )
+        assert 200 <= r.status_code < 300, r.text
+        return Issue(self, token, r.json()['number'])
+
+
     def __enter__(self):
         self.hook = True
         return self
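
As an illustration only (not part of the patch), a minimal sketch of how a test could drive the new Repo.make_issue helper and the Issue.state property, assuming the suite's usual repo fixture:

def test_issue_helper_sketch(repo):
    # make_issue asserts that the webhook context is active, hence the `with`
    with repo:
        issue = repo.make_issue("some issue", body="some description")
    # `state` re-fetches the issue from the GitHub API on every access
    assert issue.state == 'open'
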
diff --git a/runbot_merge/__manifest__.py b/runbot_merge/__manifest__.py
index 65dfc4b4..3688a1e7 100644
--- a/runbot_merge/__manifest__.py
+++ b/runbot_merge/__manifest__.py
@@ -9,6 +9,7 @@
         'data/merge_cron.xml',
         'models/crons/git_maintenance.xml',
         'models/crons/cleanup_scratch_branches.xml',
+        'models/crons/issues_closer.xml',
         'data/runbot_merge.pull_requests.feedback.template.csv',
         'views/res_partner.xml',
         'views/runbot_merge_project.xml',
diff --git a/runbot_merge/github.py b/runbot_merge/github.py
index 98d2e4a3..e99b5478 100644
--- a/runbot_merge/github.py
+++ b/runbot_merge/github.py
@@ -151,7 +151,8 @@ class GH(object):
         if to_sleep > 0:
             time.sleep(to_sleep)
 
-        path = f'/repos/{self._repo}/{path}'
+        if path != '/graphql':
+            path = f'/repos/{self._repo}/{path}'
         r = self._session.request(method, self._url + path, params=params, json=json)
         if method.casefold() != 'get':
             self._last_update = time.time() + int(r.headers.get('Retry-After', 0))
diff --git a/runbot_merge/models/crons/__init__.py b/runbot_merge/models/crons/__init__.py
index f3eed923..0d8255a0 100644
--- a/runbot_merge/models/crons/__init__.py
+++ b/runbot_merge/models/crons/__init__.py
@@ -1,2 +1,3 @@
 from . import git_maintenance
 from . import cleanup_scratch_branches
+from . import issues_closer
diff --git a/runbot_merge/models/crons/issues_closer.py b/runbot_merge/models/crons/issues_closer.py
new file mode 100644
index 00000000..904efcf9
--- /dev/null
+++ b/runbot_merge/models/crons/issues_closer.py
@@ -0,0 +1,21 @@
+import logging
+
+from odoo import models, fields
+
+_logger = logging.getLogger(__name__)
+class IssuesCloser(models.Model):
+    _name = 'runbot_merge.issues_closer'
+    _description = "closes issues linked to PRs"
+
+    repository_id = fields.Many2one('runbot_merge.repository', required=True)
+    number = fields.Integer(required=True)
+
+    def _run(self):
+        ghs = {}
+        while t := self.search([], limit=1):
+            gh = ghs.get(t.repository_id.id)
+            if not gh:
+                gh = ghs[t.repository_id.id] = t.repository_id.github()
+
+            gh('PATCH', f'issues/{t.number}', json={'state': 'closed'}, check=False)
+            t.unlink()
diff --git a/runbot_merge/models/crons/issues_closer.xml b/runbot_merge/models/crons/issues_closer.xml
new file mode 100644
index 00000000..fac2f5c6
--- /dev/null
+++ b/runbot_merge/models/crons/issues_closer.xml
@@ -0,0 +1,23 @@
+<odoo>
+    <record id="access_issues_closer" model="ir.model.access">
+        <field name="name">Access to issues closer is useless</field>
+        <field name="model_id" ref="model_runbot_merge_issues_closer"/>
+        <field name="perm_read">0</field>
+        <field name="perm_create">0</field>
+        <field name="perm_write">0</field>
+        <field name="perm_unlink">0</field>
+    </record>
+
+    <record id="issues_closer_cron" model="ir.cron">
+        <field name="name">Close issues linked to merged PRs</field>
+        <field name="model_id" ref="model_runbot_merge_issues_closer"/>
+        <field name="state">code</field>
+        <field name="code">model._run()</field>
+        <field name="doall" eval="False"/>
+        <field name="numbercall">-1</field>
+    </record>
+</odoo>
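
For reference, a standalone sketch of the path-routing rule the github.py change introduces; the base URL here is an assumption for the example (the real GH proxy builds it from its own configuration):

def _resolve(repo: str, path: str, base: str = 'https://api.github.com') -> str:
    # '/graphql' is the only endpoint that must not be prefixed with /repos/<repo>
    if path != '/graphql':
        path = f'/repos/{repo}/{path}'
    return base + path

assert _resolve('org/project', '/graphql') == 'https://api.github.com/graphql'
assert _resolve('org/project', 'issues/42') == 'https://api.github.com/repos/org/project/issues/42'
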
diff --git a/runbot_merge/models/pull_requests.py b/runbot_merge/models/pull_requests.py
index 1833a6f2..7866a1c9 100644
--- a/runbot_merge/models/pull_requests.py
+++ b/runbot_merge/models/pull_requests.py
@@ -8,6 +8,7 @@ import datetime
 import itertools
 import json
 import logging
+import pprint
 import re
 import time
 from enum import IntEnum
@@ -2084,6 +2085,8 @@ class Stagings(models.Model):
     statuses = fields.Binary(compute='_compute_statuses')
     statuses_cache = fields.Text(default='{}', required=True)
 
+    issues_to_close = fields.Json(default=lambda _: [], help="list of tasks to close if this staging succeeds")
+
     @api.depends('staged_at', 'staging_end')
     def _compute_duration(self):
         for s in self:
@@ -2414,6 +2417,9 @@ class Stagings(models.Model):
                             'pull_request': pr.number,
                             'tags_add': json.dumps([pseudobranch]),
                         })
+                if self.issues_to_close:
+                    self.env['runbot_merge.issues_closer'].create(self.issues_to_close)
+                    self.env.ref('runbot_merge.issues_closer_cron')._trigger()
             finally:
                 self.write({'active': False})
         elif self.state == 'failure' or self.is_timed_out():
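
Purely as an illustration of the data flow above (the ids are made up), issues_to_close holds ready-to-create vals for runbot_merge.issues_closer, which the triggered cron then drains one record at a time:

issues_to_close = [
    {'repository_id': 1, 'number': 508},  # e.g. "Fixes #508" found in a staged commit message
    {'repository_id': 1, 'number': 777},  # e.g. referenced from the PR description
]
# on staging success (as wired up above):
#   env['runbot_merge.issues_closer'].create(issues_to_close)
#   env.ref('runbot_merge.issues_closer_cron')._trigger()
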
diff --git a/runbot_merge/models/stagings_create.py b/runbot_merge/models/stagings_create.py
index ca517395..afa57fa2 100644
--- a/runbot_merge/models/stagings_create.py
+++ b/runbot_merge/models/stagings_create.py
@@ -1,10 +1,12 @@
 import base64
+import collections
 import contextlib
 import dataclasses
 import io
 import json
 import logging
 import os
+import pprint
 import re
 from collections.abc import Mapping
 from difflib import Differ
@@ -34,11 +36,11 @@ class StagingSlice:
     - gh is a cache for the github proxy object (contains a session for
       reusing connection)
     - head is the current staging head for the branch of that repo
-    - working_copy is the local working copy for the staging for that repo
     """
     gh: github.GH
     head: str
     repo: git.Repo
+    tasks: set[int] = dataclasses.field(default_factory=set)
 
 
 StagingState: TypeAlias = Dict[Repository, StagingSlice]
@@ -106,6 +108,7 @@ def try_staging(branch: Branch) -> Optional[Stagings]:
     env = branch.env
     heads = []
     commits = []
+    issues = []
     for repo, it in staging_state.items():
         if it.head == original_heads[repo] and branch.project_id.uniquifier:
             # if we didn't stage anything for that repo and uniquification is
@@ -161,6 +164,10 @@ For-Commit-Id: {it.head}
             'repository_id': repo.id,
             'commit_id': commit,
         }))
+        issues.extend(
+            {'repository_id': repo.id, 'number': i}
+            for i in it.tasks
+        )
 
     # create actual staging object
     st: Stagings = env['runbot_merge.stagings'].create({
@@ -168,6 +175,7 @@ For-Commit-Id: {it.head}
         'staging_batch_ids': [Command.create({'runbot_merge_batch_id': batch.id}) for batch in staged],
         'heads': heads,
         'commits': commits,
+        'issues_to_close': issues,
     })
     for repo, it in staging_state.items():
         _logger.info(
@@ -472,6 +480,56 @@ def stage(pr: PullRequests, info: StagingSlice, related_prs: PullRequests) -> Tu
     if pr_head_tree != pr_base_tree and merge_head_tree == merge_base_tree:
         raise exceptions.MergeError(pr, f'results in an empty tree when merged, might be the duplicate of a merged PR.')
 
+    finder = re.compile(r"""
+        \b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved)
+        \s+\#([0-9]+)
+    """, re.VERBOSE | re.IGNORECASE)
+
+    # TODO: maybe support closing issues in other repositories of the same project?
+    info.tasks.update(
+        int(m[1])
+        for commit in pr_commits
+        for m in finder.finditer(commit['commit']['message'])
+    )
+    # Turns out if the PR is not targeted at the default branch, apparently
+    # github doesn't parse its description and add the links it contains to
+    # the closing issues references, it's just "fuck off". So we need to
+    # handle that one by hand too.
+    info.tasks.update(int(m[1]) for m in finder.finditer(pr.message))
+    # So this ends up being *exclusively* for manually linked issues #feelsgoodman.
+    owner, name = pr.repository.name.split('/')
+    r = info.gh('post', '/graphql', json={
+        'query': """
+query ($owner: String!, $name: String!, $pr: Int!) {
+  repository(owner: $owner, name: $name) {
+    pullRequest(number: $pr) {
+      closingIssuesReferences(last: 100) {
+        nodes {
+          number
+          repository {
+            nameWithOwner
+          }
+        }
+      }
+    }
+  }
+}
+        """,
+        'variables': {'owner': owner, 'name': name, 'pr': pr.number}
+    })
+    res = r.json()
+    if 'errors' in res:
+        _logger.warning(
+            "Failed to fetch closing issues for %s\n%s",
+            pr.display_name,
+            pprint.pformat(res['errors'])
+        )
+    else:
+        info.tasks.update(
+            n['number']
+            for n in res['data']['repository']['pullRequest']['closingIssuesReferences']['nodes']
+            if n['repository']['nameWithOwner'] == pr.repository.name
+        )
     return method, new_head
 
 def stage_squash(pr: PullRequests, info: StagingSlice, commits: List[github.PrCommit], related_prs: PullRequests) -> str:
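
To make the matching behaviour concrete, here is the closing-keyword pattern restated on its own (same regex as above, reproduced only for illustration) with a couple of example inputs:

import re

finder = re.compile(r"""
    \b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved)
    \s+\#([0-9]+)
""", re.VERBOSE | re.IGNORECASE)

# a closing keyword followed by "#<number>" is picked up, case-insensitively
assert [m[1] for m in finder.finditer("Fixes #42 and closes #43")] == ['42', '43']
# cross-repository references are deliberately not matched (repository-local only)
assert finder.findall("fixes odoo/runbot#1") == []
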
+ """ + project.write({'branch_ids': [(0, 0, {'name': 'other'})]}) + with repo: + i1 = repo.make_issue(f"Issue 1") + i2 = repo.make_issue(f"Issue 2") + + [m] = repo.make_commits(None, Commit('initial', tree={'m': 'm'}), ref="heads/master") + # non-default branch + repo.make_ref("heads/other", m) + # ensure the default branch is master so we have consistent testing state + r = repo._session.patch(f'https://api.github.com/repos/{repo.name}', json={'default_branch': 'master'}) + assert r.ok, r.text + + with repo: + # there are only two locations relevant to us: + # + # - commit message + # - pr description + # + # the other two are manually linked issues (there's no API for that so + # we can't test it) and the merge message (which for us is the PR + # message) + repo.make_commits(m, Commit(f'This is my commit\n\nfixes #{i1.number}', tree={'m': 'c1'}), ref="heads/pr") + pr = repo.make_pr(target=target, head='pr', title="a pr", body=f"fixes #{i2.number}") + pr.post_comment('hansen r+', config['role_reviewer']['token']) + repo.post_status(pr.head, 'success') + + env.run_crons(None) + + pr_id = to_pr(env, pr) + assert pr_id.state == 'ready' + assert pr_id.staging_id + + assert i1.state == 'open' + assert i2.state == 'open' + with repo: + repo.post_status(f'staging.{target}', 'success') + env.run_crons(None) + assert pr_id.state == 'merged' + assert i1.state == 'closed' + assert i2.state == 'closed'