# -*- coding: utf-8 -*-
"""Public website pages for the mergebot: project dashboard, per-branch
staging history, changelog, and a per-pull-request status page."""
import collections
import json
import pathlib

import markdown
import markupsafe
import werkzeug.exceptions

from odoo.http import Controller, route, request

# Page size for the branch staging-history listing (see `stagings()`).
LIMIT = 20
class MergebotDashboard(Controller):
    """Read-only website controllers for the mergebot dashboards."""

    @route('/runbot_merge', auth="public", type="http", website=True)
    def dashboard(self):
        """Landing page listing every project, archived ones included
        (hence the ``active_test=False`` context)."""
        return request.render('runbot_merge.dashboard', {
            'projects': request.env['runbot_merge.project'].with_context(active_test=False).sudo().search([]),
        })

    @route('/runbot_merge/<int:branch_id>', auth='public', type='http', website=True)
    def stagings(self, branch_id, until=None):
        """Paginated staging history of a branch, newest first.

        :param branch_id: database id of the ``runbot_merge.branch`` to show
        :param until: optional ``staged_at`` upper bound, used as a
            keyset-pagination cursor (the ``next`` value of the previous page)
        :raises werkzeug.exceptions.NotFound: if the branch does not exist
        """
        branch = request.env['runbot_merge.branch'].browse(branch_id).sudo().exists()
        if not branch:
            raise werkzeug.exceptions.NotFound()

        # Fetch one record beyond the page size so we know whether a next
        # page exists without issuing a separate count query.
        stagings = request.env['runbot_merge.stagings'].with_context(active_test=False).sudo().search([
            ('target', '=', branch.id),
            # (True, '=', True) is an always-true leaf keeping the domain
            # shape stable when no cursor was provided
            ('staged_at', '<=', until) if until else (True, '=', True),
        ], order='staged_at desc', limit=LIMIT + 1)

        return request.render('runbot_merge.branch_stagings', {
            'branch': branch,
            'stagings': stagings[:LIMIT],
            # cursor for the following page: staged_at of the overflow record
            'next': stagings[-1].staged_at if len(stagings) > LIMIT else None,
        })

    def _entries(self):
        """Collect raw changelog entries from the on-disk ``changelog`` folder.

        :returns: ``(directory_name, [entry_text, ...])`` pairs, one per
            sub-directory of ``changelog``; an empty list if the folder is
            missing entirely
        """
        changelog = pathlib.Path(__file__).parent.parent / 'changelog'
        if changelog.is_dir():
            return [
                (d.name, [f.read_text(encoding='utf-8') for f in d.iterdir() if f.is_file()])
                for d in changelog.iterdir()
                # guard against stray regular files at the top level:
                # d.iterdir() on a non-directory raises NotADirectoryError
                if d.is_dir()
            ]
        return []

    def entries(self, item_converter):
        """Changelog entries grouped by section, newest section first.

        :param item_converter: callable applied to each raw entry text
        :returns: ordered mapping of section name to converted entries
        """
        entries = collections.OrderedDict()
        for key, items in sorted(self._entries(), reverse=True):
            entries.setdefault(key, []).extend(map(item_converter, items))
        return entries

    @route('/runbot_merge/changelog', auth='public', type='http', website=True)
    def changelog(self):
        """Changelog page: converts each entry's markdown to markup-safe HTML."""
        md = markdown.Markdown(extensions=['nl2br'], output_format='html5')
        entries = self.entries(lambda t: markupsafe.Markup(md.convert(t)))
        return request.render('runbot_merge.changelog', {
            'entries': entries,
        })

    @route('/<org>/<repo>/pull/<int(min=1):pr>', auth='public', type='http', website=True)
    def pr(self, org, repo, pr):
        """Mergebot status page for a single pull request.

        :param org: GitHub organisation (owner) of the repository
        :param repo: repository name within the organisation
        :param pr: pull request number (positive integer)
        :raises werkzeug.exceptions.NotFound: if the PR is unknown, or the
            repository's access group is not among the current user's groups
            (`<=` is recordset containment in Odoo); a 404 rather than a 403
            so the repository's existence is not leaked
        """
        pr_id = request.env['runbot_merge.pull_requests'].sudo().search([
            ('repository.name', '=', f'{org}/{repo}'),
            ('number', '=', int(pr)),
        ])
        if not pr_id:
            raise werkzeug.exceptions.NotFound()
        if not pr_id.repository.group_id <= request.env.user.groups_id:
            raise werkzeug.exceptions.NotFound()

        st = {}
        if pr_id.statuses:
            # normalise `statuses` to map to a dict
            st = {
                k: {'state': v} if isinstance(v, str) else v
                for k, v in json.loads(pr_id.statuses_full).items()
            }
        return request.render('runbot_merge.view_pull_request', {
            'pr': pr_id,
            # commits_map maps original commit shas to merged shas; the ''
            # key holds the head used for the merge (absent if not merged)
            'merged_head': json.loads(pr_id.commits_map).get(''),
            'statuses': st
        })