[ADD] runbot_merge: remote user provisioning

New accounts endpoint so that the SSO can push new pre-configured
users / employees directly. This lowers the maintenance burden.
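
For illustration, a minimal sketch of the JSON-RPC call the SSO side could
make against the new endpoint. The endpoint path, payload shape and sample
values are taken from the controller and tests below; the host and port are
placeholders:

import requests

# placeholder host/port; 'sub' is the SSO subject, mapped to oauth_uid server-side
r = requests.post('http://localhost:8069/runbot_merge/provision', json={
    'jsonrpc': '2.0',
    'id': None,
    'method': 'call',
    'params': {'users': [{
        'name': "George Pearce",
        'email': 'george@example.org',
        'github_login': 'emubitch',
        'sub': '19321102',
    }]},
})
r.raise_for_status()
created, updated = r.json()['result']  # counts of users created / updated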

Also remove one of the source partners from the merge test, as
ordering seems wonky for unclear reasons, leading to random failures
of that test.
Xavier Morel 2022-06-03 12:08:49 +02:00
parent 1dcab0744d
commit 56898df93f
5 changed files with 251 additions and 12 deletions

View File

@@ -50,11 +50,14 @@ import functools
 import http.client
 import itertools
 import os
+import pathlib
+import pprint
 import random
 import re
 import socket
 import subprocess
 import sys
+import tempfile
 import time
 import uuid
 import warnings
@@ -271,7 +274,7 @@ class DbDict(dict):
         subprocess.run([
             'odoo', '--no-http',
             '--addons-path', self._adpath,
-            '-d', db, '-i', module,
+            '-d', db, '-i', module + ',auth_oauth',
             '--max-cron-threads', '0',
             '--stop-after-init',
             '--log-level', 'warn'
@@ -334,17 +337,38 @@ def port():
         s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         return s.getsockname()[1]
 
+@pytest.fixture(scope='session')
+def dummy_addons_path():
+    with tempfile.TemporaryDirectory() as dummy_addons_path:
+        mod = pathlib.Path(dummy_addons_path, 'saas_worker')
+        mod.mkdir(0o700)
+        (mod / '__init__.py').write_bytes(b'')
+        (mod / '__manifest__.py').write_text(pprint.pformat({
+            'name': 'dummy saas_worker',
+            'version': '1.0',
+        }), encoding='utf-8')
+        (mod / 'util.py').write_text("""\
+def from_role(_):
+    return lambda fn: fn
+""", encoding='utf-8')
+
+        yield dummy_addons_path
+
 @pytest.fixture
-def server(request, db, port, module):
+def server(request, db, port, module, dummy_addons_path):
     log_handlers = [
         'odoo.modules.loading:WARNING',
     ]
     if not request.config.getoption('--log-github'):
         log_handlers.append('github_requests:WARNING')
 
+    addons_path = ','.join(map(str, [
+        request.config.getoption('--addons-path'),
+        dummy_addons_path,
+    ]))
     p = subprocess.Popen([
         'odoo', '--http-port', str(port),
-        '--addons-path', request.config.getoption('--addons-path'),
+        '--addons-path', addons_path,
         '-d', db,
         '--max-cron-threads', '0', # disable cron threads (we're running crons by hand)
         *itertools.chain.from_iterable(('--log-handler', h) for h in log_handlers),

View File

@@ -1,18 +1,107 @@
 # -*- coding: utf-8 -*-
-from odoo import http
-from odoo.http import request
+import logging
+
+from odoo.http import Controller, request, route
 
 try:
     from odoo.addons.saas_worker.util import from_role
 except ImportError:
     def from_role(_):
         return lambda _: None
 
+_logger = logging.getLogger(__name__)
+
 
-class MergebotReviewerProvisioning(http.Controller):
+class MergebotReviewerProvisioning(Controller):
 
+    @from_role('accounts')
+    @route('/runbot_merge/users', type='json', auth='public')
+    def list_users(self):
+        env = request.env(su=True)
+        return [{
+            'github_login': u.github_login,
+            'email': u.email,
+        }
+            for u in env['res.users'].search([])
+        ]
+
     @from_role('accounts')
-    @http.route(['/runbot_merge/get_reviewers'], type='json', auth='public')
+    @route('/runbot_merge/provision', type='json', auth='public')
+    def provision_user(self, users):
+        _logger.info('Provisioning %s users: %s.', len(users), ', '.join(map(
+            '{email} ({github_login})'.format_map,
+            users
+        )))
+        env = request.env(su=True)
+        Partners = env['res.partner']
+        Users = env['res.users']
+
+        existing_partners = Partners.search([
+            '|', ('email', 'in', [u['email'] for u in users]),
+                 ('github_login', 'in', [u['github_login'] for u in users])
+        ])
+        _logger.info("Found %d existing matching partners.", len(existing_partners))
+
+        partners = {}
+        for p in existing_partners:
+            if p.email:
+                # email is not unique, though we want it to be (probably)
+                current = partners.get(p.email)
+                if current:
+                    _logger.warning(
+                        "Lookup conflict: %r set on two partners %r and %r.",
+                        p.email, current.display_name, p.display_name,
+                    )
+                else:
+                    partners[p.email] = p
+            if p.github_login:
+                # assume there can't be an existing one because github_login is
+                # unique, and should not be able to collide with emails
+                partners[p.github_login] = p
+
+        if 'oauth_provider_id' in Users:
+            odoo_provider = env.ref('auth_oauth.provider_openerp', False)
+            if odoo_provider:
+                for new in users:
+                    if 'sub' in new:
+                        new['oauth_provider_id'] = odoo_provider.id
+                        new['oauth_uid'] = new.pop('sub')
+
+        to_create = []
+        created = updated = 0
+        for new in users:
+            # prioritise by github_login as that's the unique-est point of information
+            current = partners.get(new['github_login']) or partners.get(new['email']) or Partners
+            # entry doesn't have user -> create user
+            if not current.user_ids:
+                new['login'] = new['email']
+                # entry has partner -> create user linked to existing partner
+                # (and update partner implicitly)
+                if current:
+                    new['partner_id'] = current.id
+                to_create.append(new)
+                continue
+
+            # otherwise update user (if there is anything to update)
+            user = current.user_ids
+            if len(user) != 1:
+                _logger.warning("Got %d users for partner %s.", len(user), current.display_name)
+                user = user[:1]
+            update_vals = {
+                k: v
+                for k, v in new.items()
+                if v not in ('login', 'email')
+                if v != (user[k] if k != 'oauth_provider_id' else user[k].id)
+            }
+            if update_vals:
+                user.write(update_vals)
+                updated += 1
+
+        if to_create:
+            Users.create(to_create)
+            created = len(to_create)
+
+        _logger.info("Provisioning: created %d updated %d.", created, updated)
+        return [created, updated]
+
+    @from_role('accounts')
+    @route(['/runbot_merge/get_reviewers'], type='json', auth='public')
     def fetch_reviewers(self, **kwargs):
         reviewers = request.env['res.partner.review'].sudo().search([
             '|', ('review', '=', True), ('self_review', '=', True)
@@ -20,7 +109,7 @@ class MergebotReviewerProvisioning(http.Controller):
         return reviewers
 
     @from_role('accounts')
-    @http.route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST'])
+    @route(['/runbot_merge/remove_reviewers'], type='json', auth='public', methods=['POST'])
     def update_reviewers(self, github_logins, **kwargs):
         partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)])
         partners.write({

View File

@@ -13,6 +13,7 @@ class CIText(fields.Char):
 class Partner(models.Model):
     _inherit = 'res.partner'
 
+    email = fields.Char(index=True)
     github_login = CIText()
     delegate_reviewer = fields.Many2many('runbot_merge.pull_requests')
     formatted_email = fields.Char(string="commit email", compute='_rfc5322_formatted')

View File

@@ -1,11 +1,10 @@
+import requests
+
 from utils import Commit, to_pr
 
 
 def test_partner_merge(env):
     p_src = env['res.partner'].create({
-        'name': 'kfhsf',
-        'github_login': 'tyu'
-    }) | env['res.partner'].create({
         'name': "xxx",
         'github_login': 'xxx'
     })
@@ -97,3 +96,36 @@ def test_message_desync(env, project, make_repo, users, setreviewers, config):
     assert st.message.startswith('title\n\nbody'),\
         "the stored PR message should have been ignored when staging"
     assert st.parents == [m, c], "check the staging's ancestry is the right one"
+
+def test_unreviewer(env, project, port):
+    repo = env['runbot_merge.repository'].create({
+        'project_id': project.id,
+        'name': 'a_test_repo',
+        'status_ids': [(0, 0, {'context': 'status'})]
+    })
+
+    p = env['res.partner'].create({
+        'name': 'George Pearce',
+        'github_login': 'emubitch',
+        'review_rights': [(0, 0, {'repository_id': repo.id, 'review': True})]
+    })
+
+    r = requests.post(f'http://localhost:{port}/runbot_merge/get_reviewers', json={
+        'jsonrpc': '2.0',
+        'id': None,
+        'method': 'call',
+        'params': {},
+    })
+    r.raise_for_status()
+    assert 'error' not in r.json()
+    assert r.json()['result'] == ['emubitch']
+
+    r = requests.post(f'http://localhost:{port}/runbot_merge/remove_reviewers', json={
+        'jsonrpc': '2.0',
+        'id': None,
+        'method': 'call',
+        'params': {'github_logins': ['emubitch']},
+    })
+    r.raise_for_status()
+    assert 'error' not in r.json()
+    assert p.review_rights == env['res.partner.review']

View File

@@ -0,0 +1,93 @@
+import pytest
+import requests
+
+GEORGE = {
+    'name': "George Pearce",
+    'email': 'george@example.org',
+    'github_login': 'emubitch',
+    'sub': '19321102'
+}
+
+def test_basic_provisioning(env, port):
+    r = provision_user(port, [GEORGE])
+    assert r == [1, 0]
+
+    g = env['res.users'].search([('login', '=', GEORGE['email'])])
+    assert g.partner_id.name == GEORGE['name']
+    assert g.partner_id.github_login == GEORGE['github_login']
+    assert g.oauth_uid == GEORGE['sub']
+
+    # repeated provisioning should be a no-op
+    r = provision_user(port, [GEORGE])
+    assert r == [0, 0]
+
+    # the email (real login) should be the determinant, any other field is
+    # updatable
+    r = provision_user(port, [{**GEORGE, 'name': "x"}])
+    assert r == [0, 1]
+
+    r = provision_user(port, [dict(GEORGE, name="x", github_login="y", sub="42")])
+    assert r == [0, 1]
+
+    # can't fail anymore because github_login now used to look up the existing
+    # user
+    # with pytest.raises(Exception):
+    #     provision_user(port, [{
+    #         'name': "other@example.org",
+    #         'email': "x",
+    #         'github_login': "y",
+    #         'sub': "42"
+    #     }])
+
+    r = provision_user(port, [dict(GEORGE, active=False)])
+    assert r == [0, 1]
+    assert not env['res.users'].search([('login', '=', GEORGE['email'])])
+    assert env['res.partner'].search([('email', '=', GEORGE['email'])])
+
+def test_upgrade_partner(env, port):
+    # If a partner exists for a github login (and / or email?) it can be
+    # upgraded by creating a user for it
+    p = env['res.partner'].create({
+        'name': GEORGE['name'],
+        'email': GEORGE['email'],
+    })
+    r = provision_user(port, [GEORGE])
+    assert r == [1, 0]
+    assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
+        'id': p.user_ids.id,
+        'github_login': GEORGE['github_login'],
+        'oauth_uid': GEORGE['sub'],
+        'email': GEORGE['email'],
+    }]
+
+    p.user_ids.unlink()
+    p.unlink()
+
+    p = env['res.partner'].create({
+        'name': GEORGE['name'],
+        'github_login': GEORGE['github_login'],
+    })
+    r = provision_user(port, [GEORGE])
+    assert r == [1, 0]
+    assert p.user_ids.read(['email', 'github_login', 'oauth_uid']) == [{
+        'id': p.user_ids.id,
+        'github_login': GEORGE['github_login'],
+        'oauth_uid': GEORGE['sub'],
+        'email': GEORGE['email'],
+    }]
+
+    p.user_ids.unlink()
+    p.unlink()
+
+def provision_user(port, users):
+    r = requests.post(f'http://localhost:{port}/runbot_merge/provision', json={
+        'jsonrpc': '2.0',
+        'id': None,
+        'method': 'call',
+        'params': {'users': users},
+    })
+    r.raise_for_status()
+    json = r.json()
+    assert 'error' not in json
+
+    return json['result']