From 4a4252b4b912158365e3afa56aeb18d9115dd3e3 Mon Sep 17 00:00:00 2001
From: Xavier Morel
Date: Wed, 7 Jun 2023 14:42:08 +0200
Subject: [PATCH] [FIX] runbot_merge: holes in provisioning

- github logins are case-insensitive: while the db field is CI, the dict
  in which partners are stored for matching is not, and the caller may
  not preserve casing. Thus it's necessary to check casefolded database
  values against casefolded parameters, rather than matching exactly.
- users may get disabled by mistake or when one leaves the project; they
  may also get switched from internal to portal. Therefore it is
  necessary to re-enable and re-enroll them if they come back.
- while at it, remove the user's email when they depart, as they likely
  use an organisational email which they don't have access to anymore.

Side-note: also remove the limit on the number of users / partners being
created at once. Because there are almost no modules in the mergebot's
instance, creating partners goes quite fast (compared to a full
instance), so the limitation is almost certainly unnecessary (creating
~300 users seems to take ~450ms).

Fixes #776
---
 conftest.py                                  |  9 ++
 .../controllers/reviewer_provisioning.py     | 38 +++++---
 runbot_merge/tests/test_provisioning.py      | 97 +++++++++++++++++--
 3 files changed, 127 insertions(+), 17 deletions(-)
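Note (illustrative only, not part of the commit): the first point above
boils down to keying the matching dict by casefolded login and
casefolding the lookup as well. A minimal standalone sketch, where
`existing` and the sample logins are made up for illustration:

    # `existing` stands in for the partners fetched from the db,
    # keyed by github_login exactly as stored
    existing = {"XMorel": "partner-1"}

    existing.get("xmorel")                          # None: exact lookup misses
    folded = {k.casefold(): v for k, v in existing.items()}
    folded.get("XMorel".casefold())                 # "partner-1": casefolded lookup matches
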
diff --git a/conftest.py b/conftest.py
index 09ca14b7..8e7b05f9 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1078,6 +1078,15 @@ class Environment:
     def __getitem__(self, name):
         return Model(self, name)
 
+    def ref(self, xid, raise_if_not_found=True):
+        model, obj_id = self(
+            'ir.model.data', 'check_object_reference',
+            *xid.split('.', 1),
+            raise_on_access_error=raise_if_not_found
+        )
+        return Model(self, model, [obj_id]) if obj_id else None
+
+
     def run_crons(self, *xids, **kw):
         crons = xids or self._default_crons
         print('running crons', crons, file=sys.stderr)
diff --git a/runbot_merge/controllers/reviewer_provisioning.py b/runbot_merge/controllers/reviewer_provisioning.py
index 6a9b3af4..e0aa0fc7 100644
--- a/runbot_merge/controllers/reviewer_provisioning.py
+++ b/runbot_merge/controllers/reviewer_provisioning.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import logging
 
+from odoo import Command
 from odoo.http import Controller, request, route
 
 try:
@@ -34,7 +35,7 @@ class MergebotReviewerProvisioning(Controller):
         Partners = env['res.partner']
         Users = env['res.users']
 
-        existing_partners = Partners.search([
+        existing_partners = Partners.with_context(active_test=False).search([
             '|', ('email', 'in', [u['email'] for u in users]),
                  ('github_login', 'in', [u['github_login'] for u in users])
         ])
@@ -55,20 +56,24 @@
             if p.github_login:
                 # assume there can't be an existing one because github_login is
                 # unique, and should not be able to collide with emails
-                partners[p.github_login] = p
+                partners[p.github_login.casefold()] = p
 
+        portal = env.ref('base.group_portal')
         internal = env.ref('base.group_user')
         odoo_provider = env.ref('auth_oauth.provider_openerp')
 
         to_create = []
-        created = updated = 0
+        updated = 0
+        to_activate = Partners
         for new in users:
             if 'sub' in new:
                 new['oauth_provider_id'] = odoo_provider.id
                 new['oauth_uid'] = new.pop('sub')
 
             # prioritise by github_login as that's the unique-est point of information
-            current = partners.get(new['github_login']) or partners.get(new['email']) or Partners
+            current = partners.get(new['github_login'].casefold()) or partners.get(new['email']) or Partners
+            if not current.active:
+                to_activate |= current
             # entry doesn't have user -> create user
             if not current.user_ids:
                 # skip users without an email (= login) as that
@@ -77,7 +82,7 @@
                     continue
 
                 new['login'] = new['email']
-                new['groups_id'] = [(4, internal.id)]
+                new['groups_id'] = [Command.link(internal.id)]
                 # entry has partner -> create user linked to existing partner
                 # (and update partner implicitly)
                 if current:
@@ -90,19 +95,29 @@
                 if len(user) != 1:
                     _logger.warning("Got %d users for partner %s.", len(user), current.display_name)
                     user = user[:1]
+                new.setdefault("active", True)
                 update_vals = {
                     k: v
                     for k, v in new.items()
-                    if v not in ('login', 'email')
                     if v != (user[k] if k != 'oauth_provider_id' else user[k].id)
                 }
+                if user.has_group('base.group_portal'):
+                    update_vals['groups_id'] = [
+                        Command.unlink(portal.id),
+                        Command.link(internal.id),
+                    ]
+
                 if update_vals:
                     user.write(update_vals)
                     updated += 1
+
+        created = len(to_create)
         if to_create:
             # only create 100 users at a time to avoid request timeout
-            Users.create(to_create[:100])
-            created = len(to_create[:100])
+            Users.create(to_create)
+
+        if to_activate:
+            to_activate.active = True
 
         _logger.info("Provisioning: created %d updated %d.", created, updated)
         return [created, updated]
@@ -120,12 +135,13 @@
     def update_reviewers(self, github_logins, **kwargs):
         partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)])
         partners.write({
-            'review_rights': [(5, 0, 0)],
-            'delegate_reviewer': [(5, 0, 0)],
+            'email': False,
+            'review_rights': [Command.clear()],
+            'delegate_reviewer': [Command.clear()],
         })
 
         # Assign the linked users as portal users
         partners.mapped('user_ids').write({
-            'groups_id': [(6, 0, [request.env.ref('base.group_portal').id])]
+            'groups_id': [Command.set([request.env.ref('base.group_portal').id])]
         })
         return True
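Note (for reference, not part of the patch): the Command helpers used in
the controller above are the modern spelling of the legacy x2many command
tuples, so those particular rewrites are behaviour-preserving. The mapping
relied on here, shown with an arbitrary id of 42:

    from odoo import Command

    assert Command.link(42) == (4, 42, 0)       # replaces (4, internal.id)
    assert Command.unlink(42) == (3, 42, 0)     # drops a group without deleting it
    assert Command.clear() == (5, 0, 0)         # replaces (5, 0, 0)
    assert Command.set([42]) == (6, 0, [42])    # replaces (6, 0, [ids])
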
diff --git a/runbot_merge/tests/test_provisioning.py b/runbot_merge/tests/test_provisioning.py
index 6119bf17..af939305 100644
--- a/runbot_merge/tests/test_provisioning.py
+++ b/runbot_merge/tests/test_provisioning.py
@@ -15,10 +15,8 @@ def test_basic_provisioning(env, port):
     assert g.partner_id.name == GEORGE['name']
     assert g.partner_id.github_login == GEORGE['github_login']
     assert g.oauth_uid == GEORGE['sub']
-    (model, g_id) = env['ir.model.data']\
-        .check_object_reference('base', 'group_user')
-    assert model == 'res.groups'
-    assert g.groups_id.id == g_id, "check that users were provisioned as internal (not portal)"
+    internal = env.ref('base.group_user')
+    assert (g.groups_id & internal) == internal, "check that users were provisioned as internal (not portal)"
 
     # repeated provisioning should be a no-op
     r = provision_user(port, [GEORGE])
@@ -79,8 +77,20 @@ def test_upgrade_partner(env, port):
         'email': GEORGE['email'],
     }]
 
-    p.user_ids.unlink()
-    p.unlink()
+    # deactivate user to see if provisioning re-enables them
+    p.user_ids.active = False
+    r = provision_user(port, [GEORGE])
+    assert r == [0, 1]
+    assert len(p.user_ids) == 1, "provisioning should re-enable user"
+    assert p.user_ids.active
+
+    # deactivate both user and partner to see if both get re-enabled
+    p.user_ids.active = False
+    p.active = False
+    r = provision_user(port, [GEORGE])
+    assert r == [0, 1]
+    assert p.active, "provisioning should re-enable partner"
+    assert p.user_ids.active
 
 def test_no_email(env, port):
     """ Provisioning system should ignore email-less entries
@@ -88,6 +98,81 @@
     r = provision_user(port, [{**GEORGE, 'email': None}])
     assert r == [0, 0]
 
+def test_casing(env, port):
+    p = env['res.partner'].create({
+        'name': 'Bob',
+        'github_login': "Bob",
+    })
+    assert not p.user_ids
+    assert provision_user(port, [{
+        'name': "Bob Thebuilder",
+        'github_login': "bob",
+        'email': 'bob@example.org',
+        'sub': '5473634',
+    }]) == [1, 0]
+
+    assert p.user_ids.name == 'Bob Thebuilder'
+    assert p.user_ids.email == 'bob@example.org'
+    assert p.user_ids.oauth_uid == '5473634'
+    # should be written on the partner through the user
+    assert p.name == 'Bob Thebuilder'
+    assert p.email == 'bob@example.org'
+    assert p.github_login == 'bob'
+
+def test_user_leaves_and_returns(env, port):
+    internal = env.ref('base.group_user')
+    portal = env.ref('base.group_portal')
+    categories = internal | portal | env.ref('base.group_public')
+
+    assert provision_user(port, [{
+        "name": "Bamien Douvy",
+        "github_login": "DouvyB",
+        "email": "bado@example.org",
+        "sub": "123456",
+    }]) == [1, 0]
+    p = env['res.partner'].search([('github_login', '=', "DouvyB")])
+    assert (p.user_ids.groups_id & categories) == internal
+
+    # bye bye 👋
+    requests.post(f'http://localhost:{port}/runbot_merge/remove_reviewers', json={
+        'jsonrpc': '2.0',
+        'id': None,
+        'method': 'call',
+        'params': {'github_logins': ['douvyb']},
+    })
+    assert (p.user_ids.groups_id & categories) == portal
+    assert p.email is False
+
+    # he's back ❤️
+    assert provision_user(port, [{
+        "name": "Bamien Douvy",
+        "github_login": "DouvyB",
+        "email": "bado@example.org",
+        "sub": "123456",
+    }]) == [0, 1]
+    assert (p.user_ids.groups_id & categories) == internal
+    assert p.email == 'bado@example.org'
+
+def test_bulk_ops(env, port):
+    a, b = env['res.partner'].create([{
+        'name': "Bob",
+        'email': "bob@example.org",
+        'active': False,
+    }, {
+        'name': "Coc",
+        'email': "coc@example.org",
+        'active': False,
+    }])
+    assert a.active == b.active == False
+
+    assert provision_user(port, [
+        {'email': 'bob@example.org', 'github_login': 'xyz'},
+        {'email': 'coc@example.org', 'github_login': 'abc'},
+    ]) == [2, 0]
+    assert a.user_ids
+    assert b.user_ids
+    assert a.active == b.active == True
+
 def provision_user(port, users):
     r = requests.post(f'http://localhost:{port}/runbot_merge/provision', json={
         'jsonrpc': '2.0',