diff --git a/conftest.py b/conftest.py
index 7fb85dde..59f0753e 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import datetime
import select
import shutil
import threading
@@ -316,7 +317,8 @@ class DbDict(dict):
'-d', db, '-i', module + ',saas_worker,auth_oauth',
'--max-cron-threads', '0',
'--stop-after-init',
- '--log-level', 'warn'
+ '--log-level', 'warn',
+ '--log-handler', 'py.warnings:ERROR',
],
check=True,
env={**os.environ, 'XDG_DATA_HOME': str(d)}
@@ -413,15 +415,58 @@ def dummy_addons_path():
mod = pathlib.Path(dummy_addons_path, 'saas_worker')
mod.mkdir(0o700)
(mod / '__init__.py').write_text('''\
+import builtins
+import logging
+import threading
+
+import psycopg2
+
+import odoo
from odoo import api, fields, models
+_logger = logging.getLogger(__name__)
+
class Base(models.AbstractModel):
_inherit = 'base'
def run_crons(self):
+ builtins.forwardport_merged_before = self.env.context.get('forwardport_merged_before')
+ builtins.forwardport_updated_before = self.env.context.get('forwardport_updated_before')
self.env['ir.cron']._process_jobs(self.env.cr.dbname)
+ del builtins.forwardport_updated_before
+ del builtins.forwardport_merged_before
return True
+
+
+class IrCron(models.Model):
+ _inherit = 'ir.cron'
+
+ @classmethod
+ def _process_jobs(cls, db_name):
+ t = threading.current_thread()
+ try:
+ db = odoo.sql_db.db_connect(db_name)
+ t.dbname = db_name
+ with db.cursor() as cron_cr:
+ # FIXME: override `_get_all_ready_jobs` to directly lock the cron?
+ while job := next((
+ job
+ for j in cls._get_all_ready_jobs(cron_cr)
+ if (job := cls._acquire_one_job(cron_cr, (j['id'],)))
+ ), None):
+ # take into account overridings of _process_job() on that database
+ registry = odoo.registry(db_name)
+ registry[cls._name]._process_job(db, cron_cr, job)
+ cron_cr.commit()
+
+ except psycopg2.ProgrammingError:
+ raise
+ except Exception:
+ _logger.warning('Exception in cron:', exc_info=True)
+ finally:
+ if hasattr(t, 'dbname'):
+ del t.dbname
''', encoding='utf-8')
(mod / '__manifest__.py').write_text(pprint.pformat({
'name': 'dummy saas_worker',
@@ -445,6 +490,7 @@ def addons_path(request, dummy_addons_path):
def server(request, db, port, module, addons_path, tmpdir):
log_handlers = [
'odoo.modules.loading:WARNING',
+ 'py.warnings:ERROR',
]
if not request.config.getoption('--log-github'):
log_handlers.append('github_requests:WARNING')
@@ -1210,15 +1256,19 @@ class Environment:
def run_crons(self, *xids, **kw):
crons = xids or self._default_crons
- print('running crons', crons, file=sys.stderr)
+ cron_ids = []
for xid in crons:
- t0 = time.time()
- print('\trunning cron', xid, '...', file=sys.stderr)
+ if xid is None:
+ continue
+
model, cron_id = self('ir.model.data', 'check_object_reference', *xid.split('.', 1))
assert model == 'ir.cron', "Expected {} to be a cron, got {}".format(xid, model)
- self('ir.cron', 'method_direct_trigger', [cron_id], **kw)
- print('\tdone %.3fs' % (time.time() - t0), file=sys.stderr)
- print('done', file=sys.stderr)
+ cron_ids.append(cron_id)
+ if cron_ids:
+ self('ir.cron', 'write', cron_ids, {
+ 'nextcall': (datetime.datetime.utcnow() - datetime.timedelta(seconds=30)).isoformat(" ", "seconds")
+ }, **kw)
+ self('base', 'run_crons', [], **kw)
# sleep for some time as a lot of crap may have happened (?)
wait_for_hook()
diff --git a/forwardport/data/crons.xml b/forwardport/data/crons.xml
index 02d2be1e..7d0e1bf7 100644
--- a/forwardport/data/crons.xml
+++ b/forwardport/data/crons.xml
@@ -8,6 +8,7 @@
minutes
-1
+ 43
@@ -19,6 +20,7 @@
minutes
-1
+ 46
diff --git a/forwardport/models/forwardport.py b/forwardport/models/forwardport.py
index a0604da7..b6ac07d8 100644
--- a/forwardport/models/forwardport.py
+++ b/forwardport/models/forwardport.py
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+import builtins
import contextlib
import logging
import re
@@ -345,7 +346,7 @@ class DeleteBranches(models.Model, Queue):
pr_id = fields.Many2one('runbot_merge.pull_requests')
def _search_domain(self):
- cutoff = self.env.context.get('forwardport_merged_before') \
+ cutoff = getattr(builtins, 'forwardport_merged_before', None) \
or fields.Datetime.to_string(datetime.now() - MERGE_AGE)
return [('pr_id.merge_date', '<', cutoff)]
diff --git a/forwardport/models/project.py b/forwardport/models/project.py
index 30c2df5d..72630453 100644
--- a/forwardport/models/project.py
+++ b/forwardport/models/project.py
@@ -13,6 +13,7 @@ it up), ...
"""
from __future__ import annotations
+import builtins
import datetime
import itertools
import json
@@ -487,7 +488,7 @@ stderr:
], order='source_id, id'), lambda p: p.source_id)
def _reminder(self):
- cutoff = self.env.context.get('forwardport_updated_before') \
+ cutoff = getattr(builtins, 'forwardport_updated_before', None) \
or fields.Datetime.to_string(datetime.datetime.now() - DEFAULT_DELTA)
cutoff_dt = fields.Datetime.from_string(cutoff)
diff --git a/runbot_merge/data/merge_cron.xml b/runbot_merge/data/merge_cron.xml
index b0ff6e1d..ffd3147f 100644
--- a/runbot_merge/data/merge_cron.xml
+++ b/runbot_merge/data/merge_cron.xml
@@ -8,6 +8,7 @@
minutes
-1
+ 30
Check for progress of PRs and create Stagings
@@ -18,6 +19,7 @@
minutes
-1
+ 40
Send feedback to PR
@@ -28,6 +30,7 @@
minutes
-1
+ 60
Update labels on PR
@@ -38,6 +41,7 @@
minutes
-1
+ 70
Check for PRs to fetch
@@ -48,6 +52,7 @@
minutes
-1
+ 10
Warn on linked PRs where only one is ready
@@ -58,6 +63,7 @@
hours
-1
+ 50
Impact commit statuses on PRs and stagings
@@ -68,5 +74,6 @@
minutes
-1
+ 20
diff --git a/runbot_merge/models/crons/cleanup_scratch_branches.py b/runbot_merge/models/crons/cleanup_scratch_branches.py
index 1e7b6274..a5e041ce 100644
--- a/runbot_merge/models/crons/cleanup_scratch_branches.py
+++ b/runbot_merge/models/crons/cleanup_scratch_branches.py
@@ -29,4 +29,4 @@ class BranchCleanup(models.TransientModel):
_logger.info("no tmp branch found for %s:%s", r.name, b.name)
res = gh('delete', f'git/refs/heads/staging.{b.name}', check=False)
if res.status_code != 204:
- _logger.info("no staging branch found for %s:%s", res.name, b.name)
+ _logger.info("no staging branch found for %s:%s", r.name, b.name)
diff --git a/runbot_merge/tests/test_disabled_branch.py b/runbot_merge/tests/test_disabled_branch.py
index cfd16bae..88e5e490 100644
--- a/runbot_merge/tests/test_disabled_branch.py
+++ b/runbot_merge/tests/test_disabled_branch.py
@@ -45,10 +45,21 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
staging_id = branch_id.active_staging_id
assert staging_id == pr_id.staging_id
+ # staging of `pr` should have generated a staging branch
+ _ = repo.get_ref('heads/staging.other')
+ # stagings should not need a tmp branch anymore, so this should not exist
+ with pytest.raises(AssertionError, match=r'Not Found'):
+ repo.get_ref('heads/tmp.other')
+
# disable branch "other"
branch_id.active = False
env.run_crons()
+ # triggered cleanup should have deleted the staging for the disabled `other`
+ # target branch
+ with pytest.raises(AssertionError, match=r'Not Found'):
+ repo.get_ref('heads/staging.other')
+
# the PR should not have been closed implicitly
assert pr_id.state == 'ready'
# but it should be unstaged
@@ -86,17 +97,10 @@ def test_existing_pr_disabled_branch(env, project, make_repo, setreviewers, conf
assert pr_id.staging_id
# staging of `pr` should have generated a staging branch
- _ = repo.get_ref('heads/staging.other')
+ _ = repo.get_ref('heads/staging.other2')
# stagings should not need a tmp branch anymore, so this should not exist
with pytest.raises(AssertionError, match=r'Not Found'):
- repo.get_ref('heads/tmp.other')
-
- assert env['base'].run_crons()
-
- # triggered cleanup should have deleted the staging for the disabled `other`
- # target branch
- with pytest.raises(AssertionError, match=r'Not Found'):
- repo.get_ref('heads/staging.other')
+ repo.get_ref('heads/tmp.other2')
def test_new_pr_no_branch(env, project, make_repo, setreviewers, users):
""" A new PR to an *unknown* branch should be ignored and warn
diff --git a/runbot_merge/tests/test_project_toggles.py b/runbot_merge/tests/test_project_toggles.py
index 615e6e4b..30fe4eff 100644
--- a/runbot_merge/tests/test_project_toggles.py
+++ b/runbot_merge/tests/test_project_toggles.py
@@ -1,3 +1,4 @@
+import datetime
import functools
from itertools import repeat
@@ -66,16 +67,16 @@ def test_staging_priority(env, project, repo, config, mode, cutoff, second):
with repo:
[m] = repo.make_commits(None, Commit("m", tree={"ble": "1"}), ref="heads/master")
- [c] = repo.make_commits(m, Commit("c", tree={"1": "1"}), ref="heads/pr1")
+ repo.make_commits(m, Commit("c", tree={"1": "1"}), ref="heads/pr1")
pr1 = repo.make_pr(title="whatever", target="master", head="pr1")
- [c] = repo.make_commits(m, Commit("c", tree={"2": "2"}), ref="heads/pr2")
+ repo.make_commits(m, Commit("c", tree={"2": "2"}), ref="heads/pr2")
pr2 = repo.make_pr(title="whatever", target="master", head="pr2")
- [c] = repo.make_commits(m, Commit("c", tree={"3": "3"}), ref="heads/pr3")
+ repo.make_commits(m, Commit("c", tree={"3": "3"}), ref="heads/pr3")
pr3 = repo.make_pr(title="whatever", target="master", head="pr3")
- [c] = repo.make_commits(m, Commit("c", tree={"4": "4"}), ref="heads/pr4")
+ repo.make_commits(m, Commit("c", tree={"4": "4"}), ref="heads/pr4")
pr4 = repo.make_pr(title="whatever", target="master", head="pr4")
prs = [pr1, pr2, pr3, pr4]
@@ -106,6 +107,13 @@ def test_staging_priority(env, project, repo, config, mode, cutoff, second):
# trigger a split
with repo:
repo.post_status('staging.master', 'failure')
+
+ # specifically delay creation of new staging to observe the failed
+ # staging's state and the splits
+ model, cron_id = env['ir.model.data'].check_object_reference('runbot_merge', 'staging_cron')
+ env[model].browse([cron_id]).write({
+ 'nextcall': (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)).isoformat(" ", "seconds")
+ })
env.run_crons('runbot_merge.process_updated_commits', 'runbot_merge.merge_cron')
assert not staging.active
assert not env['runbot_merge.stagings'].search([]).active