mirror of
https://github.com/odoo/runbot.git
synced 2025-03-15 15:35:46 +07:00
[IMP] forwardport: reliability of PR reminders
The exponential backoff offsets from the write_date of the children PRs, however it doesn't reset, so the offsetting gets bumped up way more than originally expected or designed if the child PRs are under active development for some reason. Fix this by adding a field to specifically record the date of merge of a PR, and check that field against the backoff offset. This should provide more regular and reliable backoff. Fixes #369
This commit is contained in:
parent
4d528465d9
commit
db9e86f760
@ -1,6 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'name': 'forward port bot',
|
||||
'version': '1.1',
|
||||
'summary': "A port which forward ports successful PRs.",
|
||||
'depends': ['runbot_merge'],
|
||||
'data': [
|
||||
|
10
forwardport/migrations/13.0.1.1/post-reminder-date.py
Normal file
10
forwardport/migrations/13.0.1.1/post-reminder-date.py
Normal file
@ -0,0 +1,10 @@
|
||||
def migrate(cr, version):
    """ Backfill the new merge_date column from write_date, and reset
    the reminder backoff to its default so old merged PRs get
    reprocessed on the regular schedule.

    :param cr: database cursor provided by the migration runner
    :param version: module version being migrated from (unused)
    """
    # Only merged PRs have a meaningful merge date; write_date is the
    # closest available approximation for pre-existing rows.
    query = """
        UPDATE runbot_merge_pull_requests
           SET merge_date = write_date,
               reminder_backoff_factor = -4
         WHERE state = 'merged'
    """
    cr.execute(query)
|
2
forwardport/migrations/13.0.1.1/pre-tagging.py
Normal file
2
forwardport/migrations/13.0.1.1/pre-tagging.py
Normal file
@ -0,0 +1,2 @@
|
||||
def migrate(cr, version):
    """ Remove the leftover ir.model registration for the dropped
    forwardport.tagging model so the registry stays clean.

    :param cr: database cursor provided by the migration runner
    :param version: module version being migrated from (unused)
    """
    query = "delete from ir_model where model = 'forwardport.tagging'"
    cr.execute(query)
|
@ -141,7 +141,7 @@ class DeleteBranches(models.Model, Queue):
|
||||
def _search_domain(self):
|
||||
cutoff = self.env.context.get('forwardport_merged_before') \
|
||||
or fields.Datetime.to_string(datetime.now() - MERGE_AGE)
|
||||
return [('pr_id.write_date', '<', cutoff)]
|
||||
return [('pr_id.merge_date', '<', cutoff)]
|
||||
|
||||
def _process_item(self):
|
||||
_deleter.info(
|
||||
|
@ -25,7 +25,7 @@ import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
import dateutil
|
||||
import dateutil.relativedelta
|
||||
import requests
|
||||
|
||||
from odoo import _, models, fields, api
|
||||
@ -188,6 +188,7 @@ class PullRequests(models.Model):
|
||||
)
|
||||
source_id = fields.Many2one('runbot_merge.pull_requests', index=True, help="the original source of this FP even if parents were detached along the way")
|
||||
reminder_backoff_factor = fields.Integer(default=-4)
|
||||
merge_date = fields.Datetime()
|
||||
|
||||
fw_policy = fields.Selection([
|
||||
('ci', "Normal"),
|
||||
@ -218,6 +219,8 @@ class PullRequests(models.Model):
|
||||
)[-1].id
|
||||
if vals.get('parent_id') and 'source_id' not in vals:
|
||||
vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
|
||||
if vals.get('state') == 'merged':
|
||||
vals['merge_date'] = fields.Datetime.now()
|
||||
return super().create(vals)
|
||||
|
||||
def write(self, vals):
|
||||
@ -240,6 +243,8 @@ class PullRequests(models.Model):
|
||||
|
||||
if vals.get('parent_id') and 'source_id' not in vals:
|
||||
vals['source_id'] = self.browse(vals['parent_id'])._get_root().id
|
||||
if vals.get('state') == 'merged':
|
||||
vals['merge_date'] = fields.Datetime.now()
|
||||
r = super().write(vals)
|
||||
if self.env.context.get('forwardport_detach_warn', True):
|
||||
for p in with_parents:
|
||||
@ -985,12 +990,12 @@ stderr:
|
||||
('source_id', '!=', False),
|
||||
# active
|
||||
('state', 'not in', ['merged', 'closed']),
|
||||
# last updated more than <cutoff> ago
|
||||
('write_date', '<', cutoff),
|
||||
# original merged more than <cutoff> ago
|
||||
('source_id.merge_date', '<', cutoff),
|
||||
], order='source_id, id'), lambda p: p.source_id):
|
||||
backoff = dateutil.relativedelta.relativedelta(days=2**source.reminder_backoff_factor)
|
||||
prs = list(prs)
|
||||
if all(p.write_date > (cutoff_dt - backoff) for p in prs):
|
||||
if source.merge_date > (cutoff_dt - backoff):
|
||||
continue
|
||||
source.reminder_backoff_factor += 1
|
||||
self.env['runbot_merge.pull_requests.feedback'].create({
|
||||
|
@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import collections
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from operator import itemgetter
|
||||
@ -9,7 +8,8 @@ import pytest
|
||||
|
||||
from utils import *
|
||||
|
||||
FAKE_PREV_WEEK = (datetime.now() + timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S')
|
||||
FMT = '%Y-%m-%d %H:%M:%S'
|
||||
FAKE_PREV_WEEK = (datetime.now() + timedelta(days=1)).strftime(FMT)
|
||||
|
||||
# need:
|
||||
# * an odoo server
|
||||
@ -56,6 +56,12 @@ def test_straightforward_flow(env, config, make_repo, users):
|
||||
# use rebase-ff (instead of rebase-merge) so we don't have to dig in
|
||||
# parents of the merge commit to find the cherrypicks
|
||||
pr.post_comment('hansen r+ rebase-ff', config['role_reviewer']['token'])
|
||||
pr_id = env['runbot_merge.pull_requests'].search([
|
||||
('repository.name', '=', prod.name),
|
||||
('number', '=', pr.number),
|
||||
])
|
||||
assert not pr_id.merge_date,\
|
||||
"PR obviously shouldn't have a merge date before being merged"
|
||||
|
||||
env.run_crons()
|
||||
with prod:
|
||||
@ -65,6 +71,10 @@ def test_straightforward_flow(env, config, make_repo, users):
|
||||
# should merge the staging then create the FP PR
|
||||
env.run_crons()
|
||||
|
||||
assert datetime.now() - datetime.strptime(pr_id.merge_date, FMT) <= timedelta(minutes=1),\
|
||||
"check if merge date was set about now (within a minute as crons and " \
|
||||
"RPC calls yield various delays before we're back)"
|
||||
|
||||
p_1_merged = prod.commit('a')
|
||||
|
||||
assert p_1_merged.id != p_1
|
||||
|
Loading…
Reference in New Issue
Block a user