[IMP] runbot: improve local cleanup

When a build's age reaches the gc_days parameter, its database is dropped
and its directory is removed.

With this commit, two fields are added in order to keep some builds
longer than the configured gc_days.

The gc_delay field on the build adds a delay (in days) to its gc_days
when computing the gc_date.

The gc_date field is the date when the cleanup will occur.
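
For illustration, a minimal sketch of the intended computation, assuming the existing db_gc_days and db_gc_days_child parameters (the authoritative version is the _compute_gc_date method in the diff below; sketch_gc_date is a hypothetical helper, not part of the commit):

import datetime

def sketch_gc_date(job_end, is_child, gc_delay=0, gc_days=30, gc_days_child=15):
    # gc_date = reference date + retention (main or child build) + per-build delay
    retention = gc_days_child if is_child else gc_days
    return job_end + datetime.timedelta(days=retention + gc_delay)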

Also, a test is added and the RunbotCase test class is improved to allow
stopping a patcher.
Christophe Monniez 2020-01-07 17:02:05 +01:00 committed by XavierDo
parent 5d4979a5f6
commit 149ae4a074
6 changed files with 96 additions and 26 deletions


@@ -6,7 +6,7 @@
'author': "Odoo SA",
'website': "http://runbot.odoo.com",
'category': 'Website',
'version': '4.8',
'version': '4.9',
'depends': ['website', 'base'],
'data': [
'security/runbot_security.xml',


@@ -97,6 +97,7 @@ def s2human(time):
threshold=2.1,
)
@contextlib.contextmanager
def local_pgadmin_cursor():
cnx = None
@@ -107,3 +108,17 @@ def local_pgadmin_cursor():
finally:
if cnx:
cnx.close()
def list_local_dbs(additionnal_conditions=None):
additionnal_condition_str = ''
if additionnal_conditions:
additionnal_condition_str = 'AND (%s)' % ' OR '.join(additionnal_conditions)
with local_pgadmin_cursor() as local_cr:
local_cr.execute("""
SELECT datname
FROM pg_database
WHERE pg_get_userbyid(datdba) = current_user
%s
""" % additionnal_condition_str)
return [d[0] for d in local_cr.fetchall()]
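
As a usage sketch (hypothetical caller, for illustration only): the conditions are raw SQL fragments joined with OR, which lets the build cleanup narrow the listing to databases belonging to known builds, mirroring _local_cleanup below.

# Hedged example: list only databases whose name starts with a given build id.
build_ids = [12345]  # placeholder ids
conditions = ["datname like '%s-%%'" % _id for _id in build_ids]
dbs = list_local_dbs(additionnal_conditions=conditions)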


@@ -8,7 +8,7 @@ import shutil
import subprocess
import time
import datetime
from ..common import dt2time, fqdn, now, grep, uniq_list, local_pgadmin_cursor, s2human, Commit, dest_reg, os
from ..common import dt2time, fqdn, now, grep, local_pgadmin_cursor, s2human, Commit, dest_reg, os, list_local_dbs
from ..container import docker_build, docker_stop, docker_state, Command
from ..fields import JsonDictField
from odoo.addons.runbot.models.repo import RunbotException
@@ -99,6 +99,8 @@ class runbot_build(models.Model):
job = fields.Char('Active step display name', compute='_compute_job')
job_start = fields.Datetime('Job start')
job_end = fields.Datetime('Job end')
gc_date = fields.Datetime('Local cleanup date', compute='_compute_gc_date')
gc_delay = fields.Integer('Cleanup Delay', help='Used to compute gc_date')
build_start = fields.Datetime('Build start')
build_end = fields.Datetime('Build end')
job_time = fields.Integer(compute='_compute_job_time', string='Job time')
@@ -165,6 +167,17 @@ class runbot_build(models.Model):
else:
record.global_state = record.local_state
@api.depends('gc_delay', 'job_end')
def _compute_gc_date(self):
icp = self.env['ir.config_parameter']
max_days_main = int(icp.get_param('runbot.db_gc_days', default=30))
max_days_child = int(icp.get_param('runbot.db_gc_days_child', default=15))
for build in self:
ref_date = fields.Datetime.from_string(build.job_end or build.create_date or fields.Datetime.now())
max_days = max_days_main if not build.parent_id else max_days_child
max_days += int(build.gc_delay if build.gc_delay else 0)
build.gc_date = ref_date + datetime.timedelta(days=(max_days))
def _get_youngest_state(self, states):
index = min([self._get_state_score(state) for state in states])
return state_order[index]
@@ -496,10 +509,6 @@ class runbot_build(models.Model):
return self.browse()
def _filter_to_clean(self, dest_list, label):
icp = self.env['ir.config_parameter']
max_days_main = int(icp.get_param('runbot.db_gc_days', default=30))
max_days_child = int(icp.get_param('runbot.db_gc_days_child', default=15))
dest_by_builds_ids = defaultdict(list)
ignored = set()
for dest in dest_list:
@@ -517,7 +526,7 @@ class runbot_build(models.Model):
dest_list = [dest for sublist in [dest_by_builds_ids[rem_id] for rem_id in remaining.ids] for dest in sublist]
_logger.debug('(%s) (%s) not deleted because no corresponding build found' % (label, " ".join(dest_list)))
for build in existing:
if fields.Datetime.from_string(build.job_end or build.create_date) + datetime.timedelta(days=(max_days_main if not build.parent_id else max_days_child)) < datetime.datetime.now():
if fields.Datetime.from_string(build.gc_date) < datetime.datetime.now():
if build.local_state == 'done':
for db in dest_by_builds_ids[build.id]:
yield db
@@ -529,12 +538,9 @@ class runbot_build(models.Model):
Remove datadir and drop databases of build older than db_gc_days or db_gc_days_child.
If force is set to True, does the same cleaning based on recordset without checking build age.
"""
if self.pool._init:
return
_logger.debug('Local cleaning')
_filter = self._filter_to_clean
additionnal_condition_str = ''
additionnal_conditions = []
if force is True:
def filter_ids(dest_list, label):
@@ -545,20 +551,10 @@ class runbot_build(models.Model):
elif not build:
_logger.debug('%s (%s) skipped because not dest format', label, dest)
_filter = filter_ids
additionnal_conditions = []
for _id in self.exists().ids:
additionnal_conditions.append("datname like '%s-%%'" % _id)
if additionnal_conditions:
additionnal_condition_str = 'AND (%s)' % ' OR '.join(additionnal_conditions)
with local_pgadmin_cursor() as local_cr:
local_cr.execute("""
SELECT datname
FROM pg_database
WHERE pg_get_userbyid(datdba) = current_user
%s
""" % additionnal_condition_str)
existing_db = [d[0] for d in local_cr.fetchall()]
existing_db = list_local_dbs(additionnal_conditions=additionnal_conditions)
for db in _filter(dest_list=existing_db, label='db'):
self._logger('Removing database')
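
With gc_date computed per build, retention can be extended for a single build instead of changing the global parameters. A hedged usage sketch (build_id and env are placeholders, e.g. in an odoo shell):

# Keep one build ten extra days: gc_date is recomputed from
# job_end (or create_date) + db_gc_days(_child) + gc_delay.
build = env['runbot.build'].browse(build_id)
build.gc_delay = 10  # cleanup for this build now happens ten days later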


@@ -16,6 +16,7 @@ class RunbotCase(TransactionCase):
self.Branch = self.env['runbot.branch']
self.patchers = {}
self.patcher_objects = {}
def git_side_effect(cmd):
if cmd[:2] == ['show', '-s'] or cmd[:3] == ['show', '--pretty="%H -- %s"', '-s']:
@@ -41,17 +42,29 @@
self.start_patcher('docker_stop', 'odoo.addons.runbot.models.repo.docker_stop')
self.start_patcher('cr_commit', 'odoo.sql_db.Cursor.commit', None)
self.start_patcher('repo_commit', 'odoo.addons.runbot.models.repo.runbot_repo._commit', None)
self.start_patcher('_local_cleanup_patcher', 'odoo.addons.runbot.models.build.runbot_build._local_cleanup')
self.start_patcher('_local_pg_dropdb_patcher', 'odoo.addons.runbot.models.build.runbot_build._local_pg_dropdb')
def start_patcher(self, patcher_name, patcher_path, return_value=Dummy, side_effect=Dummy):
def stop_patcher_wrapper():
self.stop_patcher(patcher_name)
patcher = patch(patcher_path)
if not hasattr(patcher, 'is_local'):
res = patcher.start()
self.addCleanup(patcher.stop)
self.addCleanup(stop_patcher_wrapper)
self.patchers[patcher_name] = res
self.patcher_objects[patcher_name] = patcher
if side_effect != Dummy:
res.side_effect = side_effect
elif return_value != Dummy:
res.return_value = return_value
def stop_patcher(self, patcher_name):
if patcher_name in self.patcher_objects:
self.patcher_objects[patcher_name].stop()
del self.patcher_objects[patcher_name]
def create_build(self, vals):
return self.Build.create(vals)
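
A hedged sketch of what the new stop_patcher enables inside a test: a method patched in setUp can be un-patched mid-test so the real implementation runs (this mirrors test_build_gc_date below):

# _local_cleanup is patched in setUp; stopping the patcher lets the real
# method run, against a mocked database listing.
self.stop_patcher('_local_cleanup_patcher')
self.start_patcher('list_local_dbs_patcher',
                   'odoo.addons.runbot.models.build.list_local_dbs',
                   return_value=['%s-foobar' % build.dest])
build._local_cleanup()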


@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
from collections import defaultdict
from unittest.mock import patch
from odoo.tests import common
import datetime
from unittest.mock import patch
from odoo import fields
from .common import RunbotCase
def rev_parse(repo, branch_name):
"""
simulate a rev parse by returning a fake hash of form
@@ -14,6 +16,7 @@ def rev_parse(repo, branch_name):
head_hash = 'rp_%s_%s_head' % (repo.name.split(':')[1], branch_name.split('/')[-1])
return head_hash
class Test_Build(RunbotCase):
def setUp(self):
@@ -300,6 +303,47 @@ class Test_Build(RunbotCase):
})
self.assertEqual(build.config_id, self.env.ref('runbot.runbot_build_config_default_no_run'), "config_id should be the one set on the build")
def test_build_gc_date(self):
""" test build gc date and gc_delay"""
self.branch.config_id = self.env.ref('runbot.runbot_build_config_default')
build = self.create_build({
'branch_id': self.branch.id,
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
'local_state': 'done'
})
child_build = self.create_build({
'branch_id': self.branch.id,
'name': 'd0d0caca0000ffffffffffffffffffffffffffff',
'extra_params': '2',
'parent_id': build.id,
'local_state': 'done'
})
# verify that the gc_day is set 30 days later (29 days since we should be a few microseconds later)
delta = fields.Datetime.from_string(build.gc_date) - datetime.datetime.now()
self.assertEqual(delta.days, 29)
child_delta = fields.Datetime.from_string(child_build.gc_date) - datetime.datetime.now()
self.assertEqual(child_delta.days, 14)
# Keep child build ten days more
child_build.gc_delay = 10
child_delta = fields.Datetime.from_string(child_build.gc_date) - datetime.datetime.now()
self.assertEqual(child_delta.days, 24)
# test the real _local_cleanup method
self.stop_patcher('_local_cleanup_patcher')
self.start_patcher('build_local_pgadmin_cursor_patcher', 'odoo.addons.runbot.models.build.local_pgadmin_cursor')
self.start_patcher('build_os_listdirr_patcher', 'odoo.addons.runbot.models.build.os.listdir')
dbname = '%s-foobar' % build.dest
self.start_patcher('list_local_dbs_patcher', 'odoo.addons.runbot.models.build.list_local_dbs', return_value=[dbname])
build._local_cleanup()
self.assertFalse(self.patchers['_local_pg_dropdb_patcher'].called)
build.job_end = datetime.datetime.now() - datetime.timedelta(days=31)
build._local_cleanup()
self.patchers['_local_pg_dropdb_patcher'].assert_called_with(dbname)
@patch('odoo.addons.runbot.models.build._logger')
def test_build_skip(self, mock_logger):
"""test build is skipped"""


@@ -48,6 +48,8 @@
<field name="hidden" groups="base.group_no_one"/>
<field name="build_url" widget="url" readonly="1"/>
<field name="keep_running"/>
<field name="gc_date" readonly="1"/>
<field name="gc_delay"/>
</group>
</sheet>
</form>