[IMP] stylistic improvements

easier to understand and to fix...
Gery Debongnie 2014-07-04 16:16:45 +02:00
commit 2a5f6c3a51

@@ -35,63 +35,62 @@ _logger = logging.getLogger(__name__)
 #----------------------------------------------------------
 def log(*l, **kw):
-    out = []
-    for i in l:
-        if not isinstance(i, basestring):
-            i = repr(i)
-        out.append(i)
-    out += ["%s=%r" % (k, v) for k, v in kw.items()]
+    out = [i if isinstance(i, basestring) else repr(i) for i in l] + \
+          ["%s=%r" % (k, v) for k, v in kw.items()]
     _logger.debug(' '.join(out))

-def dashes(s):
+def dashes(string):
+    """Sanitize the input string"""
     for i in '~":\'':
-        s = s.replace(i, "")
+        string = string.replace(i, "")
     for i in '/_. ':
-        s = s.replace(i, "-")
-    return s
+        string = string.replace(i, "-")
+    return string

 def mkdirs(dirs):
-    for i in dirs:
-        if not os.path.exists(i):
-            os.makedirs(i)
+    for d in dirs:
+        if not os.path.exists(d):
+            os.makedirs(d)

-def grep(filename, s):
+def grep(filename, string):
     if os.path.isfile(filename):
-        return open(filename).read().find(s) != -1
+        return open(filename).read().find(string) != -1
     return False

 _re_error = r'^(?:\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ (?:ERROR|CRITICAL) )|(?:Traceback \(most recent call last\):)$'
 _re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING '

-def rfind(filename, patern):
+def rfind(filename, pattern):
+    """Determine in something in filename matches the pattern"""
     if os.path.isfile(filename):
-        p = re.compile(patern, re.M)
+        regexp = re.compile(pattern, re.M)
         with open(filename, 'r') as f:
-            if p.findall(f.read()):
+            if regexp.findall(f.read()):
                 return True
     return False

-def lock(name):
-    fd = os.open(name, os.O_CREAT | os.O_RDWR, 0600)
+def lock(filename):
+    fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0600)
     fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

-def locked(name):
-    r = False
+def locked(filename):
+    result = False
     try:
-        fd = os.open(name, os.O_CREAT | os.O_RDWR, 0600)
+        fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0600)
         try:
             fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
         except IOError:
-            r = True
+            result = True
         os.close(fd)
     except OSError:
-        r = False
-    return r
+        result = False
+    return result

 def nowait():
     signal.signal(signal.SIGCHLD, signal.SIG_IGN)

 def run(l, env=None):
+    """Run a command described by l in environment env"""
     log("run", l)
     env = dict(os.environ, **env) if env else None
     if isinstance(l, list):
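
The lock()/locked() helpers above rely on a non-blocking probe: asking for an exclusive lock with LOCK_NB raises IOError when another process already holds it. A minimal standalone sketch of the same pattern (the path is only an example; 0o600 is the portable spelling of the octal mode used above):

import fcntl
import os

def is_locked(path):
    """Return True if another process holds an exclusive lock on path."""
    fd = os.open(path, os.O_CREAT | os.O_RDWR, 0o600)
    try:
        # LOCK_NB makes lockf fail immediately instead of blocking
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return False
    except IOError:
        return True
    finally:
        os.close(fd)

print(is_locked('/tmp/runbot-example.lock'))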
@@ -111,14 +110,16 @@ def run(l, env=None):
 def now():
     return time.strftime(openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT)

-def dt2time(dt):
-    return time.mktime(time.strptime(dt, openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT))
+def dt2time(datetime):
+    """Convert datetime to time"""
+    return time.mktime(time.strptime(datetime, openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT))

-def s2human(t):
-    for m,u in [(86400,'d'),(3600,'h'),(60,'m')]:
-        if t>=m:
-            return str(int(t/m))+u
-    return str(int(t))+"s"
+def s2human(time):
+    """Convert a time in second into an human readable string"""
+    for delay, desc in [(86400,'d'),(3600,'h'),(60,'m')]:
+        if time >= delay:
+            return str(int(time / delay)) + desc
+    return str(int(time)) + "s"

 #----------------------------------------------------------
 # RunBot Models
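
A few illustrative calls to the s2human helper above (assuming the function exactly as defined in this hunk, with its 86400/3600/60 second thresholds):

s2human(90000)  # '1d'  (just over one day)
s2human(4000)   # '1h'
s2human(75)     # '1m'
s2human(42)     # '42s'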
@@ -129,22 +130,22 @@ class runbot_repo(osv.osv):
     _order = 'name'

     def _get_path(self, cr, uid, ids, field_name, arg, context=None):
-        wd = self.root(cr, uid)
-        r = {}
+        root = self.root(cr, uid)
+        result = {}
         for repo in self.browse(cr, uid, ids, context=context):
             name = repo.name
             for i in '@:/':
                 name = name.replace(i, '_')
-            r[repo.id] = os.path.join(wd, 'repo', name)
-        return r
+            result[repo.id] = os.path.join(root, 'repo', name)
+        return result

     def _get_base(self, cr, uid, ids, field_name, arg, context=None):
-        r = {}
+        result = {}
         for repo in self.browse(cr, uid, ids, context=context):
             name = re.sub('.+@', '', repo.name)
             name = name.replace(':','/')
-            r[repo.id] = name
-        return r
+            result[repo.id] = name
+        return result

     _columns = {
         'name': fields.char('Repository', required=True),
@@ -170,11 +171,12 @@ class runbot_repo(osv.osv):
         return domain

     def root(self, cr, uid, context=None):
+        """Return root directory of repository"""
         default = os.path.join(os.path.dirname(__file__), 'static')
-        root = self.pool.get('ir.config_parameter').get_param(cr, uid, 'runbot.root', default)
-        return root
+        return self.pool.get('ir.config_parameter').get_param(cr, uid, 'runbot.root', default)

     def git(self, cr, uid, ids, cmd, context=None):
+        """Execute git command cmd"""
         for repo in self.browse(cr, uid, ids, context=context):
             cmd = ['git', '--git-dir=%s' % repo.path] + cmd
             _logger.info("git: %s", ' '.join(cmd))
@@ -189,22 +191,23 @@ class runbot_repo(osv.osv):
         p2.communicate()[0]

     def github(self, cr, uid, ids, url, payload=None, delete=False, context=None):
+        """Return a http request to be sent to github"""
         for repo in self.browse(cr, uid, ids, context=context):
-            mo = re.search('([^/]+)/([^/]+)/([^/]+)', repo.base)
-            if mo:
-                url = url.replace(':owner', mo.group(2))
-                url = url.replace(':repo', mo.group(3))
-                url = 'https://api.%s%s' % (mo.group(1),url)
-                s = requests.Session()
-                s.auth = (repo.token,'x-oauth-basic')
-                s.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
+            match_object = re.search('([^/]+)/([^/]+)/([^/]+)', repo.base)
+            if match_object:
+                url = url.replace(':owner', match_object.group(2))
+                url = url.replace(':repo', match_object.group(3))
+                url = 'https://api.%s%s' % (match_object.group(1),url)
+                session = requests.Session()
+                session.auth = (repo.token,'x-oauth-basic')
+                session.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
                 if payload:
-                    r = s.post(url, data=simplejson.dumps(payload))
+                    response = session.post(url, data=simplejson.dumps(payload))
                 elif delete:
-                    r = s.delete(url)
+                    response = session.delete(url)
                 else:
-                    r = s.get(url)
-                return r.json()
+                    response = session.get(url)
+                return response.json()

     def update(self, cr, uid, ids, context=None):
         for repo in self.browse(cr, uid, ids, context=context):
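
For reference, the :owner/:repo templating in github() expands along these lines (repository base and endpoint are made-up examples):

import re

base = 'github.com/some-owner/some-repo'       # hypothetical repo.base value
match_object = re.search('([^/]+)/([^/]+)/([^/]+)', base)
url = '/repos/:owner/:repo/statuses/0123abc'   # hypothetical endpoint
url = url.replace(':owner', match_object.group(2))
url = url.replace(':repo', match_object.group(3))
url = 'https://api.%s%s' % (match_object.group(1), url)
# -> 'https://api.github.com/repos/some-owner/some-repo/statuses/0123abc'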
@@ -224,18 +227,17 @@ class runbot_repo(osv.osv):
                 repo.git(['fetch', '-p', 'origin', '+refs/pull/*/head:refs/pull/*'])

             fields = ['refname','objectname','authordate:iso8601','authorname','subject']
-            fmt = "%00".join(["%("+i+")" for i in fields])
-            out = repo.git(['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/heads', 'refs/pull'])
-            out = out.strip()
-            refs = []
-            for l in out.split('\n'):
-                ref = []
-                for i in l.split('\x00'):
-                    try:
-                        ref.append(i.decode('utf-8'))
-                    except UnicodeDecodeError:
-                        ref.append('')
-                refs.append(ref)
+            fmt = "%00".join(["%("+field+")" for field in fields])
+            git_refs = repo.git(['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/heads', 'refs/pull'])
+            git_refs = git_refs.strip()
+
+            def decode_utf(string):
+                try:
+                    return field.decode('utf-8')
+                except UnicodeDecodeError:
+                    return ''
+
+            refs = [[decode_utf(field) for field in line.split('\x00')] for line in git_refs.split('\n')]
             for name, sha, date, author, subject in refs:
                 # create or get branch
                 branch_ids = self.pool['runbot.branch'].search(cr, uid, [('repo_id', '=', repo.id), ('name', '=', name)])
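
The parsing above splits git for-each-ref output into records: one ref per line, with the five requested fields separated by NUL (%00) bytes. A small sketch with a fabricated output line:

fields = ['refname', 'objectname', 'authordate:iso8601', 'authorname', 'subject']
fmt = "%00".join(["%(" + field + ")" for field in fields])
# fmt == '%(refname)%00%(objectname)%00%(authordate:iso8601)%00%(authorname)%00%(subject)'

sample_line = "refs/heads/master\x001a2b3c4\x002014-07-04 16:16:45 +0200\x00Gery Debongnie\x00[IMP] stylistic improvements"
name, sha, date, author, subject = sample_line.split('\x00')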
@@ -256,7 +258,7 @@ class runbot_repo(osv.osv):
                         Build.write(cr, uid, to_be_skipped_ids, {'state': 'done', 'result': 'skipped'})
                     _logger.debug('repo %s branch %s new build found revno %s', branch.repo_id.name, branch.name, sha)
-                    v = {
+                    build_info = {
                         'branch_id': branch.id,
                         'name': sha,
                         'author': author,
@@ -271,37 +273,37 @@ class runbot_repo(osv.osv):

     def scheduler(self, cr, uid, ids=None, context=None):
         for repo in self.browse(cr, uid, ids, context=context):
-            bo = self.pool['runbot.build']
-            dom = [('repo_id', '=', repo.id)]
+            Build = self.pool['runbot.build']
+            domain = [('repo_id', '=', repo.id)]

-            # schedule jobs
-            build_ids = bo.search(cr, uid, dom + [('state', 'in', ['testing', 'running'])])
-            bo.schedule(cr, uid, build_ids)
+            # schedule jobs (transitions testing -> running, kill jobs, ...)
+            build_ids = Build.search(cr, uid, domain + [('state', 'in', ['testing', 'running'])])
+            Build.schedule(cr, uid, build_ids)

             # launch new tests
-            testing = bo.search_count(cr, uid, dom + [('state', '=', 'testing')])
-            pending = bo.search_count(cr, uid, dom + [('state', '=', 'pending')])
+            testing = Build.search_count(cr, uid, domain + [('state', '=', 'testing')])
+            pending = Build.search_count(cr, uid, domain + [('state', '=', 'pending')])
             while testing < repo.testing and pending > 0:
                 # find sticky pending build if any, otherwise, last pending (by id, not by sequence) will do the job
-                pending_ids = bo.search(cr, uid, dom + [('state', '=', 'pending'), ('branch_id.sticky', '=', True)], limit=1)
+                pending_ids = Build.search(cr, uid, domain + [('state', '=', 'pending'), ('branch_id.sticky', '=', True)], limit=1)
                 if not pending_ids:
-                    pending_ids = bo.search(cr, uid, dom + [('state', '=', 'pending')], order="id desc")
-                pending = bo.browse(cr, uid, pending_ids[0])
+                    pending_ids = Build.search(cr, uid, domain + [('state', '=', 'pending')], order="id desc")
+                pending = Build.browse(cr, uid, pending_ids[0])
                 pending.schedule()

                 # compute the number of testing and pending jobs again
-                testing = bo.search_count(cr, uid, dom + [('state', '=', 'testing')])
-                pending = bo.search_count(cr, uid, dom + [('state', '=', 'pending')])
+                testing = Build.search_count(cr, uid, domain + [('state', '=', 'testing')])
+                pending = Build.search_count(cr, uid, domain + [('state', '=', 'pending')])

             # terminate and reap doomed build
-            build_ids = bo.search(cr, uid, dom + [('state', '=', 'running')])
+            build_ids = Build.search(cr, uid, domain + [('state', '=', 'running')])
             # sort builds: the last build of each sticky branch then the rest
             sticky = {}
             non_sticky = []
-            for build in bo.browse(cr, uid, build_ids):
+            for build in Build.browse(cr, uid, build_ids):
                 if build.branch_id.sticky and build.branch_id.id not in sticky:
                     sticky[build.branch_id.id] = build.id
                 else:
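
The sticky/non_sticky split at the end of the hunk above keeps one build per sticky branch at the front of the list of running builds. A toy illustration with made-up ids (Python 2 style, where dict.values() returns a list):

builds = [(12, 'master', True), (11, 'feature-x', False), (10, 'master', True), (9, 'saas-5', True)]
sticky = {}
non_sticky = []
for build_id, branch, is_sticky in builds:   # (build_id, branch, sticky) triples, made up for the example
    if is_sticky and branch not in sticky:
        sticky[branch] = build_id
    else:
        non_sticky.append(build_id)
build_ids = sticky.values() + non_sticky     # one build per sticky branch first, then the rest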
@@ -309,20 +311,20 @@ class runbot_repo(osv.osv):
             build_ids = sticky.values()
             build_ids += non_sticky

             # terminate extra running builds
-            bo.terminate(cr, uid, build_ids[repo.running:])
-            bo.reap(cr, uid, build_ids)
+            Build.terminate(cr, uid, build_ids[repo.running:])
+            Build.reap(cr, uid, build_ids)

     def nginx(self, cr, uid, context=None):
-        v = {}
-        v['port'] = openerp.tools.config['xmlrpc_port']
+        settings = {}
+        settings['port'] = openerp.tools.config['xmlrpc_port']
         nginx_dir = os.path.join(self.root(cr, uid), 'nginx')
-        v['nginx_dir'] = nginx_dir
+        settings['nginx_dir'] = nginx_dir
         ids = self.search(cr, uid, [('nginx','=',True)], order='id')
         if ids:
             build_ids = self.pool['runbot.build'].search(cr, uid, [('repo_id','in',ids), ('state','=','running')])
-            v['builds'] = self.pool['runbot.build'].browse(cr, uid, build_ids)
-            nginx_config = self.pool['ir.ui.view'].render(cr, uid, "runbot.nginx_config", v)
+            settings['builds'] = self.pool['runbot.build'].browse(cr, uid, build_ids)
+            nginx_config = self.pool['ir.ui.view'].render(cr, uid, "runbot.nginx_config", settings)
             mkdirs([nginx_dir])
             open(os.path.join(nginx_dir, 'nginx.conf'),'w').write(nginx_config)
             try:
@@ -335,10 +337,10 @@ class runbot_repo(osv.osv):

     def killall(self, cr, uid, ids=None, context=None):
         # kill switch
-        bo = self.pool['runbot.build']
-        build_ids = bo.search(cr, uid, [('state', 'not in', ['done', 'pending'])])
-        bo.terminate(cr, uid, build_ids)
-        bo.reap(cr, uid, build_ids)
+        Build = self.pool['runbot.build']
+        build_ids = Build.search(cr, uid, [('state', 'not in', ['done', 'pending'])])
+        Build.terminate(cr, uid, build_ids)
+        Build.reap(cr, uid, build_ids)

     def cron(self, cr, uid, ids=None, context=None):
         ids = self.search(cr, uid, [('auto', '=', True)])
@@ -387,6 +389,7 @@ class runbot_build(osv.osv):
         return r

     def _get_time(self, cr, uid, ids, field_name, arg, context=None):
+        """Return the time taken by the tests"""
         r = {}
         for build in self.browse(cr, uid, ids, context=context):
             r[build.id] = 0
@@ -397,6 +400,7 @@ class runbot_build(osv.osv):
         return r

     def _get_age(self, cr, uid, ids, field_name, arg, context=None):
+        """Return the time between job start and now"""
         r = {}
         for build in self.browse(cr, uid, ids, context=context):
             r[build.id] = 0
@@ -441,8 +445,8 @@ class runbot_build(osv.osv):
     }

     def create(self, cr, uid, values, context=None):
-        bid = super(runbot_build, self).create(cr, uid, values, context=context)
-        self.write(cr, uid, [bid], {'sequence' : bid}, context=context)
+        build_id = super(runbot_build, self).create(cr, uid, values, context=context)
+        self.write(cr, uid, [build_id], {'sequence' : build_id}, context=context)

     def reset(self, cr, uid, ids, context=None):
         self.write(cr, uid, ids, { 'state' : 'pending' }, context=context)
@@ -454,7 +458,7 @@ class runbot_build(osv.osv):
         _logger.debug(*l)

     def list_jobs(self):
-        jobs = [i for i in dir(self) if i.startswith('job')]
+        jobs = [job for job in dir(self) if job.startswith('job')]
         jobs.sort()
         return jobs
@@ -524,6 +528,7 @@ class runbot_build(osv.osv):
         openerp.service.db._create_empty_database(dbname)

     def cmd(self, cr, uid, ids, context=None):
+        """Return a list describing the command to start the build"""
         for build in self.browse(cr, uid, ids, context=context):
             # Server
             server_path = build.path("openerp-server")
@@ -536,12 +541,12 @@ class runbot_build(osv.osv):

             # modules
             if build.repo_id.modules:
-                mods = build.repo_id.modules
+                modules = build.repo_id.modules
             else:
                 l = glob.glob(build.path('openerp/addons/*/__init__.py'))
-                mods = set([os.path.basename(os.path.dirname(i)) for i in l])
-                mods = mods - set(['auth_ldap', 'document_ftp', 'hw_escpos', 'hw_proxy', 'hw_scanner', 'base_gengo', 'website_gengo'])
-                mods = ",".join(list(mods))
+                modules = set([os.path.basename(os.path.dirname(i)) for i in l])
+                modules = modules - set(['auth_ldap', 'document_ftp', 'hw_escpos', 'hw_proxy', 'hw_scanner', 'base_gengo', 'website_gengo'])
+                modules = ",".join(list(modules))

             # commandline
             cmd = [
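
The else branch above derives the module list from the addons directory layout. Roughly, with a made-up build path:

import glob
import os

paths = glob.glob('/tmp/build-1234/openerp/addons/*/__init__.py')     # hypothetical build checkout
modules = set([os.path.basename(os.path.dirname(p)) for p in paths])  # addon directory names
modules = modules - set(['auth_ldap', 'document_ftp'])                # minus excluded modules, as above
modules = ",".join(list(modules))                                     # e.g. "base,web,website"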
@@ -564,7 +569,7 @@ class runbot_build(osv.osv):
         #self.run_log(cmd, logfile=self.test_all_path)
         #run(["coverage","html","-d",self.coverage_base_path,"--ignore-errors","--include=*.py"],env={'COVERAGE_FILE': self.coverage_file_path})

-        return cmd, mods
+        return cmd, modules

     def spawn(self, cmd, lock_path, log_path, cpu_limit=None, shell=False, showstderr=False):
         def preexec_fn():
@@ -588,6 +593,7 @@ class runbot_build(osv.osv):
         return p.pid

     def github_status(self, cr, uid, ids, context=None):
+        """Notify github of failed/successful builds"""
         for build in self.browse(cr, uid, ids, context=context):
             desc = "runbot build %s" % (build.dest,)
             if build.state == 'testing':
@@ -691,6 +697,7 @@ class runbot_build(osv.osv):
         return self.spawn(cmd, lock_path, log_path, cpu_limit=None, showstderr=True)

     def force(self, cr, uid, ids, context=None):
+        """Force a rebuild"""
         for build in self.browse(cr, uid, ids, context=context):
             max_id = self.search(cr, uid, [('repo_id','=',build.repo_id.id)], order='id desc', limit=1)[0]
             # Force it now
@@ -874,9 +881,9 @@ class RunbotController(http.Controller):
             v['branches'] = branches

             # stats
-            v['testing'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')], count=True)
-            v['running'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','running')], count=True)
-            v['pending'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','pending')], count=True)
+            v['testing'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')])
+            v['running'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','running')])
+            v['pending'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','pending')])

             v.update({
                 'search': search,
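
The switch from search(..., count=True) to search_count(...) above is behaviour-preserving: in the old-API ORM used here both return the number of matching records, the dedicated method simply states the intent. A short usage sketch reusing the variables from the handler above:

# the two calls below are interchangeable ways to count matching builds
n_testing_old = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')], count=True)
n_testing_new = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')])
assert n_testing_old == n_testing_new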