diff --git a/runbot/runbot.py b/runbot/runbot.py
index adbd2f83..e39a2de1 100644
--- a/runbot/runbot.py
+++ b/runbot/runbot.py
@@ -35,63 +35,62 @@ _logger = logging.getLogger(__name__)
 
 #----------------------------------------------------------
 def log(*l, **kw):
-    out = []
-    for i in l:
-        if not isinstance(i, basestring):
-            i = repr(i)
-        out.append(i)
-    out += ["%s=%r" % (k, v) for k, v in kw.items()]
+    out = [i if isinstance(i, basestring) else repr(i) for i in l] + \
+          ["%s=%r" % (k, v) for k, v in kw.items()]
     _logger.debug(' '.join(out))
 
-def dashes(s):
+def dashes(string):
+    """Sanitize the input string"""
     for i in '~":\'':
-        s = s.replace(i, "")
+        string = string.replace(i, "")
     for i in '/_. ':
-        s = s.replace(i, "-")
-    return s
+        string = string.replace(i, "-")
+    return string
 
 def mkdirs(dirs):
-    for i in dirs:
-        if not os.path.exists(i):
-            os.makedirs(i)
+    for d in dirs:
+        if not os.path.exists(d):
+            os.makedirs(d)
 
-def grep(filename, s):
+def grep(filename, string):
     if os.path.isfile(filename):
-        return open(filename).read().find(s) != -1
+        return open(filename).read().find(string) != -1
     return False
 
 _re_error = r'^(?:\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ (?:ERROR|CRITICAL) )|(?:Traceback \(most recent call last\):)$'
 _re_warning = r'^\d{4}-\d\d-\d\d \d\d:\d\d:\d\d,\d{3} \d+ WARNING '
 
-def rfind(filename, patern):
+def rfind(filename, pattern):
+    """Determine if something in filename matches the pattern"""
     if os.path.isfile(filename):
-        p = re.compile(patern, re.M)
+        regexp = re.compile(pattern, re.M)
         with open(filename, 'r') as f:
-            if p.findall(f.read()):
+            if regexp.findall(f.read()):
                 return True
     return False
 
-def lock(name):
-    fd = os.open(name, os.O_CREAT | os.O_RDWR, 0600)
+def lock(filename):
+    fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0600)
     fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
 
-def locked(name):
-    r = False
+def locked(filename):
+    result = False
     try:
-        fd = os.open(name, os.O_CREAT | os.O_RDWR, 0600)
+        fd = os.open(filename, os.O_CREAT | os.O_RDWR, 0600)
         try:
             fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
         except IOError:
-            r = True
+            result = True
         os.close(fd)
     except OSError:
-        r = False
-    return r
+        result = False
+    return result
 
 def nowait():
     signal.signal(signal.SIGCHLD, signal.SIG_IGN)
 
 def run(l, env=None):
+    """Run a command described by l in environment env"""
     log("run", l)
     env = dict(os.environ, **env) if env else None
     if isinstance(l, list):
@@ -111,14 +110,16 @@ def now():
     return time.strftime(openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT)
 
-def dt2time(dt):
-    return time.mktime(time.strptime(dt, openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT))
+def dt2time(datetime):
+    """Convert datetime to time"""
+    return time.mktime(time.strptime(datetime, openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT))
 
-def s2human(t):
-    for m,u in [(86400,'d'),(3600,'h'),(60,'m')]:
-        if t>=m:
-            return str(int(t/m))+u
-    return str(int(t))+"s"
+def s2human(time):
+    """Convert a time in seconds into a human readable string"""
+    for delay, desc in [(86400,'d'),(3600,'h'),(60,'m')]:
+        if time >= delay:
+            return str(int(time / delay)) + desc
+    return str(int(time)) + "s"
 
 #----------------------------------------------------------
 # RunBot Models
 #----------------------------------------------------------
@@ -129,22 +130,22 @@ class runbot_repo(osv.osv):
     _order = 'name'
 
     def _get_path(self, cr, uid, ids, field_name, arg, context=None):
-        wd = self.root(cr, uid)
-        r = {}
+        root = self.root(cr, uid)
+        result = {}
         for repo in self.browse(cr, uid, ids, context=context):
             name = repo.name
             for i in '@:/':
                 name = name.replace(i, '_')
-            r[repo.id] = os.path.join(wd, 'repo', name)
-        return r
+            result[repo.id] = os.path.join(root, 'repo', name)
+        return result
 
     def _get_base(self, cr, uid, ids, field_name, arg, context=None):
-        r = {}
+        result = {}
         for repo in self.browse(cr, uid, ids, context=context):
             name = re.sub('.+@', '', repo.name)
             name = name.replace(':','/')
-            r[repo.id] = name
-        return r
+            result[repo.id] = name
+        return result
 
     _columns = {
         'name': fields.char('Repository', required=True),
@@ -170,11 +171,12 @@ class runbot_repo(osv.osv):
         return domain
 
     def root(self, cr, uid, context=None):
+        """Return root directory of repository"""
         default = os.path.join(os.path.dirname(__file__), 'static')
-        root = self.pool.get('ir.config_parameter').get_param(cr, uid, 'runbot.root', default)
-        return root
+        return self.pool.get('ir.config_parameter').get_param(cr, uid, 'runbot.root', default)
 
     def git(self, cr, uid, ids, cmd, context=None):
+        """Execute git command cmd"""
         for repo in self.browse(cr, uid, ids, context=context):
             cmd = ['git', '--git-dir=%s' % repo.path] + cmd
             _logger.info("git: %s", ' '.join(cmd))
@@ -189,22 +191,23 @@ class runbot_repo(osv.osv):
             p2.communicate()[0]
 
     def github(self, cr, uid, ids, url, payload=None, delete=False, context=None):
+        """Send an HTTP request to github and return the JSON response"""
         for repo in self.browse(cr, uid, ids, context=context):
-            mo = re.search('([^/]+)/([^/]+)/([^/]+)', repo.base)
-            if mo:
-                url = url.replace(':owner', mo.group(2))
-                url = url.replace(':repo', mo.group(3))
-                url = 'https://api.%s%s' % (mo.group(1),url)
-                s = requests.Session()
-                s.auth = (repo.token,'x-oauth-basic')
-                s.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
+            match_object = re.search('([^/]+)/([^/]+)/([^/]+)', repo.base)
+            if match_object:
+                url = url.replace(':owner', match_object.group(2))
+                url = url.replace(':repo', match_object.group(3))
+                url = 'https://api.%s%s' % (match_object.group(1),url)
+                session = requests.Session()
+                session.auth = (repo.token,'x-oauth-basic')
+                session.headers.update({'Accept': 'application/vnd.github.she-hulk-preview+json'})
                 if payload:
-                    r = s.post(url, data=simplejson.dumps(payload))
+                    response = session.post(url, data=simplejson.dumps(payload))
                 elif delete:
-                    r = s.delete(url)
+                    response = session.delete(url)
                 else:
-                    r = s.get(url)
-                return r.json()
+                    response = session.get(url)
+                return response.json()
 
     def update(self, cr, uid, ids, context=None):
         for repo in self.browse(cr, uid, ids, context=context):
@@ -224,18 +227,17 @@ class runbot_repo(osv.osv):
             repo.git(['fetch', '-p', 'origin', '+refs/pull/*/head:refs/pull/*'])
 
             fields = ['refname','objectname','authordate:iso8601','authorname','subject']
-            fmt = "%00".join(["%("+i+")" for i in fields])
-            out = repo.git(['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/heads', 'refs/pull'])
-            out = out.strip()
-            refs = []
-            for l in out.split('\n'):
-                ref = []
-                for i in l.split('\x00'):
-                    try:
-                        ref.append(i.decode('utf-8'))
-                    except UnicodeDecodeError:
-                        ref.append('')
-                refs.append(ref)
+            fmt = "%00".join(["%("+field+")" for field in fields])
+            git_refs = repo.git(['for-each-ref', '--format', fmt, '--sort=-committerdate', 'refs/heads', 'refs/pull'])
+            git_refs = git_refs.strip()
+
+            def decode_utf(string):
+                try:
+                    return string.decode('utf-8')
+                except UnicodeDecodeError:
+                    return ''
+            refs = [[decode_utf(field) for field in line.split('\x00')] for line in git_refs.split('\n')]
+
             for name, sha, date, author, subject in refs:
                 # create or get branch
                 branch_ids = self.pool['runbot.branch'].search(cr, uid, [('repo_id', '=', repo.id), ('name', '=', name)])
@@ -256,7 +258,7 @@ class runbot_repo(osv.osv):
                     Build.write(cr, uid, to_be_skipped_ids, {'state': 'done', 'result': 'skipped'})
 
                 _logger.debug('repo %s branch %s new build found revno %s', branch.repo_id.name, branch.name, sha)
-                v = {
+                build_info = {
                     'branch_id': branch.id,
                     'name': sha,
                     'author': author,
@@ -271,37 +273,37 @@ class runbot_repo(osv.osv):
 
     def scheduler(self, cr, uid, ids=None, context=None):
         for repo in self.browse(cr, uid, ids, context=context):
-            bo = self.pool['runbot.build']
-            dom = [('repo_id', '=', repo.id)]
+            Build = self.pool['runbot.build']
+            domain = [('repo_id', '=', repo.id)]
 
-            # schedule jobs
-            build_ids = bo.search(cr, uid, dom + [('state', 'in', ['testing', 'running'])])
-            bo.schedule(cr, uid, build_ids)
+            # schedule jobs (transitions testing -> running, kill jobs, ...)
+            build_ids = Build.search(cr, uid, domain + [('state', 'in', ['testing', 'running'])])
+            Build.schedule(cr, uid, build_ids)
 
             # launch new tests
-            testing = bo.search_count(cr, uid, dom + [('state', '=', 'testing')])
-            pending = bo.search_count(cr, uid, dom + [('state', '=', 'pending')])
+            testing = Build.search_count(cr, uid, domain + [('state', '=', 'testing')])
+            pending = Build.search_count(cr, uid, domain + [('state', '=', 'pending')])
             while testing < repo.testing and pending > 0:
                 # find sticky pending build if any, otherwise, last pending (by id, not by sequence) will do the job
-                pending_ids = bo.search(cr, uid, dom + [('state', '=', 'pending'), ('branch_id.sticky', '=', True)], limit=1)
+                pending_ids = Build.search(cr, uid, domain + [('state', '=', 'pending'), ('branch_id.sticky', '=', True)], limit=1)
                 if not pending_ids:
-                    pending_ids = bo.search(cr, uid, dom + [('state', '=', 'pending')], order="id desc")
+                    pending_ids = Build.search(cr, uid, domain + [('state', '=', 'pending')], order="id desc")
 
-                pending = bo.browse(cr, uid, pending_ids[0])
+                pending = Build.browse(cr, uid, pending_ids[0])
                 pending.schedule()
 
                 # compute the number of testing and pending jobs again
-                testing = bo.search_count(cr, uid, dom + [('state', '=', 'testing')])
-                pending = bo.search_count(cr, uid, dom + [('state', '=', 'pending')])
+                testing = Build.search_count(cr, uid, domain + [('state', '=', 'testing')])
+                pending = Build.search_count(cr, uid, domain + [('state', '=', 'pending')])
 
             # terminate and reap doomed build
-            build_ids = bo.search(cr, uid, dom + [('state', '=', 'running')])
+            build_ids = Build.search(cr, uid, domain + [('state', '=', 'running')])
             # sort builds: the last build of each sticky branch then the rest
             sticky = {}
             non_sticky = []
-            for build in bo.browse(cr, uid, build_ids):
+            for build in Build.browse(cr, uid, build_ids):
                 if build.branch_id.sticky and build.branch_id.id not in sticky:
                     sticky[build.branch_id.id] = build.id
                 else:
@@ -309,20 +311,20 @@ class runbot_repo(osv.osv):
             build_ids = sticky.values()
             build_ids += non_sticky
             # terminate extra running builds
-            bo.terminate(cr, uid, build_ids[repo.running:])
-            bo.reap(cr, uid, build_ids)
+            Build.terminate(cr, uid, build_ids[repo.running:])
+            Build.reap(cr, uid, build_ids)
 
     def nginx(self, cr, uid, context=None):
-        v = {}
-        v['port'] = openerp.tools.config['xmlrpc_port']
+        settings = {}
+        settings['port'] = openerp.tools.config['xmlrpc_port']
         nginx_dir = os.path.join(self.root(cr, uid), 'nginx')
-        v['nginx_dir'] = nginx_dir
+        settings['nginx_dir'] = nginx_dir
         ids = self.search(cr, uid, [('nginx','=',True)], order='id')
         if ids:
             build_ids = self.pool['runbot.build'].search(cr, uid, [('repo_id','in',ids), ('state','=','running')])
-            v['builds'] = self.pool['runbot.build'].browse(cr, uid, build_ids)
+            settings['builds'] = self.pool['runbot.build'].browse(cr, uid, build_ids)
 
-            nginx_config = self.pool['ir.ui.view'].render(cr, uid, "runbot.nginx_config", v)
+            nginx_config = self.pool['ir.ui.view'].render(cr, uid, "runbot.nginx_config", settings)
             mkdirs([nginx_dir])
             open(os.path.join(nginx_dir, 'nginx.conf'),'w').write(nginx_config)
             try:
@@ -335,10 +337,10 @@ class runbot_repo(osv.osv):
 
     def killall(self, cr, uid, ids=None, context=None):
         # kill switch
-        bo = self.pool['runbot.build']
-        build_ids = bo.search(cr, uid, [('state', 'not in', ['done', 'pending'])])
-        bo.terminate(cr, uid, build_ids)
-        bo.reap(cr, uid, build_ids)
+        Build = self.pool['runbot.build']
+        build_ids = Build.search(cr, uid, [('state', 'not in', ['done', 'pending'])])
+        Build.terminate(cr, uid, build_ids)
+        Build.reap(cr, uid, build_ids)
 
     def cron(self, cr, uid, ids=None, context=None):
         ids = self.search(cr, uid, [('auto', '=', True)])
@@ -387,6 +389,7 @@ class runbot_build(osv.osv):
         return r
 
     def _get_time(self, cr, uid, ids, field_name, arg, context=None):
+        """Return the time taken by the tests"""
         r = {}
         for build in self.browse(cr, uid, ids, context=context):
             r[build.id] = 0
@@ -397,6 +400,7 @@ class runbot_build(osv.osv):
         return r
 
     def _get_age(self, cr, uid, ids, field_name, arg, context=None):
+        """Return the time between job start and now"""
         r = {}
         for build in self.browse(cr, uid, ids, context=context):
             r[build.id] = 0
@@ -441,8 +445,8 @@ class runbot_build(osv.osv):
     }
 
     def create(self, cr, uid, values, context=None):
-        bid = super(runbot_build, self).create(cr, uid, values, context=context)
-        self.write(cr, uid, [bid], {'sequence' : bid}, context=context)
+        build_id = super(runbot_build, self).create(cr, uid, values, context=context)
+        self.write(cr, uid, [build_id], {'sequence' : build_id}, context=context)
 
     def reset(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, { 'state' : 'pending' }, context=context)
@@ -454,7 +458,7 @@ class runbot_build(osv.osv):
         _logger.debug(*l)
 
     def list_jobs(self):
-        jobs = [i for i in dir(self) if i.startswith('job')]
+        jobs = [job for job in dir(self) if job.startswith('job')]
         jobs.sort()
         return jobs
@@ -524,6 +528,7 @@ class runbot_build(osv.osv):
         openerp.service.db._create_empty_database(dbname)
 
     def cmd(self, cr, uid, ids, context=None):
+        """Return a list describing the command to start the build"""
         for build in self.browse(cr, uid, ids, context=context):
             # Server
             server_path = build.path("openerp-server")
@@ -536,12 +541,12 @@ class runbot_build(osv.osv):
 
             # modules
             if build.repo_id.modules:
-                mods = build.repo_id.modules
+                modules = build.repo_id.modules
             else:
                 l = glob.glob(build.path('openerp/addons/*/__init__.py'))
-                mods = set([os.path.basename(os.path.dirname(i)) for i in l])
-                mods = mods - set(['auth_ldap', 'document_ftp', 'hw_escpos', 'hw_proxy', 'hw_scanner', 'base_gengo', 'website_gengo'])
-                mods = ",".join(list(mods))
+                modules = set([os.path.basename(os.path.dirname(i)) for i in l])
+                modules = modules - set(['auth_ldap', 'document_ftp', 'hw_escpos', 'hw_proxy', 'hw_scanner', 'base_gengo', 'website_gengo'])
+                modules = ",".join(list(modules))
 
             # commandline
             cmd = [
@@ -564,7 +569,7 @@ class runbot_build(osv.osv):
             #self.run_log(cmd, logfile=self.test_all_path)
             #run(["coverage","html","-d",self.coverage_base_path,"--ignore-errors","--include=*.py"],env={'COVERAGE_FILE': self.coverage_file_path})
 
-            return cmd, mods
+            return cmd, modules
 
     def spawn(self, cmd, lock_path, log_path, cpu_limit=None, shell=False, showstderr=False):
         def preexec_fn():
@@ -588,6 +593,7 @@ class runbot_build(osv.osv):
         return p.pid
 
     def github_status(self, cr, uid, ids, context=None):
+        """Notify github of failed/successful builds"""
         for build in self.browse(cr, uid, ids, context=context):
             desc = "runbot build %s" % (build.dest,)
             if build.state == 'testing':
@@ -691,6 +697,7 @@ class runbot_build(osv.osv):
         return self.spawn(cmd, lock_path, log_path, cpu_limit=None, showstderr=True)
 
     def force(self, cr, uid, ids, context=None):
+        """Force a rebuild"""
         for build in self.browse(cr, uid, ids, context=context):
             max_id = self.search(cr, uid, [('repo_id','=',build.repo_id.id)], order='id desc', limit=1)[0]
             # Force it now
@@ -874,9 +881,9 @@ class RunbotController(http.Controller):
             v['branches'] = branches
 
             # stats
-            v['testing'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')], count=True)
-            v['running'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','running')], count=True)
-            v['pending'] = build_obj.search(cr, uid, [('repo_id','=',repo.id), ('state','=','pending')], count=True)
+            v['testing'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','testing')])
+            v['running'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','running')])
+            v['pending'] = build_obj.search_count(cr, uid, [('repo_id','=',repo.id), ('state','=','pending')])
 
         v.update({
            'search': search,