[FIX] runbot_merge, forwardport: create_single is deprecated

Update a bunch of `create` overrides to work in batch. Also modernize a few
`super()` calls that were unnecessarily written in legacy style.
This commit is contained in:
Xavier Morel 2024-11-19 14:58:41 +01:00
parent c7523c0429
commit 667aa69f5b
2 changed files with 121 additions and 82 deletions

View File

@ -185,31 +185,55 @@ class PullRequests(models.Model):
reminder_backoff_factor = fields.Integer(default=-4, group_operator=None) reminder_backoff_factor = fields.Integer(default=-4, group_operator=None)
@api.model_create_single @api.model_create_multi
def create(self, vals): def create(self, vals_list):
# PR opened event always creates a new PR, override so we can precreate PRs # we're doing a lot of weird processing so fast-path this case even if
existing = self.search([ # unlikely
('repository', '=', vals['repository']), if not vals_list:
('number', '=', vals['number']), return self.browse()
])
if existing:
return existing
if vals.get('parent_id') and 'source_id' not in vals: existings = []
vals['source_id'] = self.browse(vals['parent_id']).root_id.id to_create = []
pr = super().create(vals) for vals in vals_list:
# PR opened event always creates a new PR, override so we can precreate PRs
if existing := self.search([
('repository', '=', vals['repository']),
('number', '=', vals['number']),
]):
existings.append(existing.id)
else:
existings.append(None)
to_create.append(vals)
if vals.get('parent_id') and 'source_id' not in vals:
vals['source_id'] = self.browse(vals['parent_id']).root_id.id
# added a new PR to an already forward-ported batch: port the PR # no PR to create, return the existing ones
if self.env['runbot_merge.batch'].search_count([ if not to_create:
('parent_id', '=', pr.batch_id.id), return self.browse(existings)
]):
self.env['forwardport.batches'].create({
'batch_id': pr.batch_id.id,
'source': 'complete',
'pr_id': pr.id,
})
return pr prs = super().create(to_create)
for pr in prs:
# added a new PR to an already forward-ported batch: port the PR
if self.env['runbot_merge.batch'].search_count([
('parent_id', '=', pr.batch_id.id),
]):
self.env['forwardport.batches'].create({
'batch_id': pr.batch_id.id,
'source': 'complete',
'pr_id': pr.id,
})
# no existing PR, return all the created ones
if len(to_create) == len(vals_list):
return prs
# merge existing and created
prs = iter(prs)
return self.browse((
existing or next(prs).id
for existing in existings
))
def write(self, vals): def write(self, vals):
# if the PR's head is updated, detach (should split off the FP lines as this is not the original code) # if the PR's head is updated, detach (should split off the FP lines as this is not the original code)

View File

@ -82,15 +82,16 @@ class Repository(models.Model):
All substitutions are tentatively applied sequentially to the input. All substitutions are tentatively applied sequentially to the input.
""") """)
@api.model @api.model_create_multi
def create(self, vals): def create(self, vals_list):
if 'status_ids' in vals: for vals in vals_list:
return super().create(vals) if 'status_ids' in vals:
continue
st = vals.pop('required_statuses', 'legal/cla,ci/runbot') if st := vals.pop('required_statuses', 'legal/cla,ci/runbot'):
if st: vals['status_ids'] = [(0, 0, {'context': c}) for c in st.split(',')]
vals['status_ids'] = [(0, 0, {'context': c}) for c in st.split(',')]
return super().create(vals) return super().create(vals_list)
def write(self, vals): def write(self, vals):
st = vals.pop('required_statuses', None) st = vals.pop('required_statuses', None)
@ -102,7 +103,7 @@ All substitutions are tentatively applied sequentially to the input.
return github.GH(self.project_id[token_field], self.name) return github.GH(self.project_id[token_field], self.name)
def _auto_init(self): def _auto_init(self):
res = super(Repository, self)._auto_init() res = super()._auto_init()
tools.create_unique_index( tools.create_unique_index(
self._cr, 'runbot_merge_unique_repo', self._table, ['name']) self._cr, 'runbot_merge_unique_repo', self._table, ['name'])
return res return res
@ -294,7 +295,7 @@ class Branch(models.Model):
staging_enabled = fields.Boolean(default=True) staging_enabled = fields.Boolean(default=True)
def _auto_init(self): def _auto_init(self):
res = super(Branch, self)._auto_init() res = super()._auto_init()
tools.create_unique_index( tools.create_unique_index(
self._cr, 'runbot_merge_unique_branch_per_repo', self._cr, 'runbot_merge_unique_branch_per_repo',
self._table, ['name', 'project_id']) self._table, ['name', 'project_id'])
@ -1359,36 +1360,44 @@ For your own safety I've ignored *everything in your entire comment*.
]) ])
return batch or batch.create({}) return batch or batch.create({})
@api.model @api.model_create_multi
def create(self, vals): def create(self, vals_list):
batch = self._get_batch(target=vals['target'], label=vals['label']) batches = {}
vals['batch_id'] = batch.id for vals in vals_list:
if 'limit_id' not in vals: batch_key = vals['target'], vals['label']
limits = {p.limit_id for p in batch.prs} batch = batches.get(batch_key)
if len(limits) == 1: if batch is None:
vals['limit_id'] = limits.pop().id batch = batches[batch_key] = self._get_batch(target=vals['target'], label=vals['label'])
elif limits: vals['batch_id'] = batch.id
repo = self.env['runbot_merge.repository'].browse(vals['repository'])
_logger.warning( if 'limit_id' not in vals:
"Unable to set limit on %s#%s: found multiple limits in batch (%s)", limits = {p.limit_id for p in batch.prs}
repo.name, vals['number'], if len(limits) == 1:
', '.join( vals['limit_id'] = limits.pop().id
f'{p.display_name} => {p.limit_id.name}' elif limits:
for p in batch.prs repo = self.env['runbot_merge.repository'].browse(vals['repository'])
_logger.warning(
"Unable to set limit on %s#%s: found multiple limits in batch (%s)",
repo.name, vals['number'],
', '.join(
f'{p.display_name} => {p.limit_id.name}'
for p in batch.prs
)
) )
prs = super().create(vals_list)
for pr in prs:
c = self.env['runbot_merge.commit'].search([('sha', '=', pr.head)])
pr._validate(c.statuses)
if pr.state not in ('closed', 'merged'):
self.env.ref('runbot_merge.pr.created')._send(
repository=pr.repository,
pull_request=pr.number,
format_args={'pr': pr},
) )
return prs
pr = super().create(vals)
c = self.env['runbot_merge.commit'].search([('sha', '=', pr.head)])
pr._validate(c.statuses)
if pr.state not in ('closed', 'merged'):
self.env.ref('runbot_merge.pr.created')._send(
repository=pr.repository,
pull_request=pr.number,
format_args={'pr': pr},
)
return pr
def _from_gh(self, description, author=None, branch=None, repo=None): def _from_gh(self, description, author=None, branch=None, repo=None):
if repo is None: if repo is None:
@ -1457,11 +1466,14 @@ For your own safety I've ignored *everything in your entire comment*.
newhead = vals.get('head') newhead = vals.get('head')
if newhead: if newhead:
if pid := self.env.cr.precommit.data.get('change-author'): authors = self.env.cr.precommit.data.get(f'mail.tracking.author.{self._name}', {})
writer = self.env['res.partner'].browse(pid) for p in self:
else: if pid := authors.get(p.id):
writer = self.env.user.partner_id writer = self.env['res.partner'].browse(pid)
self.unstage("updated by %s", writer.github_login or writer.name) else:
writer = self.env.user.partner_id
p.unstage("updated by %s", writer.github_login or writer.name)
# this can be batched
c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)]) c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)])
self._validate(c.statuses) self._validate(c.statuses)
return w return w
@ -1754,18 +1766,20 @@ class Tagging(models.Model):
tags_remove = fields.Char(required=True, default='[]') tags_remove = fields.Char(required=True, default='[]')
tags_add = fields.Char(required=True, default='[]') tags_add = fields.Char(required=True, default='[]')
def create(self, values): @api.model_create_multi
if values.pop('state_from', None): def create(self, vals_list):
values['tags_remove'] = ALL_TAGS for values in vals_list:
if 'state_to' in values: if values.pop('state_from', None):
values['tags_add'] = _TAGS[values.pop('state_to')] values['tags_remove'] = ALL_TAGS
if not isinstance(values.get('tags_remove', ''), str): if 'state_to' in values:
values['tags_remove'] = json.dumps(list(values['tags_remove'])) values['tags_add'] = _TAGS[values.pop('state_to')]
if not isinstance(values.get('tags_add', ''), str): if not isinstance(values.get('tags_remove', ''), str):
values['tags_add'] = json.dumps(list(values['tags_add'])) values['tags_remove'] = json.dumps(list(values['tags_remove']))
if values: if not isinstance(values.get('tags_add', ''), str):
values['tags_add'] = json.dumps(list(values['tags_add']))
if any(vals_list):
self.env.ref('runbot_merge.labels_cron')._trigger() self.env.ref('runbot_merge.labels_cron')._trigger()
return super().create(values) return super().create(vals_list)
def _send(self): def _send(self):
# noinspection SqlResolve # noinspection SqlResolve
@ -1969,16 +1983,17 @@ class Commit(models.Model):
commit_ids = fields.Many2many('runbot_merge.stagings', relation='runbot_merge_stagings_commits', column2='staging_id', column1='commit_id') commit_ids = fields.Many2many('runbot_merge.stagings', relation='runbot_merge_stagings_commits', column2='staging_id', column1='commit_id')
pull_requests = fields.One2many('runbot_merge.pull_requests', compute='_compute_prs') pull_requests = fields.One2many('runbot_merge.pull_requests', compute='_compute_prs')
@api.model_create_single @api.model_create_multi
def create(self, values): def create(self, vals_list):
values['to_check'] = True for values in vals_list:
r = super(Commit, self).create(values) values['to_check'] = True
r = super().create(vals_list)
self.env.ref("runbot_merge.process_updated_commits")._trigger() self.env.ref("runbot_merge.process_updated_commits")._trigger()
return r return r
def write(self, values): def write(self, values):
values.setdefault('to_check', True) values.setdefault('to_check', True)
r = super(Commit, self).write(values) r = super().write(values)
if values['to_check']: if values['to_check']:
self.env.ref("runbot_merge.process_updated_commits")._trigger() self.env.ref("runbot_merge.process_updated_commits")._trigger()
return r return r
@ -2023,7 +2038,7 @@ class Commit(models.Model):
] ]
def _auto_init(self): def _auto_init(self):
res = super(Commit, self)._auto_init() res = super()._auto_init()
self._cr.execute(""" self._cr.execute("""
CREATE INDEX IF NOT EXISTS runbot_merge_unique_statuses CREATE INDEX IF NOT EXISTS runbot_merge_unique_statuses
ON runbot_merge_commit ON runbot_merge_commit