Mirror of https://github.com/odoo/runbot.git (synced 2025-03-15 23:45:44 +07:00)
[FIX] runbot_merge, forwardport: create_single is deprecated
Update a bunch of `create` overrides to work in batch. Also fix a few `super()` calls that were unnecessarily written in legacy style.
commit 667aa69f5b
parent c7523c0429
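For context on the deprecation named in the commit message: an `@api.model_create_single` override receives one `vals` dict per call, while an `@api.model_create_multi` override receives the whole list of value dicts at once, which is what the ORM now prefers. A minimal sketch of the conversion pattern, using a made-up `example.thing` model rather than anything from this commit:

from odoo import api, fields, models

class Thing(models.Model):
    _name = 'example.thing'

    name = fields.Char(required=True)

    # Deprecated style: the override is wrapped so it runs once per record.
    # @api.model_create_single
    # def create(self, vals):
    #     vals.setdefault('name', 'unnamed')
    #     return super().create(vals)

    # Batch style: the override is called once with the full list of dicts.
    @api.model_create_multi
    def create(self, vals_list):
        for vals in vals_list:
            vals.setdefault('name', 'unnamed')
        return super().create(vals_list)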
@@ -185,20 +185,35 @@ class PullRequests(models.Model):
     reminder_backoff_factor = fields.Integer(default=-4, group_operator=None)
 
-    @api.model_create_single
-    def create(self, vals):
+    @api.model_create_multi
+    def create(self, vals_list):
+        # we're doing a lot of weird processing so fast-path this case even if
+        # unlikely
+        if not vals_list:
+            return self.browse()
+
+        existings = []
+        to_create = []
+        for vals in vals_list:
             # PR opened event always creates a new PR, override so we can precreate PRs
-        existing = self.search([
+            if existing := self.search([
                 ('repository', '=', vals['repository']),
                 ('number', '=', vals['number']),
-        ])
-        if existing:
-            return existing
-
+            ]):
+                existings.append(existing.id)
+            else:
+                existings.append(None)
+                to_create.append(vals)
                 if vals.get('parent_id') and 'source_id' not in vals:
                     vals['source_id'] = self.browse(vals['parent_id']).root_id.id
-        pr = super().create(vals)
 
+        # no PR to create, return the existing ones
+        if not to_create:
+            return self.browse(existings)
+
+        prs = super().create(to_create)
+
+        for pr in prs:
             # added a new PR to an already forward-ported batch: port the PR
             if self.env['runbot_merge.batch'].search_count([
                 ('parent_id', '=', pr.batch_id.id),
@@ -209,7 +224,16 @@ class PullRequests(models.Model):
                     'pr_id': pr.id,
                 })
 
-        return pr
+        # no existing PR, return all the created ones
+        if len(to_create) == len(vals_list):
+            return prs
+
+        # merge existing and created
+        prs = iter(prs)
+        return self.browse((
+            existing or next(prs).id
+            for existing in existings
+        ))
 
     def write(self, vals):
         # if the PR's head is updated, detach (should split off the FP lines as this is not the original code)
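The rewritten `PullRequests.create` above records, per input position, either the id of an already-existing PR or `None`, creates only the missing ones, then zips the new records back into the `None` slots. A standalone illustration of that interleaving step, with made-up ids:

# Pre-existing ids in input order; None marks the positions that were created.
existings = [101, None, 205, None]
# Newly created ids, in the same order as the None positions.
created_ids = [301, 302]

created = iter(created_ids)
merged = [existing or next(created) for existing in existings]
assert merged == [101, 301, 205, 302]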
@@ -82,15 +82,16 @@ class Repository(models.Model):
 All substitutions are tentatively applied sequentially to the input.
 """)
 
-    @api.model
-    def create(self, vals):
+    @api.model_create_multi
+    def create(self, vals_list):
+        for vals in vals_list:
             if 'status_ids' in vals:
-            return super().create(vals)
+                continue
 
-        st = vals.pop('required_statuses', 'legal/cla,ci/runbot')
-        if st:
+            if st := vals.pop('required_statuses', 'legal/cla,ci/runbot'):
                 vals['status_ids'] = [(0, 0, {'context': c}) for c in st.split(',')]
-        return super().create(vals)
+
+        return super().create(vals_list)
 
     def write(self, vals):
         st = vals.pop('required_statuses', None)
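The `status_ids` assignment above uses Odoo's x2many command triples, where `(0, 0, values)` means "create a new linked record". Roughly what the default expands to (illustrative values only):

st = 'legal/cla,ci/runbot'
commands = [(0, 0, {'context': c}) for c in st.split(',')]
# commands == [(0, 0, {'context': 'legal/cla'}), (0, 0, {'context': 'ci/runbot'})]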
@@ -102,7 +103,7 @@ All substitutions are tentatively applied sequentially to the input.
         return github.GH(self.project_id[token_field], self.name)
 
     def _auto_init(self):
-        res = super(Repository, self)._auto_init()
+        res = super()._auto_init()
         tools.create_unique_index(
             self._cr, 'runbot_merge_unique_repo', self._table, ['name'])
         return res
@@ -294,7 +295,7 @@ class Branch(models.Model):
     staging_enabled = fields.Boolean(default=True)
 
     def _auto_init(self):
-        res = super(Branch, self)._auto_init()
+        res = super()._auto_init()
         tools.create_unique_index(
             self._cr, 'runbot_merge_unique_branch_per_repo',
             self._table, ['name', 'project_id'])
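Both `_auto_init` overrides above follow the same recipe: let the ORM set the table up, then add a uniqueness constraint the field declarations alone cannot express, now with the zero-argument `super()` form. A sketch of that pattern on a hypothetical model:

from odoo import fields, models, tools

class Thing(models.Model):
    _name = 'example.thing'

    name = fields.Char(required=True)

    def _auto_init(self):
        # Modern zero-argument super(), as the commit switches to.
        res = super()._auto_init()
        # Enforce uniqueness at the database level.
        tools.create_unique_index(
            self._cr, 'example_thing_unique_name', self._table, ['name'])
        return res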
@@ -1359,10 +1360,16 @@ For your own safety I've ignored *everything in your entire comment*.
         ])
         return batch or batch.create({})
 
-    @api.model
-    def create(self, vals):
-        batch = self._get_batch(target=vals['target'], label=vals['label'])
+    @api.model_create_multi
+    def create(self, vals_list):
+        batches = {}
+        for vals in vals_list:
+            batch_key = vals['target'], vals['label']
+            batch = batches.get(batch_key)
+            if batch is None:
+                batch = batches[batch_key] = self._get_batch(target=vals['target'], label=vals['label'])
             vals['batch_id'] = batch.id
 
             if 'limit_id' not in vals:
                 limits = {p.limit_id for p in batch.prs}
                 if len(limits) == 1:
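Because a batched `create` may receive several PRs that share the same `(target, label)`, the new code looks the batch up once per key and reuses it for later entries, instead of calling `_get_batch` per PR and risking duplicate batches. The caching idea in isolation, with a fake batch factory:

# Illustration with plain data: two PRs sharing (target, label) must end up
# in the same batch, so the lookup is cached per key within one create() call.
def assign_batches(vals_list, get_batch):
    batches = {}
    for vals in vals_list:
        key = (vals['target'], vals['label'])
        if key not in batches:
            batches[key] = get_batch(key)
        vals['batch_id'] = batches[key]
    return vals_list

calls = []
result = assign_batches(
    [{'target': 1, 'label': 'x:y'}, {'target': 1, 'label': 'x:y'}],
    lambda key: calls.append(key) or len(calls),  # fake factory that records calls
)
assert [v['batch_id'] for v in result] == [1, 1]
assert calls == [(1, 'x:y')]  # the factory ran only once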
@@ -1378,7 +1385,9 @@ For your own safety I've ignored *everything in your entire comment*.
                 )
             )
 
-        pr = super().create(vals)
+        prs = super().create(vals_list)
+
+        for pr in prs:
             c = self.env['runbot_merge.commit'].search([('sha', '=', pr.head)])
             pr._validate(c.statuses)
 
@@ -1388,7 +1397,7 @@ For your own safety I've ignored *everything in your entire comment*.
                 pull_request=pr.number,
                 format_args={'pr': pr},
             )
-        return pr
+        return prs
 
     def _from_gh(self, description, author=None, branch=None, repo=None):
         if repo is None:
@@ -1457,11 +1466,14 @@ For your own safety I've ignored *everything in your entire comment*.
 
         newhead = vals.get('head')
         if newhead:
-            if pid := self.env.cr.precommit.data.get('change-author'):
-                writer = self.env['res.partner'].browse(pid)
-            else:
-                writer = self.env.user.partner_id
-            self.unstage("updated by %s", writer.github_login or writer.name)
+            authors = self.env.cr.precommit.data.get(f'mail.tracking.author.{self._name}', {})
+            for p in self:
+                if pid := authors.get(p.id):
+                    writer = self.env['res.partner'].browse(pid)
+                else:
+                    writer = self.env.user.partner_id
+                p.unstage("updated by %s", writer.github_login or writer.name)
+            # this can be batched
             c = self.env['runbot_merge.commit'].search([('sha', '=', newhead)])
             self._validate(c.statuses)
         return w
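The `write` change above replaces a single `change-author` value with a per-record mapping, so each PR in the recordset is unstaged with the author that actually changed it, falling back to the current user. The fallback lookup in isolation, with made-up ids standing in for the tracked mapping:

authors = {7: 42}          # record id -> partner id, as tracked elsewhere
current_user_partner = 99  # fallback when no tracked author exists

def writer_for(record_id):
    # Use the tracked author when there is one, otherwise the current user.
    return authors.get(record_id, current_user_partner)

assert writer_for(7) == 42
assert writer_for(8) == 99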
@@ -1754,7 +1766,9 @@ class Tagging(models.Model):
     tags_remove = fields.Char(required=True, default='[]')
     tags_add = fields.Char(required=True, default='[]')
 
-    def create(self, values):
+    @api.model_create_multi
+    def create(self, vals_list):
+        for values in vals_list:
             if values.pop('state_from', None):
                 values['tags_remove'] = ALL_TAGS
             if 'state_to' in values:
@@ -1763,9 +1777,9 @@
                 values['tags_remove'] = json.dumps(list(values['tags_remove']))
             if not isinstance(values.get('tags_add', ''), str):
                 values['tags_add'] = json.dumps(list(values['tags_add']))
-        if values:
+        if any(vals_list):
             self.env.ref('runbot_merge.labels_cron')._trigger()
-        return super().create(values)
+        return super().create(vals_list)
 
     def _send(self):
         # noinspection SqlResolve
@@ -1969,16 +1983,17 @@ class Commit(models.Model):
     commit_ids = fields.Many2many('runbot_merge.stagings', relation='runbot_merge_stagings_commits', column2='staging_id', column1='commit_id')
     pull_requests = fields.One2many('runbot_merge.pull_requests', compute='_compute_prs')
 
-    @api.model_create_single
-    def create(self, values):
+    @api.model_create_multi
+    def create(self, vals_list):
+        for values in vals_list:
             values['to_check'] = True
-        r = super(Commit, self).create(values)
+        r = super().create(vals_list)
         self.env.ref("runbot_merge.process_updated_commits")._trigger()
         return r
 
     def write(self, values):
         values.setdefault('to_check', True)
-        r = super(Commit, self).write(values)
+        r = super().write(values)
         if values['to_check']:
             self.env.ref("runbot_merge.process_updated_commits")._trigger()
         return r
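`Commit.create` and `Commit.write` above both flag records with `to_check` and then wake the processing cron via `_trigger()`, so new statuses are handled without waiting for the next scheduled run. A sketch of that pattern on a hypothetical model and cron record:

from odoo import api, fields, models

class ExampleCommit(models.Model):
    _name = 'example.commit'

    sha = fields.Char(required=True)
    to_check = fields.Boolean(default=False)

    @api.model_create_multi
    def create(self, vals_list):
        for vals in vals_list:
            vals['to_check'] = True
        records = super().create(vals_list)
        # Wake the (hypothetical) processing cron immediately instead of
        # waiting for its next scheduled run.
        self.env.ref('example.process_commits_cron')._trigger()
        return records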
@@ -2023,7 +2038,7 @@ class Commit(models.Model):
         ]
 
     def _auto_init(self):
-        res = super(Commit, self)._auto_init()
+        res = super()._auto_init()
         self._cr.execute("""
         CREATE INDEX IF NOT EXISTS runbot_merge_unique_statuses
         ON runbot_merge_commit