
Syncing from upstream odoo/runbot (17.0) #802

Open · wants to merge 23 commits into base: 17.0
Changes from all commits (23 commits):
e065206  [IMP] conftest: minor cleanups to repo creation (xmo-odoo, Feb 4, 2025)
3b0e59a  [IMP] conftest: fstring-ify repo forking (xmo-odoo, Feb 6, 2025)
6d5c539  [IMP] *: fork with main_branch_only (xmo-odoo, Feb 6, 2025)
e3b4d2b  [IMP] *: cleanup status contexts in tests (xmo-odoo, Feb 6, 2025)
6d8f9d9  [IMP] forwardport: batch push on updates (xmo-odoo, Feb 6, 2025)
0dd11ac  [FIX] *: double forwardport when adding a PR to an existing batch (xmo-odoo, Feb 11, 2025)
5bdeb04  [FIX] runbot_merge: disable autosave on boolean toggles (xmo-odoo, Feb 12, 2025)
8a931ad  [IMP] forwardport: better fw batch failure notification (xmo-odoo, Feb 13, 2025)
79116fc  [FIX] forwardport: duplicate batch completion (xmo-odoo, Feb 14, 2025)
905b0e0  [ADD] forwardport: test on resuming fw by hand (xmo-odoo, Feb 14, 2025)
b765bb8  [FIX] forwardport: a source may be unmerged (xmo-odoo, Feb 18, 2025)
e19a9fa  [REM] forwardport: re-enablement of forward ports when closing wizard (xmo-odoo, Feb 18, 2025)
d8c2782  [IMP] runbot_merge: add cron trigger button (xmo-odoo, Feb 18, 2025)
49c9314  [IMP] mergebot: improve command line (Xavier-Do, Feb 20, 2025)
d9b2441  Critical commands fix (xmo-odoo, Feb 20, 2025)
93a52b5  [FIX] forwardport: type error (xmo-odoo, Feb 20, 2025)
98bb01e  [ADD] runbot_merge, tests: opentelemetry support (xmo-odoo, Nov 28, 2024)
abf1298  [IMP] runbot_merge: add optional statuses on PRs (xmo-odoo, Feb 25, 2025)
157fec3  [FIX] runbot_merge: reject patches yielding empty commits (xmo-odoo, Feb 25, 2025)
5d08b79  [IMP] runbot_merge: replace `show --pretty=%H` by `rev-list -1` (xmo-odoo, Feb 25, 2025)
e6057af  [FIX] runbot_merge: handle missing patch commits (xmo-odoo, Feb 25, 2025)
a2b27ab  [ADD] runbot_merge: create controller to merge a commit (mart-e, Feb 7, 2025)
ccfff33  [IMP] runbot_merge: latest patches first (mart-e, Feb 24, 2025)
285 changes: 176 additions & 109 deletions conftest.py

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions forwardport/controllers.py
@@ -59,19 +59,20 @@ def outstanding(self, partner=0, authors=True, reviewers=True, group=0):
         if reviewers:
             partner_filter.append([(f'source_id.reviewed_by{suffix}', '=', arg)])

+        now = datetime.datetime.now()
         outstanding = PullRequests.search([
             ('source_id', '!=', False),
             ('blocked', '!=', False),
             ('state', 'in', ['opened', 'validated', 'approved', 'ready', 'error']),
-            ('create_date', '<', datetime.datetime.now() - DEFAULT_DELTA),
+            ('create_date', '<', now - DEFAULT_DELTA),
             *(partner_filter and expression.OR(partner_filter)),
         ])

         outstanding_per_group = collections.Counter()
         outstanding_per_author = collections.Counter()
         outstanding_per_reviewer = collections.Counter()
         outstandings = []
-        for source in outstanding.mapped('source_id').sorted('merge_date'):
+        for source in outstanding.mapped('source_id').sorted(lambda s: s.merge_date or now):
             prs = source.forwardport_ids.filtered(lambda p: p.state not in ['merged', 'closed'])
             outstandings.append({
                 'source': source,

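Note on the hunk above: commit b765bb8 ("a source may be unmerged") is why the sort key falls back to now when merge_date is unset. A minimal plain-Python sketch of that fallback-sort pattern (the Source dataclass below is invented for illustration, not mergebot code):

from dataclasses import dataclass
from datetime import datetime
from typing import Optional

@dataclass
class Source:
    name: str
    merge_date: Optional[datetime]  # None when the source PR has not been merged yet

def sort_sources(sources: list[Source]) -> list[Source]:
    # capture "now" once so every unmerged source gets the same fallback key
    now = datetime.now()
    return sorted(sources, key=lambda s: s.merge_date or now)

print([s.name for s in sort_sources([
    Source("a", datetime(2025, 1, 3)),
    Source("b", None),                  # unmerged: sorts after all merged sources
    Source("c", datetime(2024, 12, 1)),
])])
# ['c', 'a', 'b']
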
3 changes: 2 additions & 1 deletion forwardport/data/queues.xml
@@ -8,7 +8,8 @@
         <field name="name">Forward port batches</field>
         <field name="model">forwardport.batches</field>
         <field name="arch" type="xml">
-            <tree>
+            <tree decoration-danger="cannot_apply">
+                <field name="cannot_apply" column_invisible="1"/>
                 <field name="source"/>
                 <field name="batch_id"/>
                 <field name="retry_after_relative" string="Retry In"/>

71 changes: 41 additions & 30 deletions forwardport/models/forwardport.py
@@ -1,7 +1,10 @@
 # -*- coding: utf-8 -*-
 import builtins
+import collections
 import logging
 import re
+import sys
+from collections.abc import Mapping
 from contextlib import ExitStack
 from datetime import datetime, timedelta

@@ -62,6 +65,7 @@ def _search_domain(self):

 class ForwardPortTasks(models.Model, Queue):
     _name = 'forwardport.batches'
+    _inherit = ['mail.thread']
     _description = 'batches which got merged and are candidates for forward-porting'

     limit = 10
@@ -74,6 +78,7 @@ class ForwardPortTasks(models.Model, Queue):
         ('complete', 'Complete ported batches'),
     ], required=True)
     retry_after = fields.Datetime(required=True, default='1900-01-01 01:01:01')
+    cannot_apply = fields.Boolean(compute='_compute_cannot_apply', store=True)
     retry_after_relative = fields.Char(compute="_compute_retry_after_relative")
     pr_id = fields.Many2one('runbot_merge.pull_requests')

@@ -90,21 +95,31 @@ def write(self, vals):

     def _search_domain(self):
         return super()._search_domain() + [
+            ('cannot_apply', '=', False),
             ('retry_after', '<=', fields.Datetime.to_string(fields.Datetime.now())),
         ]

-    @api.depends('retry_after')
+    @api.depends('retry_after', 'cannot_apply')
     def _compute_retry_after_relative(self):
         now = fields.Datetime.now()
         for t in self:
-            if t.retry_after <= now:
+            if t.cannot_apply:
+                t.retry_after_relative = "N/A"
+            elif t.retry_after <= now:
                 t.retry_after_relative = ""
             else:
                 t.retry_after_relative = format_timedelta(t.retry_after - now, locale=t.env.lang)

+    @api.depends('retry_after')
+    def _compute_cannot_apply(self):
+        for t in self:
+            t.cannot_apply = t.retry_after > (t.create_date + timedelta(days=1))
+
     def _on_failure(self):
         super()._on_failure()
-        self.retry_after = fields.Datetime.to_string(fields.Datetime.now() + timedelta(minutes=30))
+        _, e, _ = sys.exc_info()
+        self._message_log(body=f"Error while processing forward-port batch: {e}")
+        self.retry_after = fields.Datetime.now() + timedelta(hours=1)

     def _process_item(self):
         batch = self.batch_id
@@ -158,6 +173,7 @@ def _process_insert(self, batch, newbatch):

     def _complete_batches(self):
         source = pr = self.pr_id
+        source_id = pr.source_id or pr
         if not pr:
             _logger.warning(
                 "Unable to complete descendants of %s (%s): no new PR",
@@ -189,10 +205,10 @@ def _complete_batches(self):
             return

         if PullRequests.search_count([
-            ('source_id', '=', source.id),
+            ('source_id', '=', source_id.id),
             ('target', '=', target.id),
             ('state', 'not in', ('closed', 'merged')),
-        ]):
+        ], limit=1):
             _logger.warning("Will not forward-port %s: already ported", pr.display_name)
             return

@@ -239,17 +255,15 @@ def _complete_batches(self):
                 _logger.warning("Deleting %s:%s=%s", remote_target, ref, d.text)
             raise RuntimeError(f"Forwardport failure: {pr.display_name} ({r.text})")

-        new_pr = PullRequests._from_gh(r.json())
-        _logger.info("Created forward-port PR %s", new_pr)
-        new_pr.write({
-            'batch_id': descendant.id, # should already be set correctly but...
-            'merge_method': pr.merge_method,
-            'source_id': source.id,
-            # only link to previous PR of sequence if cherrypick passed
-            # FIXME: apply parenting of siblings? Apply parenting *to* siblings?
-            'parent_id': pr.id if not conflict else False,
-            'detach_reason': "{1}\n{2}".format(*conflict).strip() if conflict else None,
-        })
+        new_pr = PullRequests._from_gh(
+            r.json(),
+            batch_id=descendant.id,
+            merge_method=pr.merge_method,
+            source_id=source_id.id,
+            parent_id=False if conflict else pr.id,
+            detach_reason="{1}\n{2}".format(*conflict).strip() if conflict else None
+        )
+        _logger.info("Created forward-port PR %s", new_pr.display_name)

         if conflict:
             self.env.ref('runbot_merge.forwardport.failure.conflict')._send(
@@ -289,6 +303,8 @@ def _process_item(self):
         previous = self.new_root
         sentry_sdk.set_tag("update-root", self.new_root.display_name)
         with ExitStack() as s:
+            # dict[repo: [ref, old_head, new_head]
+            updates: Mapping[str, list[str, str, str]] = collections.defaultdict(list)
             for child in self.new_root._iter_descendants():
                 self.env.cr.execute("""
                     SELECT id
@@ -346,23 +362,18 @@ def _process_item(self):
                     # 'state': 'opened',
                     'squash': commits_count == 1,
                 })
-                # then update the child's branch to the new head
-                repo.push(
-                    f'--force-with-lease={child.refname}:{old_head}',
-                    git.fw_url(child.repository),
-                    f"{new_head}:refs/heads/{child.refname}")
-
-                # committing here means github could technically trigger its
-                # webhook before sending a response, but committing before
-                # would mean we can update the PR in database but fail to
-                # update on github, which is probably worse?
-                # alternatively we can commit, push, and rollback if the push
-                # fails
-                # FIXME: handle failures (especially on non-first update)
-                self.env.cr.commit()
+                updates[child.repository].append((child.refname, old_head, new_head))

                 previous = child

+            for repository, refs in updates.items():
+                # then update the child branches to the new heads
+                repo.push(
+                    *(f'--force-with-lease={ref}:{old}' for ref, old, _new in refs),
+                    git.fw_url(repository),
+                    *(f"{new}:refs/heads/{ref}" for ref, _old, new in refs)
+                )
+
 _deleter = _logger.getChild('deleter')
 class DeleteBranches(models.Model, Queue):
     _name = 'forwardport.branch_remover'

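Note on the last hunk above (commit 6d8f9d9, "batch push on updates"): instead of one push and one cursor commit per child PR, the ref updates are accumulated and replayed as a single push per repository, each ref guarded by --force-with-lease. A rough standalone sketch of that batching idea against the plain git CLI (URLs, ref names and SHAs below are made up; the mergebot's repo.push/git.fw_url helpers are not reproduced):

from collections import defaultdict

def batched_push_argv(url, refs):
    """Build one `git push` command updating several refs of the same repository.

    `refs` is a list of (refname, old_head, new_head); each ref gets its own
    --force-with-lease=<ref>:<old_head> guard so the push is refused if the
    remote branch moved in the meantime.
    """
    return [
        "git", "push",
        *(f"--force-with-lease={ref}:{old}" for ref, old, _new in refs),
        url,
        *(f"{new}:refs/heads/{ref}" for ref, _old, new in refs),
    ]

# (ref, old_head, new_head) tuples accumulated while walking the forward-port
# chain, grouped per repository so each repository is pushed exactly once.
updates = defaultdict(list)
updates["https://example.com/org/repo-a.git"] += [
    ("fw-branch-1", "1111111", "2222222"),
    ("fw-branch-2", "3333333", "4444444"),
]
for url, refs in updates.items():
    print(" ".join(batched_push_argv(url, refs)))
    # subprocess.run(batched_push_argv(url, refs), check=True)
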
11 changes: 6 additions & 5 deletions forwardport/models/project.py
@@ -213,10 +213,11 @@ def create(self, vals_list):

         new = super().create(to_create)
         for pr in new:
-            # added a new PR to an already forward-ported batch: port the PR
-            if self.env['runbot_merge.batch'].search_count([
+            # added a new PR to an already forward-ported batch: immediately
+            # port forward to complete the genealogy
+            if not pr.source_id and self.env['runbot_merge.batch'].search_count([
                 ('parent_id', '=', pr.batch_id.id),
-            ]):
+            ], limit=1):
                 self.env['forwardport.batches'].create({
                     'batch_id': pr.batch_id.id,
                     'source': 'complete',
@@ -543,9 +544,9 @@ def _reminder(self):
             # after 6 months, start sending emails
             if age > datetime.timedelta(weeks=26):
                 if author := source.author.email:
-                    emails[author] |= prs
+                    emails[author] = emails[author].union(*prs)
                 if reviewer := source.reviewed_by.email:
-                    emails[reviewer] |= prs
+                    emails[reviewer] = emails[reviewer].union(*prs)
             self.env.ref('runbot_merge.forwardport.reminder')._send(
                 repository=pr.repository,
                 pull_request=pr.number,

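Note on the _reminder hunk: outstanding forward-ports are accumulated per recipient e-mail address (author and reviewer), so each person is nagged once with their whole backlog. A small plain-Python analogue of that grouping step (the dict-shaped records below are invented for illustration; the real code works on Odoo recordsets):

from collections import defaultdict

def group_reminders(outstanding):
    """Map each recipient e-mail to the set of PRs they should be reminded about.

    Each item is expected to look like
    {"pr": "org/repo#1", "author": "a@example.com", "reviewer": "r@example.com"}.
    """
    emails = defaultdict(set)
    for item in outstanding:
        if author := item.get("author"):
            emails[author].add(item["pr"])
        if reviewer := item.get("reviewer"):
            emails[reviewer].add(item["pr"])
    return emails

for recipient, prs in group_reminders([
    {"pr": "org/repo#1", "author": "a@example.com", "reviewer": "r@example.com"},
    {"pr": "org/repo#2", "author": "a@example.com", "reviewer": None},
]).items():
    print(recipient, sorted(prs))  # one reminder per recipient
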
12 changes: 0 additions & 12 deletions forwardport/models/project_freeze.py
@@ -14,15 +14,3 @@ def create(self, vals_list):
         r = super().create(vals_list)
         self.env.ref('forwardport.port_forward').active = False
         return r
-
-    def action_freeze(self):
-        return super(FreezeWizard, self.with_context(forwardport_keep_disabled=True))\
-            .action_freeze()
-
-    def unlink(self):
-        r = super().unlink()
-        if not (self.env.context.get('forwardport_keep_disabled') or self.search_count([])):
-            cron = self.env.ref('forwardport.port_forward')
-            cron.active = True
-            cron._trigger() # process forward ports enqueued during the freeze period
-        return r