diff --git a/conftest.py b/conftest.py index c51479afa..985dbff71 100644 --- a/conftest.py +++ b/conftest.py @@ -1135,7 +1135,9 @@ def disable_hooks(self) -> typing.Iterator[None]: for hook in hook_urls: r = sess.patch(hook, json={'active': False}) assert r.ok, r.text + wait_for_hook() yield + wait_for_hook() for hook in reversed(hook_urls): r = sess.patch(hook, json={'active': True}) assert r.ok, r.text diff --git a/mergebot_test_utils/saas_worker/__init__.py b/mergebot_test_utils/saas_worker/__init__.py index 4f4ee22fb..2c7277913 100644 --- a/mergebot_test_utils/saas_worker/__init__.py +++ b/mergebot_test_utils/saas_worker/__init__.py @@ -3,9 +3,14 @@ import threading import psycopg2 +import requests.sessions import odoo -from odoo import models +from odoo import models, fields + +from . import auth_util + +requests.sessions.Session = auth_util.SaasSession _logger = logging.getLogger(__name__) @@ -49,3 +54,11 @@ def _process_jobs(cls, db_name): finally: if hasattr(t, 'dbname'): del t.dbname + + +class SaasCalls(models.Model): + _name = _description = 'saas.calls' + + method = fields.Char() + url = fields.Char() + body = fields.Binary(attachment=False) \ No newline at end of file diff --git a/mergebot_test_utils/saas_worker/__manifest__.py b/mergebot_test_utils/saas_worker/__manifest__.py index 95f49d666..8254e2cf5 100644 --- a/mergebot_test_utils/saas_worker/__manifest__.py +++ b/mergebot_test_utils/saas_worker/__manifest__.py @@ -2,4 +2,7 @@ 'name': 'dummy saas_worker', 'version': '1.0', 'license': 'BSD-0-Clause', + 'data': [ + 'access.xml', + ] } diff --git a/mergebot_test_utils/saas_worker/access.xml b/mergebot_test_utils/saas_worker/access.xml new file mode 100644 index 000000000..eec0e9aa9 --- /dev/null +++ b/mergebot_test_utils/saas_worker/access.xml @@ -0,0 +1,10 @@ + + + Access to saas calls + + 1 + 0 + 0 + 0 + + \ No newline at end of file diff --git a/mergebot_test_utils/saas_worker/auth_util.py b/mergebot_test_utils/saas_worker/auth_util.py new file 
mode 100644 index 000000000..9e474f108 --- /dev/null +++ b/mergebot_test_utils/saas_worker/auth_util.py @@ -0,0 +1,44 @@ +import io +import threading +import urllib.parse + +import requests.adapters +import requests.auth +import requests.sessions + +import odoo + + +class SaasAdapter(requests.adapters.BaseAdapter): + def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): + res = requests.Response() + res.request = request + res.status_code = 204 + res.raw = io.BytesIO() + return res + + def close(self) -> None: + pass + + +class SaasSession(requests.sessions.Session): + def __init__(self): + super().__init__() + self.mount('saas://', SaasAdapter()) + + +class SaasAuth(requests.auth.AuthBase): + def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest: + dbname = threading.current_thread().dbname + db = odoo.sql_db.db_connect(dbname) + with db.cursor() as cr: + env = odoo.api.Environment(cr, 1, {}) + env['saas.calls'].create({ + 'method': request.method, + 'url': request.url, + 'body': request.body, + }) + request.url = urllib.parse.urlsplit(request.url)\ + ._replace(scheme='saas')\ + .geturl() + return request \ No newline at end of file diff --git a/runbot_merge/__manifest__.py b/runbot_merge/__manifest__.py index 7d1d1bf0f..6a5afd36d 100644 --- a/runbot_merge/__manifest__.py +++ b/runbot_merge/__manifest__.py @@ -1,6 +1,6 @@ { 'name': 'merge bot', - 'version': '1.19', + 'version': '1.20', 'depends': ['contacts', 'mail', 'website'], 'data': [ 'security/security.xml', @@ -26,6 +26,8 @@ 'models/commands/runbot_merge.acls.csv', 'models/commands/views.xml', 'models/crons/validate.xml', + 'models/crons/status_request.xml', + 'models/crons/check_commits.xml', ], 'assets': { 'web._assets_primary_variables': [ diff --git a/runbot_merge/controllers/__init__.py b/runbot_merge/controllers/__init__.py index 08f555efe..cb2d3c187 100644 --- a/runbot_merge/controllers/__init__.py +++ 
b/runbot_merge/controllers/__init__.py @@ -7,6 +7,7 @@ from datetime import datetime, timedelta from typing import Callable +import psycopg2.errors import sentry_sdk from werkzeug.exceptions import NotFound, UnprocessableEntity @@ -74,7 +75,7 @@ def stagings_for_commits(self, **kw): @route('/runbot_merge/stagings/', auth='none', type='http', methods=['GET']) def prs_for_staging(self, staging): staging = request.env(user=1)['runbot_merge.stagings'].browse(staging).sudo() - if not staging.exists: + if not staging.exists(): raise NotFound() return request.make_json_response(staging_dict(staging)) @@ -173,7 +174,15 @@ def _format(self, request): def handle_pr(env, event): pr = event['pull_request'] - squash = pr['commits'] == 1 + squash = False + check_commits = lambda _: None + match pr['commits']: + case 0: + check_commits = lambda pr_obj: env['runbot_merge.pull_requests.check_commits'].create({ + 'pull_request_id': pr_obj.id, + }) + case 1: + squash = True r = pr['base']['repo']['full_name'] if event['action'] in [ @@ -192,6 +201,7 @@ def handle_pr(env, event): ('squash', '!=', squash), ]): pr.squash = squash + check_commits(pr) return Response( status=200, @@ -267,6 +277,7 @@ def find(target): if updates: # copy because it updates the `updates` dict internally pr_obj.write(dict(updates)) + check_commits(pr_obj) return Response( status=200, mimetype="text/plain", @@ -320,8 +331,13 @@ def find(target): author = env['res.partner'].search([('github_login', '=', author_name)], limit=1) if not author: env['res.partner'].create({'name': author_name, 'github_login': author_name}) - pr_obj = env['runbot_merge.pull_requests']._from_gh(pr) - return Response(status=201, mimetype="text/plain", response=f"Tracking PR as {pr_obj.display_name}") + try: + pr_obj = env['runbot_merge.pull_requests']._from_gh(pr) + check_commits(pr_obj) + return Response(status=201, mimetype="text/plain", response=f"Tracking PR as {pr_obj.display_name}") + except psycopg2.errors.UniqueViolation: + 
env.cr.rollback() + return Response(status=200, mimetype="text/plain", response="Already known") pr_obj = env['runbot_merge.pull_requests']._get_or_schedule(r, pr['number'], closing=event['action'] == 'closed') if not pr_obj: @@ -370,11 +386,13 @@ def find(target): 'head': pr['head']['sha'], 'squash': squash, }) + check_commits(pr_obj) return Response(mimetype="text/plain", response=f'Updated to {pr_obj.head}') if event['action'] not in ('closed', 'reopened'): if pr_obj.squash != squash: pr_obj.squash = squash + check_commits(pr_obj) if event['action'] == 'ready_for_review': pr_obj.draft = False @@ -422,6 +440,7 @@ def find(target): 'head': pr['head']['sha'], 'squash': pr['commits'] == 1, }) + check_commits(pr_obj) return Response(mimetype="text/plain", response=f'Reopened {pr_obj.display_name}') diff --git a/runbot_merge/controllers/misc.py b/runbot_merge/controllers/misc.py index e9a14ad38..bbd27c1c0 100644 --- a/runbot_merge/controllers/misc.py +++ b/runbot_merge/controllers/misc.py @@ -20,10 +20,10 @@ def merge_commit(self, commit_hash, repository, branch, project="RD"): target = request.env["runbot_merge.branch"].sudo().search([ ("name", "=", branch), - ("project_id", "=", project) + ("project_id.name", "=", project) ]) if not target: - return {"error": "Target branch %s:%s not found" % (branch, target)} + return {"error": f"Target branch {project}:{branch} not found"} patch = request.env["runbot_merge.patch"].sudo().create({ "repository": repository_id.id, diff --git a/runbot_merge/controllers/reviewer_provisioning.py b/runbot_merge/controllers/reviewer_provisioning.py index a5399b6f8..86e7fd3ea 100644 --- a/runbot_merge/controllers/reviewer_provisioning.py +++ b/runbot_merge/controllers/reviewer_provisioning.py @@ -55,6 +55,17 @@ def provision_user(self, users): # unique, and should not be able to collide with emails partners[p.github_login.casefold()] = p + existing_users = Users.with_context(active_test=False).search([ + ('partner_id', 'not in', 
existing_partners.ids), + '|', ('login', 'in', [u['email'] for u in users]), + ('oauth_uid', 'in', [s for u in users if (s := u.get('sub'))]), + ]) + _logger.info("Found %d existing matching users.", len(existing_users)) + users_map = {} + for u in existing_users: + users_map[u.login] = u + users_map[u.oauth_uid] = u + portal = env.ref('base.group_portal') internal = env.ref('base.group_user') odoo_provider = env.ref('auth_oauth.provider_openerp') @@ -68,7 +79,12 @@ def provision_user(self, users): new['oauth_uid'] = new.pop('sub') # prioritise by github_login as that's the unique-est point of information - current = partners.get(new['github_login'].casefold()) or partners.get(new['email']) or Partners + current = partners.get(new['github_login'].casefold()) \ + or partners.get(new['email']) \ + or (users_map.get(new['email']) + or users_map.get(new.get('oauth_uid')) + or Users + ).partner_id if not current.active: to_activate |= current @@ -127,6 +143,7 @@ def provision_user(self, users): new['partner_id'] = current.id to_create.append(new) + created = len(to_create) if to_create: # only create 100 users at a time to avoid request timeout @@ -152,7 +169,16 @@ def fetch_reviewers(self, **kwargs): '/runbot_merge/remove_reviewers', # deprecated URL ], type='json', auth='public', methods=['POST']) def disable_users(self, github_logins, **kwargs): + _logger.info("deprovisioning %s: %s") partners = request.env['res.partner'].sudo().search([('github_login', 'in', github_logins)]) + _logger.info( + "deprovisioning %s: %s", + github_logins, + [ + f"{p.display_name} ({', '.join(p.mapped('user_ids.login'))})" + for p in partners + ] + ) partners.write({ 'email': False, 'parent_id': False, diff --git a/runbot_merge/fw_tests/test_conflicts.py b/runbot_merge/fw_tests/test_conflicts.py index 559edf331..04c5ad9d4 100644 --- a/runbot_merge/fw_tests/test_conflicts.py +++ b/runbot_merge/fw_tests/test_conflicts.py @@ -170,6 +170,55 @@ def test_conflict(env, config, make_repo, users): 
'i': 'a', } +def test_conflict_unknown_root_head( + env, config, make_repo, +): + """Very specific implementation detail: on conflict we try to infer + modify/delete conflicts in order to mark the files git reintrodued, to do so + we try to list the files modified by the original PR, and that requires + having the head of its target branch locally... + """ + fw_cron = env.ref('runbot_merge.port_forward') + fw_cron.active = False + prod, _other = make_basic(env, config, make_repo, statuses="default") + + with prod: + # generate conflict: remove f from b, while updating f in a + prod.make_commits( + 'b', Commit('33', tree={'g': 'c'}, reset=True), + ref='heads/b' + ) + [p_0] = prod.make_commits( + 'a', Commit('p_0', tree={'f': 'xxx'}), + ref='heads/a_pr' + ) + pr = prod.make_pr(target='a', head='a_pr') + prod.post_status(p_0, 'success') + pr.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() + with prod: + prod.post_status('staging.a', 'success') + env.run_crons() + + # there should be no forward port (yet) + assert env['runbot_merge.pull_requests'].search_count([]) == 1 + # there should be an fw job + assert env['forwardport.batches'].search_count([]) == 1 + + # add a commit to a which the mergebot should not know about + with prod: + prod.make_commits( + 'a', + Commit('X', tree={'g': 'fkdshf'}), + ref='heads/a', + ) + + fw_cron.active = True + fw_cron.trigger() + env.run_crons() + + assert env['runbot_merge.pull_requests'].search_count([]) == 2 + def test_massive_conflict(env, config, make_repo): """If the conflict is large enough, the commit message may exceed ARG_MAX and trigger E2BIG. 
diff --git a/runbot_merge/git.py b/runbot_merge/git.py index 24dfa9c58..0fef58f34 100644 --- a/runbot_merge/git.py +++ b/runbot_merge/git.py @@ -193,16 +193,6 @@ def fetchone(self, repo, branch: str) -> str: """ return next(self.fetch_heads(repo, f"refs/heads/{branch}")) - def remote_head(self, repo, branch: str) -> str: - r = self.stdout().with_config(check=True, encoding="utf-8").ls_remote( - source_url(repo), - f'refs/heads/{branch}', - ) - assert r.stdout.count('\n') == 1, f"expected single line, got {r.stdout}" - # The output is in the format: TAB LF - head, _ = r.stdout.split('\t', 1) - return head - def get_tree(self, rev: str) -> str: return self.stdout().with_config(check=True, encoding="utf-8")\ .rev_parse(f'{rev}^{{tree}}')\ diff --git a/runbot_merge/migrations/17.0.1.20/pre-migrations.py b/runbot_merge/migrations/17.0.1.20/pre-migrations.py new file mode 100644 index 000000000..166c160c8 --- /dev/null +++ b/runbot_merge/migrations/17.0.1.20/pre-migrations.py @@ -0,0 +1,5 @@ +def migrate(cr, _version): + cr.execute(""" +ALTER TYPE runbot_merge_acls_state_type + RENAME TO runbot_merge_acls_effect_type; +""") \ No newline at end of file diff --git a/runbot_merge/models/backport/__init__.py b/runbot_merge/models/backport/__init__.py index b8f166b3a..5c5e8f003 100644 --- a/runbot_merge/models/backport/__init__.py +++ b/runbot_merge/models/backport/__init__.py @@ -89,7 +89,7 @@ def action_apply(self) -> dict: old_map = self.pr_id.commits_map self.pr_id.commits_map = "{}" - conflict, head = self.pr_id._create_port_branch(repo, self.target, forward=False) + conflict, head, n = self.pr_id._create_port_branch(repo, self.target, forward=False) self.pr_id.commits_map = old_map if conflict: @@ -122,6 +122,7 @@ def action_apply(self) -> dict: # the backport's own forwardport should stop right before the # original PR by default limit_id=branches[source_idx - 1], + squash=n==1, ) _logger.info("Created backport %s for %s", backport.display_name, 
self.pr_id.display_name) diff --git a/runbot_merge/models/batch.py b/runbot_merge/models/batch.py index b4e0806e7..8fa584f90 100644 --- a/runbot_merge/models/batch.py +++ b/runbot_merge/models/batch.py @@ -11,7 +11,7 @@ from odoo import models, fields, api from .pull_requests import ForwardPortError, PullRequests -from .utils import enum +from .utils import EnumSelection from .. import git _logger = logging.getLogger(__name__) @@ -87,14 +87,12 @@ class Batch(models.Model): default=False, tracking=True, help="Cancels current staging on target branch when becoming ready" ) - priority = fields.Selection([ + priority = EnumSelection([ ('nice', "Nice"), ('default', "Default"), ('priority', "Priority"), ('alone', "Alone"), - ], default='default', group_operator=None, required=True, tracking=True, - column_type=enum(_name, 'priority'), - ) + ], default='default', group_operator=None, required=True, tracking=True) blocked = fields.Char(store=True, compute="_compute_blocked", tracking=True) unblocked_at = fields.Datetime(store=True, compute="_compute_blocked", tracking=True) @@ -147,18 +145,6 @@ def _compute_genealogy(self): .search([("parent_path", "=like", f"{sid}/%")], order="parent_path")\ def _auto_init(self): - for field in self._fields.values(): - if not isinstance(field, fields.Selection) or field.column_type[0] == 'varchar': - continue - - t = field.column_type[1] - self.env.cr.execute("SELECT FROM pg_type WHERE typname = %s", [t]) - if not self.env.cr.rowcount: - self.env.cr.execute( - f"CREATE TYPE {t} AS ENUM %s", - [tuple(s for s, _ in field.selection)] - ) - super()._auto_init() self.env.cr.execute(""" @@ -333,9 +319,10 @@ def _port_forward(self): _logger.info("Forward-porting %s to %s (using branch %r)", self, target.name, new_branch) conflicts = {} + commits_count = {} for pr in prs: repo = git.get_local(pr.repository) - conflicts[pr], head = pr._create_port_branch(repo, target, forward=True) + conflicts[pr], head, commits_count[pr] = 
pr._create_port_branch(repo, target, forward=True) r = repo.check(False).push(git.fw_url(pr.repository), f"{head}:refs/heads/{new_branch}") if r.returncode: self._delete_fw_branches(conflicts.keys(), new_branch) @@ -388,6 +375,7 @@ def _port_forward(self): for _, out, err, _ in filter(None, conflicts.values()) ) ), + squash=commits_count[pr]==1, ) _logger.info("Created forward-port PR %s", new_pr) new_batch |= new_pr @@ -457,7 +445,7 @@ def _schedule_fp_followup(self, *, force_fw=False): scheduled = self.browse(()) for batch in self: fw_policy = batch.source.fw_policy - force_fw = force_fw or fw_policy == 'skipmerge' + forcefw = force_fw or fw_policy == 'skipmerge' prs = ', '.join(batch.prs.mapped('display_name')) _logger.info('Checking if forward-port %s (%s)', batch, prs) @@ -472,10 +460,10 @@ def _schedule_fp_followup(self, *, force_fw=False): # same thing as all the PRs having a source, kinda, but cheaper, # it's not entirely true as technically the user could have added a # PR to the forward ported batch - if not (batch.parent_id and (force_fw or all(p.parent_id for p in batch.prs))): + if not (batch.parent_id and (forcefw or all(p.parent_id for p in batch.prs))): _logger.info('-> no parent %s (%s)', batch, prs) continue - if not force_fw and fw_policy not in ('skipci', 'skipmerge') \ + if not forcefw and fw_policy not in ('skipci', 'skipmerge') \ and (invalid := batch.prs.filtered(lambda p: p.status != 'success')): _logger.info( '-> wrong state %s (%s)', @@ -496,6 +484,7 @@ def _schedule_fp_followup(self, *, force_fw=False): prs, ', '.join(next_target.mapped('name')), ) + continue if n := self.with_context(active_test=False).search([ ('target', '=', next_target.id), diff --git a/runbot_merge/models/commands/__init__.py b/runbot_merge/models/commands/__init__.py index 7746494d6..29053a44b 100644 --- a/runbot_merge/models/commands/__init__.py +++ b/runbot_merge/models/commands/__init__.py @@ -8,7 +8,7 @@ from odoo.api import Environment from odoo.tools import 
SQL, lazy_property from .commands import Command -from ..utils import enum +from ..utils import EnumSelection if typing.TYPE_CHECKING: from ..pull_requests import PullRequests @@ -87,11 +87,7 @@ class ACL(models.Model): command = fields.Selection(list(commands_list()), required=True) arg = fields.Char() - effect = fields.Selection( - [("add", "Add"), ("remove", "Remove")], - required=True, - column_type=enum(_name, 'state'), - ) + effect = EnumSelection([("add", "Add"), ("remove", "Remove")], required=True) predicate = fields.Char( help="Command predicate, receives a `rel` object exposing the `author`" @@ -103,18 +99,6 @@ class ACL(models.Model): repository_id = fields.Many2one('runbot_merge.repository') def _auto_init(self): - for field in self._fields.values(): - if not isinstance(field, fields.Selection) or field.column_type[0] == 'varchar': - continue - - t = field.column_type[1] - self.env.cr.execute("SELECT 1 FROM pg_type WHERE typname = %s", [t]) - if not self.env.cr.rowcount: - self.env.cr.execute( - f"CREATE TYPE {t} AS ENUM %s", - [tuple(s for s, _ in field.selection)] - ) - super()._auto_init() # support for looking up records with by partner and repository, `add` first. diff --git a/runbot_merge/models/crons/__init__.py b/runbot_merge/models/crons/__init__.py index 08371dc86..79833eb6a 100644 --- a/runbot_merge/models/crons/__init__.py +++ b/runbot_merge/models/crons/__init__.py @@ -2,3 +2,5 @@ from . import issues_closer from . import forwardport from . import validate +from . import status_request +from . import check_commits \ No newline at end of file diff --git a/runbot_merge/models/crons/check_commits.py b/runbot_merge/models/crons/check_commits.py new file mode 100644 index 000000000..3d13a95fc --- /dev/null +++ b/runbot_merge/models/crons/check_commits.py @@ -0,0 +1,45 @@ +import logging + +from odoo import api, fields, models +from ... 
import git + + +_logger = logging.getLogger(__name__) + +class CheckCommits(models.Model): + _name = 'runbot_merge.pull_requests.check_commits' + _description = "uses git to compute a PR's commit count" + + pull_request_id = fields.Many2one('runbot_merge.pull_requests', required=True) + + @api.model_create_multi + def create(self, vals_list): + self.env.ref('runbot_merge.cron_check_commits')._trigger() + return super().create(vals_list) + + def _run(self): + # this cron should be needed extremely rarely, and has low time + # sensitivity, so we can just process one item at a time. + t = self.search([], limit=1) + if not t: + return + + pr_id = t.pull_request_id + repo = git.get_local(pr_id.repository) + target_head, pr_head = sorted( + repo.fetch_heads( + pr_id.repository, + f"refs/heads/{pr_id.target.name}", + pr_id.head, + ), + key=lambda h: h == pr_id.head, + ) + r = repo.stdout().with_config( + check=True, + encoding='utf-8', + ).rev_list('--count', f'{target_head}..{pr_head}') + _logger.info("%s: %s commits", pr_id.display_name, r.stdout.strip()) + pr_id.squash = int(r.stdout) == 1 + + t.unlink() + self.env.ref('runbot_merge.cron_check_commits')._trigger() diff --git a/runbot_merge/models/crons/check_commits.xml b/runbot_merge/models/crons/check_commits.xml new file mode 100644 index 000000000..67dd0f734 --- /dev/null +++ b/runbot_merge/models/crons/check_commits.xml @@ -0,0 +1,19 @@ + + + Access to commits checker is useless + + 0 + 0 + 0 + 0 + + + + Use git to check a PR's commits count (~squash status) + + code + model._run() + -1 + + + diff --git a/runbot_merge/models/crons/forwardport.py b/runbot_merge/models/crons/forwardport.py index 073225373..74da60e9d 100644 --- a/runbot_merge/models/crons/forwardport.py +++ b/runbot_merge/models/crons/forwardport.py @@ -264,14 +264,14 @@ def _complete_batches(self): # NOTE: ports the new source everywhere instead of porting each # PR to the next step as it does not *stop* on conflict repo = 
git.get_local(source.repository) - conflict, head = source._create_port_branch(repo, target, forward=True) + conflict, head, n = source._create_port_branch(repo, target, forward=True) repo.push(git.fw_url(pr.repository), f'{head}:refs/heads/{ref}') remote_target = repository.fp_remote_target owner, _ = remote_target.split('/', 1) message = source.message + f"\n\nForward-Port-Of: {pr.display_name}" - title, body = re.match(r'(?P[^\n]+)\n*(?P<body>.*)', message, flags=re.DOTALL).groups() + title, body = re.fullmatch(r'(?P<title>[^\n]+)\n*(?P<body>.*)', message, flags=re.DOTALL).groups() r = gh.post(f'https://api.github.com/repos/{pr.repository.name}/pulls', json={ 'base': target.name, 'head': f'{owner}:{ref}', @@ -297,7 +297,8 @@ def _complete_batches(self): merge_method=pr.merge_method, source_id=source_id.id, parent_id=False if report_conflicts else pr.id, - detach_reason="{1}\n{2}".format(*conflict).strip() if report_conflicts else None + detach_reason="{1}\n{2}".format(*conflict).strip() if report_conflicts else None, + squash=n==1, ) _logger.info("Created forward-port PR %s", new_pr.display_name) @@ -372,7 +373,7 @@ def _process_item(self): ) repo = git.get_local(previous.repository) - conflicts, new_head = previous._create_port_branch(repo, child.target, forward=True) + conflicts, new_head, n = previous._create_port_branch(repo, child.target, forward=True) if conflicts: _, out, err, _ = conflicts @@ -394,17 +395,12 @@ def _process_item(self): }, ) - target_head = repo.fetchone(previous.repository, child.target.name) - commits_count = int(repo.stdout().rev_list( - f'{target_head}..{new_head}', - count=True - ).stdout.decode().strip()) old_head = child.head # update child's head to the head we're going to push child.with_context(ignore_head_update=True).write({ 'head': new_head, # 'state': 'opened', - 'squash': commits_count == 1, + 'squash': n == 1, }) updates[child.repository].append((child.refname, old_head, new_head)) diff --git 
a/runbot_merge/models/crons/status_request.py b/runbot_merge/models/crons/status_request.py new file mode 100644 index 000000000..fff5e346d --- /dev/null +++ b/runbot_merge/models/crons/status_request.py @@ -0,0 +1,39 @@ +import logging + +import requests + +from odoo import api, fields, models + + +_logger = logging.getLogger(__name__) + + +class StatusRequest(models.Model): + _name = 'runbot_merge.pull_requests.status.request' + _description = "requests that the runbot computes / generates / sends statuses" + + pull_request_id = fields.Many2one('runbot_merge.pull_requests') + + @api.model_create_multi + def create(self, vals_list): + self.env.ref('runbot_merge.cron_status_request')._trigger() + return super().create(vals_list) + + def _run(self): + # hide from pytest as it's not on the PYTHONPATH + from odoo.addons.saas_worker.auth_util import SaasAuth + prs = self.search([]) + names = prs.mapped('pull_request_id.display_name') + prs.unlink() + _logger.info("Trigger statuses for %s", ", ".join(names)) + res = requests.post( + 'https://runbot.odoo.com/runbot/request_ci', + auth=SaasAuth(), + json={'pull_requests': names} + ) + if not res.ok: + _logger.warning( + "Statuses request failed %s\n%s", + res.status_code, + res.text, + ) \ No newline at end of file diff --git a/runbot_merge/models/crons/status_request.xml b/runbot_merge/models/crons/status_request.xml new file mode 100644 index 000000000..9e680c868 --- /dev/null +++ b/runbot_merge/models/crons/status_request.xml @@ -0,0 +1,19 @@ +<odoo> + <record id="status_request" model="ir.model.access"> + <field name="name">Access is useless</field> + <field name="model_id" ref="model_runbot_merge_pull_requests_status_request"/> + <field name="perm_read">0</field> + <field name="perm_create">0</field> + <field name="perm_write">0</field> + <field name="perm_unlink">0</field> + </record> + + <record model="ir.cron" id="cron_status_request"> + <field name="name">Sends status requests</field> + <field name="model_id" 
ref="model_runbot_merge_pull_requests_status_request"/> + <field name="state">code</field> + <field name="code">model._run()</field> + <field name="numbercall">-1</field> + <field name="doall" eval="False"/> + </record> +</odoo> diff --git a/runbot_merge/models/crons/validate.py b/runbot_merge/models/crons/validate.py index ba9f38057..94e7e8179 100644 --- a/runbot_merge/models/crons/validate.py +++ b/runbot_merge/models/crons/validate.py @@ -31,7 +31,7 @@ def _run(self) -> None: if blocked := validations.filtered(lambda v: v.batch_id.blocked): _logger.warning( "Found unexpected blocked batches in the validation queue: %s", - ", ".join(blocked.batch_id) + blocked.batch_id, ) validations -= blocked blocked.unlink() @@ -44,7 +44,7 @@ def _run(self) -> None: if pr.repository not in to_fetch: to_fetch[pr.repository] = { 'repo': git.get_local(pr.repository).stdout().with_config(text=True), - 'branches': {pr.target.name}, + 'branches': {f"refs/heads/{pr.target.name}"}, 'heads': {pr.head}, } continue @@ -72,6 +72,8 @@ def _run(self) -> None: validate_pr(pr, states[pr.repository.name, pr.target.name]) except exceptions.Mismatch as e: mismatch_warn(e, "data mismatch during check:\n{diff}") + except exceptions.Skip as e: + continue except exceptions.MergeError as e: if len(e.args) > 1 and e.args[1]: reason = e.args[1] diff --git a/runbot_merge/models/project.py b/runbot_merge/models/project.py index baa25a04a..7217ea355 100644 --- a/runbot_merge/models/project.py +++ b/runbot_merge/models/project.py @@ -80,6 +80,10 @@ class Project(models.Model): warn_mergiraf = fields.Boolean(compute='_compute_warn_mergiraf') fw_nice = fields.Boolean(help="Lower priority of forward-ports") + request_missing_statuses = fields.Boolean( + help="If some required statuses have never been received on a PR," + " request them from the runbot." 
+ ) @api.depends('use_mergiraf') def _compute_warn_mergiraf(self): @@ -335,7 +339,6 @@ def _insert_intermediate_prs(self, branches_before): logger = _logger.getChild('project').getChild(p.name) logger.debug("branches updated %s -> %s", bbefore, bafter) - print(f"\n\nbranches updated {bbefore} -> {bafter}\n", flush=True) # Last possibility: branch was inserted but not at end, get all # branches before and all branches after diff --git a/runbot_merge/models/pull_requests.py b/runbot_merge/models/pull_requests.py index 57b650a97..97789d892 100644 --- a/runbot_merge/models/pull_requests.py +++ b/runbot_merge/models/pull_requests.py @@ -38,7 +38,7 @@ from .commands import commands, AccessFailure, Rel from .. import github, exceptions, controllers, utils, git -from .utils import enum, readonly, dfm +from .utils import readonly, dfm, EnumSelection Conflict = tuple[str, str, str, list[str]] @@ -59,7 +59,7 @@ class StatusConfiguration(models.Model): context = fields.Char(required=True) repo_id = fields.Many2one('runbot_merge.repository', required=True, ondelete='cascade') branch_filter = fields.Char(help="branches this status applies to") - prs = fields.Selection([ + prs = EnumSelection([ ('required', 'Required'), ('optional', 'Optional'), ('ignored', 'Ignored'), @@ -67,25 +67,9 @@ class StatusConfiguration(models.Model): default='required', required=True, string="Applies to pull requests", - column_type=enum(_name, 'prs'), ) stagings = fields.Boolean(string="Applies to stagings", default=True) - def _auto_init(self): - for field in self._fields.values(): - if not isinstance(field, fields.Selection) or field.column_type[0] == 'varchar': - continue - - t = field.column_type[1] - self.env.cr.execute("SELECT 1 FROM pg_type WHERE typname = %s", [t]) - if not self.env.cr.rowcount: - self.env.cr.execute( - f"CREATE TYPE {t} AS ENUM %s", - [tuple(s for s, _ in field.selection)] - ) - - super()._auto_init() - def _for_branch(self, branch): assert branch._name == 
'runbot_merge.branch', \ f'Expected branch, got {branch}' @@ -205,7 +189,7 @@ def _load_pr( 'changes': { 'base': {'ref': {'from': pr_id.target.name}}, 'title': {'from': pr_id.message.splitlines()[0]}, - 'body': {'from', ''.join(pr_id.message.splitlines(keepends=True)[2:])}, + 'body': {'from': ''.join(pr_id.message.splitlines(keepends=True)[2:])}, }, 'sender': {'login': self.project_id.github_prefix}, }).get_data(True) @@ -222,7 +206,7 @@ def _load_pr( # don't go through controller because try_closing does weird things # for safety / race condition reasons which ends up committing # and breaks everything - pr_id.state = 'closed' + pr_id.closed = True self.env['runbot_merge.pull_requests.feedback'].create({ 'repository': pr_id.repository.id, 'pull_request': number, @@ -473,7 +457,7 @@ class PullRequests(models.Model): store=True, ) - state = fields.Selection([ + state = EnumSelection([ ('opened', 'Opened'), ('closed', 'Closed'), ('validated', 'Validated'), @@ -489,7 +473,6 @@ class PullRequests(models.Model): store=True, index=True, tracking=True, - column_type=enum(_name, 'state'), ) open = fields.Boolean(compute='_compute_open', search='_search_open') @@ -523,12 +506,12 @@ def _search_open(self, operator, value): help="A draft PR can not be merged", ) squash = fields.Boolean(default=False, tracking=True) - merge_method = fields.Selection([ + merge_method = EnumSelection([ ('merge', "merge directly, using the PR as merge commit message"), ('rebase-merge', "rebase and merge, using the PR as merge commit message"), ('rebase-ff', "rebase and fast-forward"), ('squash', "squash"), - ], default=False, tracking=True, column_type=enum(_name, 'merge_method')) + ], default=False, tracking=True) method_warned = fields.Boolean(default=False) reviewed_by = fields.Many2one('res.partner', index=True, tracking=True) @@ -542,11 +525,11 @@ def _search_open(self, operator, value): help="Compilation of the full status of the PR (commit statuses + overrides), as JSON", store=True, ) - 
status = fields.Selection([ + status = EnumSelection([ ('pending', 'Pending'), ('failure', 'Failure'), ('success', 'Success'), - ], compute='_compute_statuses', store=True, inverse=readonly, readonly=True, column_type=enum(_name, 'status')) + ], compute='_compute_statuses', store=True, inverse=readonly, readonly=True) previous_failure = fields.Char(default='{}') batch_id = fields.Many2one('runbot_merge.batch', index=True) @@ -667,7 +650,7 @@ def _compute_url(self): @api.depends('parent_id.root_id') def _compute_root(self): for p in self: - p.root_id = reduce(lambda _, p: p, self._iter_ancestors()) + p.root_id = reduce(lambda _, p: p, p._iter_ancestors()) @api.depends('message') def _compute_message_title(self): @@ -679,7 +662,7 @@ def _compute_message_html(self): for pr in self: match pr.message.split('\n\n', 1): case [title]: - pr.message_html = Markup('<h3>%s<h3>') % title + pr.message_html = Markup('<h3>%s</h3>') % title case [title, description]: pr.message_html = Markup('<h3>%s</h3>\n%s') % ( title, @@ -1010,10 +993,10 @@ def format_help(warn_ignore: bool, address: bool = True) -> str: delegates = self.author else: delegates = self.env['res.partner'] - for login in users: - delegates |= delegates.search([('github_login', '=', login)]) or delegates.create({ - 'name': login, - 'github_login': login, + for l in users: + delegates |= delegates.search([('github_login', '=', l)]) or delegates.create({ + 'name': l, + 'github_login': l, }) delegates.write({'delegate_reviewer': [(4, self.id, 0)]}) case commands.Priority(): @@ -1356,6 +1339,16 @@ def _approve(self, author, login): ) if not (self.squash or self.merge_method): self.env.ref('runbot_merge.check_merge_method')._trigger() + if self.project.request_missing_statuses: + statuses = json.loads(self.statuses_full) + if any( + st.context not in statuses + for st in self.repository.status_ids._for_pr(self) + if st.prs == 'required' + ): + self.env['runbot_merge.pull_requests.status.request'].create({ + 
'pull_request_id': self.id, + }) return None def _pr_acl(self, user) -> ACL: @@ -1423,12 +1416,15 @@ def _search_applicable_statuses(self, operator, value): @api.depends( 'statuses', 'overrides', 'target', 'parent_id', 'skipchecks', + 'closed', 'applicable_statuses.context', 'applicable_statuses.branch_filter', 'applicable_statuses.prs', ) def _compute_statuses(self): for pr in self: + if pr.merge_date or pr.closed: + continue statuses = {**json.loads(pr.statuses), **pr._get_overrides()} pr.statuses_full = json.dumps(statuses, indent=4) @@ -1520,18 +1516,6 @@ def _notify_ci_failed(self, ci): ) def _auto_init(self): - for field in self._fields.values(): - if not isinstance(field, fields.Selection) or field.column_type[0] == 'varchar': - continue - - t = field.column_type[1] - self.env.cr.execute("SELECT 1 FROM pg_type WHERE typname = %s", [t]) - if not self.env.cr.rowcount: - self.env.cr.execute( - f"CREATE TYPE {t} AS ENUM %s", - [tuple(s for s, _ in field.selection)] - ) - super()._auto_init() # incorrect index: unique(number, target, repository). 
tools.drop_index(self._cr, 'runbot_merge_unique_pr_per_target', self._table) @@ -1702,17 +1686,17 @@ def write(self, vals): if merge_method not in (False, 'rebase-ff') and pr.message != vals['message']: pr.unstage("merge message updated") - remover = self.env['forwardport.branch_remover'] - match vals.get('closed'): - case True if not self.closed: - vals['reviewed_by'] = False - remover.create([{'pr_id': self.id}]) - case False if self.closed and not self.batch_id: - vals['batch_id'] = self._get_batch( - target=vals.get('target') or self.target.id, - label=vals.get('label') or self.label, - ) - remover.search([('pr_id', '=', self.id)]).unlink() + remover = self.env['forwardport.branch_remover'] + match vals.get('closed'): + case True if not pr.closed: + vals['reviewed_by'] = False + remover.create([{'pr_id': pr.id}]) + case False if pr.closed and not pr.batch_id: + vals['batch_id'] = self._get_batch( + target=vals.get('target') or pr.target.id, + label=vals.get('label') or pr.label, + ) + remover.search([('pr_id', '=', pr.id)]).unlink() # if the PR's head is updated, detach (should split off the FP lines as this is not the original code) # TODO: better way to do this? 
Especially because we don't want to @@ -1951,9 +1935,9 @@ def _fp_conflict_feedback(self, previous_pr, conflicts): sout = serr = '' newline_ellipsis = '\n[...]' if out.strip(): - sout = f"\nstdout:\n```\n{utils.shorten(out, 8096, newline_ellipsis)}\n```\n" + sout = f"\nstdout:\n```\n{utils.shorten(out, 8192, newline_ellipsis)}\n```\n" if err.strip(): - serr = f"\nstderr:\n```\n{utils.shorten(err, 8069, newline_ellipsis)}\n```\n" + serr = f"\nstderr:\n```\n{utils.shorten(err, 8192, newline_ellipsis)}\n```\n" lines = '' if len(hh) > 1: @@ -2078,7 +2062,7 @@ def _create_port_branch( logger.info("Fetched head of %s (%s)", root.display_name, root.head) try: - return None, root._cherry_pick(source, target_branch, target_head) + return None, *root._cherry_pick(source, target_branch, target_head) except CherrypickError as e: h, out, err, commits = e.args @@ -2119,7 +2103,7 @@ def _create_port_branch( # port / conflict it's a modify/delete conflict: the file was # deleted in the target branch, and the update (modify) in the # original PR causes it to be added back - base = conf.remote_head(root.repository, root.target.name) + base = conf.fetchone(root.repository, root.target.name) original_modified = set(conf.diff( "--diff-filter=M", "--name-only", "--merge-base", base, @@ -2142,12 +2126,12 @@ def _create_port_branch( committer=committer[:2], ) assert commit.returncode == 0,\ - f"commit failed\n\n{commit.stdout.decode()}\n\n{commit.stderr.decode}" + f"commit failed\n\n{commit.stdout.decode()}\n\n{commit.stderr.decode()}" hh = commit.stdout.strip() - return (h, out, err, [c['sha'] for c in commits]), hh + return (h, out, err, [c['sha'] for c in commits]), hh, 1 - def _cherry_pick(self, repo: git.Repo, branch: Branch, head: str) -> str: + def _cherry_pick(self, repo: git.Repo, branch: Branch, head: str) -> tuple[str, int]: """ Cherrypicks ``self`` into ``branch`` :return: the HEAD of the forward-port is successful @@ -2252,7 +2236,7 @@ def _cherry_pick(self, repo: git.Repo, 
branch: Branch, head: str) -> str: head = cc.stdout.strip() logger.info('%s -> %s', commit_sha, head) - return head + return head, len(commits) def _make_fp_message(self, commit): cmap = json.loads(self.commits_map) @@ -2469,6 +2453,7 @@ def _send(self): to_remove.extend(ids) self.browse(to_remove).unlink() + class Feedback(models.Model): """ Queue of feedback comments to send to PR users """ @@ -2487,6 +2472,8 @@ class Feedback(models.Model): string="Bot User", help="Token field (from repo's project) to use to post messages" ) + tried = fields.Integer() + retry_after = fields.Datetime() @api.model_create_multi def create(self, vals_list): @@ -2496,8 +2483,14 @@ def create(self, vals_list): def _send(self): ghs = {} - to_remove = [] - for f in self.search([]): + i = 0 + limit = 100 + try_limit = 12 + for i, f in enumerate(self.search([ + '|', ('tried', '=', False), ('tried', '<', try_limit), + '|', ('retry_after', '=', False), + ('retry_after', '<', datetime.datetime.now()), + ], limit=limit), start=1): repo = f.repository gh = ghs.get((repo, f.token_field)) if not gh: @@ -2532,16 +2525,16 @@ def _send(self): if isinstance(e, HTTPError) and e.response.status_code == 500: self.env.context.get('deactivate', lambda _: None)(True) self.env['mail.thread'].message_notify( - body=f"Feedback cron failed with {str(e)}, cron has beeen disabled.", + subject=f"Feedback cron disabled ({e.response.reason})", + body=e.response.text, partner_ids=self.env.ref('runbot_merge.group_admin').users.partner_id.ids, ) elif isinstance(e, HTTPError) and e.response.status_code == 404 and f.reaction: _logger.info( "Comment not found (%s) when trying to send a reaction to %s#%s (%s)", - e, f.repo.name, f.pull_request, f.reaction, + e, f.repository.name, f.pull_request, f.reaction, ) - to_remove.append(f.id) - continue + f.unlink() _logger.exception( "Error while trying to %s %s#%s (%s)", @@ -2549,9 +2542,23 @@ def _send(self): repo.name, f.pull_request, utils.shorten(f.message, 200) ) + f.tried 
+= 1 + f.retry_after = datetime.datetime.now() \ + + datetime.timedelta(minutes=10) + if f.tried >= try_limit: + self.env['mail.thread'].message_notify( + subject=f"Failed to send feedback {f.id} to {f.repository.name}#{f.pull_request}", + body=f.message, + partner_ids=self.env.ref('runbot_merge.group_admin').users.partner_id.ids, + ) else: - to_remove.append(f.id) - self.browse(to_remove).unlink() + f.unlink() + + # if we reached the end of the batch, there are probably leftover + # feedbacks to send + if i == limit: + self.env.ref('runbot_merge.feedback_cron')._trigger() + class FeedbackTemplate(models.Model): _name = 'runbot_merge.pull_requests.feedback.template' @@ -2702,6 +2709,8 @@ def _notify(self): if stagings: stagings._notify(c) + + self.env.cr.commit() except psycopg2.errors.SerializationFailure: serialization_failures = True _logger.info("Failed to apply commit %s: serialization failure", sha) @@ -2709,8 +2718,6 @@ def _notify(self): except Exception: _logger.exception("Failed to apply commit %s", sha) self.env.cr.rollback() - else: - self.env.cr.commit() if serialization_failures: self.env.ref("runbot_merge.process_updated_commits")._trigger() @@ -3043,7 +3050,7 @@ def fail(self, message, prs=None): match self.target.on_fail: case 'nothing': pass - case 'join' if len(self.target.split_ids) == 1: + case 'join' if len(self.target.split_ids) <= 1: pass case 'join': batch_ids = self.target.split_ids.batch_ids @@ -3142,7 +3149,6 @@ def check_status(self): logger.info("Checking active staging %s (state=%s)", self, self.state) project = self.target.project_id if self.state == 'success': - gh = {repo.name: repo.github() for repo in project.repo_ids.having_branch(self.target)} self.env.cr.execute(''' SELECT 1 FROM runbot_merge_pull_requests WHERE id in %s @@ -3152,7 +3158,7 @@ def check_status(self): with sentry_sdk.start_span(description="merge staging") as span: span.set_tag("staging", self.id) span.set_tag("branch", self.target.name) - self._safety_dance(gh) 
+ self._safety_dance() except exceptions.FastForwardError as e: logger.warning( "Could not fast-forward successful staging on %s:%s: %s", @@ -3222,7 +3228,7 @@ def check_status(self): def is_timed_out(self): return fields.Datetime.from_string(self.timeout_limit) < datetime.datetime.now() - def _safety_dance(self, gh): + def _safety_dance(self): """ Reverting updates doesn't work if the branches are protected (because a revert is basically a force push). So we can update REPO_A, then fail to update REPO_B for some reason, and we're hosed. @@ -3234,42 +3240,48 @@ def _safety_dance(self, gh): to REPO_B during the staging we catch it. If we're really unlucky they could still push after the dry run but... """ - tmp_target = 'tmp.' + self.target.name + def push(rr, url, refspec) -> str | None: + r = rr.push('--porcelain', url, refspec) + if r.returncode: + if m := re.search(r'^[+*!=-]\t.*:.*\t\[(.*)]\s*(.*)$', r.stdout, re.MULTILINE): + if m[2]: + return f"{m[1]} {m[2]}" + return m[1] + return r.stderr + return None + + repos = {} # first force-push the current targets to all tmps - for repo_name in self.heads.repository_id.mapped('name'): - g = gh[repo_name] - g.set_ref(tmp_target, g.head(self.target.name)) + for head in self.heads: + repo = head.repository_id + repo_url, r = repos[repo] = ( + git.source_url(repo), + git.get_local(repo).stdout().with_config(encoding="utf-8", check=False), + ) + # can't push an object you don't have locally... 
+ branch_head = next(( + h for h in r.fetch_heads(repo, f"refs/heads/{self.target.name}", head.commit_id.sha) + if h != head.commit_id.sha + ), head.commit_id.sha) + if (reason := push(r, repo_url, f"+{branch_head}:refs/heads/tmp.{self.target.name}")) is not None: + raise exceptions.FastForwardError(repo.name) \ + from Exception(reason) # then attempt to FF the tmp to the staging commits - for c in self.heads: - gh[c.repository_id.name].fast_forward(tmp_target, c.commit_id.sha) + for head in self.heads: + repo_url, r = repos[head.repository_id] + if (reason := push(r, repo_url, f"{head.commit_id.sha}:refs/heads/tmp.{self.target.name}")) is not None: + raise exceptions.FastForwardError(head.repository_id.name) \ + from Exception(reason) # there is still a race condition here, but it's way # lower than "the entire staging duration"... - # TODO: skip for "nil" staging commits (iso current head) to limit GH - # error sources? # TODO: maybe ff commits in repository importance order as well? for i, c in enumerate(self.commits): - for pause in [0.1, 0.3, 0.5, 0.9, 0]: # last one must be 0/falsy of we lose the exception - try: - gh[c.repository_id.name].fast_forward( - self.target.name, - c.commit_id.sha - ) - except exceptions.FastForwardError as e: - # If this is the first staging commit, nothing has happened - # maybe, technically github could have both accepted and - # errored if the incident is bad enough) so just signal an - # FF error - if not i: - raise - if i and pause: - time.sleep(pause) - continue - raise exceptions.InconsistentIntegration( - self.commits[:i], - self.commits[i:], - ) from e - else: - break + repo_url, repo = repos[c.repository_id] + if (reason := push(repo, repo_url, f"{c.commit_id.sha}:refs/heads/{self.target.name}")) is not None: + raise exceptions.InconsistentIntegration( + self.commits[:i], + self.commits[i:], + ) from Exception(reason) @api.returns('runbot_merge.stagings') def for_heads(self, *heads): diff --git 
a/runbot_merge/models/stagings_create.py b/runbot_merge/models/stagings_create.py index fcb7975c3..0e79fff48 100644 --- a/runbot_merge/models/stagings_create.py +++ b/runbot_merge/models/stagings_create.py @@ -23,7 +23,7 @@ from odoo import api, models, fields, Command, addons from odoo.tools import OrderedSet, groupby, Reverse -from .pull_requests import Branch, Stagings, PullRequests, Repository +from .pull_requests import Branch, Stagings, PullRequests, Repository, Split from .batch import Batch from .. import exceptions, utils, github, git from ..github import PrCommit @@ -65,6 +65,22 @@ def batch_key(batch: Batch) -> tuple: ) return -priority, batch.unblocked_at or datetime.max, batch.id +def prepare_split( + split: Split, + priority: Literal['splits', 'ready', 'largest'], +) -> tuple[Batch, int, set[int]]: + batches = split.batch_ids.sorted(batch_key) + parent_id = split.staging_id.id + originals = set(split.original_batches or ()) + split.with_context(staging_split=True).unlink() + _logger.info( + "staging split PRs %s (prioritising %s)", + ', '.join(batches.mapped('prs.display_name')), + priority, + ) + return batches, parent_id, originals + + def try_staging(branch: Branch, batches: Optional[Batch] = None) -> Optional[Stagings]: """ Tries to create a staging if the current branch does not already have one. 
Returns None if the branch already has a staging or there @@ -84,6 +100,7 @@ def log(label: str, batches: Batch) -> None: parent_id = False originals = set() + split = None if batches: log("staging retried PRs %s", batches) else: @@ -92,12 +109,8 @@ def log(label: str, batches: Batch) -> None: if alone: log("staging high-priority PRs %s", batches) elif branch.project_id.staging_priority == 'default': - if split := branch.first_split.with_context(staging_split=True): - parent_id = split.staging_id.id - batches = split.batch_ids.sorted(batch_key) - originals = set(split.original_batches) - split.unlink() - log("staging split PRs %s (prioritising splits)", batches) + if split := branch.first_split: + batches, parent_id, originals = prepare_split(split, 'splits') else: # priority, normal; priority = sorted ahead of normal, so always picked # first as long as there's room @@ -106,26 +119,21 @@ def log(label: str, batches: Batch) -> None: if batches: log("staging ready PRs %s (prioritising ready)", batches) else: - split = branch.first_split.with_context(staging_split=True) - parent_id = split.staging_id.id - batches = split.batch_ids.sorted(batch_key) - originals = set(split.original_batches) - split.unlink() - log("staging split PRs %s (prioritising ready)", batches) + split = branch.first_split + batches, parent_id, originals = prepare_split(split, 'ready') else: assert branch.project_id.staging_priority == 'largest' - maxsplit = max(branch.split_ids, key=lambda s: len(s.batch_ids), default=branch.env['runbot_merge.split']) - _logger.info("largest split = %d, ready = %d", len(maxsplit.batch_ids), len(batches)) + split = max(branch.split_ids, key=lambda s: len(s.batch_ids), default=branch.env['runbot_merge.split']) + _logger.info("largest split = %d, ready = %d", len(split.batch_ids), len(batches)) # bias towards splits if len(ready) = len(batch_ids) - if len(maxsplit.batch_ids) >= len(batches): - batches = maxsplit.batch_ids.sorted(batch_key) - originals = 
set(maxsplit.original_batches) - maxsplit.unlink() - log("staging split PRs %s (prioritising largest)", batches) + if len(split.batch_ids) >= len(batches): + batches, parent_id, originals = prepare_split(split, 'largest') else: log("staging ready PRs %s (prioritising largest)", batches) if not batches: + if split: # in case of empty split, re-run staging Soon© + branch.env.ref('runbot_merge.staging_cron')._trigger() return None original_heads, staging_state = staging_setup(branch, batches) @@ -266,7 +274,7 @@ def log(label: str, batches: Batch) -> None: with contextlib.closing(branch.env.cr.savepoint()): staged_split = stage_batches(branch, split, staging_state_split) if not staged_split: - _logger.warning("Failed to stage presplit %d of %s") + _logger.warning("Failed to stage presplit %d of %s", i, st) break if staged_split != split: _logger.warning("Failed to fully stage presplit %d of %s", i, st) @@ -618,7 +626,7 @@ def validate_pr(pr: PullRequests, info: StagingSlice) -> tuple[Method, list[PrCo # raise exceptions.Skip( # f"{pr.display_name} mergeable_state={prdict['mergeable_state']!r}" # ) - if commits > 50 and method.startswith('rebase'): + if commits > 50 and method and method.startswith('rebase'): raise exceptions.Unmergeable(pr, "Rebasing 50 commits is too much.") if commits > 250: raise exceptions.Unmergeable( @@ -759,11 +767,10 @@ def stage_merge(pr: PullRequests, info: StagingSlice, commits: List[github.PrCom external_parents = len(merge) if external_parents > 1: raise exceptions.Unmergeable( + pr, "The PR head can only have one parent from the base branch " - "(not part of the PR itself), found %d: %s" % ( - external_parents, - ', '.join(merge) - )) + f"(not part of the PR itself), found {external_parents:d}: {', '.join(merge)}" + ) if external_parents == 1: [base_commit] = merge diff --git a/runbot_merge/models/utils.py b/runbot_merge/models/utils.py index 4ac8ff303..bd10a2888 100644 --- a/runbot_merge/models/utils.py +++ 
b/runbot_merge/models/utils.py @@ -1,22 +1,59 @@ import logging from contextvars import ContextVar -from typing import Tuple from xml.etree.ElementTree import Element import markdown.inlinepatterns import markdown.treeprocessors from markupsafe import escape, Markup - -def enum(model: str, field: str) -> Tuple[str, str]: - n = f'{model.replace(".", "_")}_{field}_type' - return n, n +from odoo import fields, models +from odoo.tools import SQL def readonly(_): raise TypeError("Field is readonly") +class Base(models.AbstractModel): + _inherit = 'base' + + def _auto_init(self) -> None: + cr = self.env.cr + for f in self._fields.values(): + if not isinstance(f, EnumSelection): + continue + t, _ = f.column_type + cr.execute("SELECT FROM pg_type WHERE typname = %s", [t]) + if not cr.rowcount: + cr.execute(SQL( + "CREATE TYPE %s AS ENUM %s", + SQL.identifier(t), + tuple(s for s, _ in f.selection), + )) + + super()._auto_init() + + +class EnumSelection(fields.Selection): + @property + def column_type(self) -> tuple[str, str]: + n = f'{self.model_name.replace(".", "_")}_{self.name}_type' + return n, n + + def update_db_column(self, model, column) -> None: + cr = model._cr + t, _ = self.column_type + cr.execute("SELECT FROM pg_type WHERE typname = %s", [t]) + if not cr.rowcount: + cr.execute(SQL( + "CREATE TYPE %s AS ENUM %s", + SQL.identifier(t), + tuple(s for s, _ in self.selection), + )) + + super().update_db_column(model, column) + + DFM_CONTEXT_REPO = ContextVar("dfm_context", default="") def dfm(repository: str, text: str) -> Markup: """ Converts the input text from markup to HTML using the Odoo PR diff --git a/scripts/git-fw b/runbot_merge/scripts/git-fw similarity index 58% rename from scripts/git-fw rename to runbot_merge/scripts/git-fw index a9432b1c8..5586ec8c8 100755 --- a/scripts/git-fw +++ b/runbot_merge/scripts/git-fw @@ -3,9 +3,12 @@ # requires-python = ">= 3.10 # dependencies = [] # /// +import hashlib import json +import pathlib import re import sys +import 
threading import typing import urllib.error import urllib.parse @@ -13,6 +16,7 @@ import urllib.request from subprocess import run, PIPE, DEVNULL __VERSION__ = "0.1" +SCRIPT_URL = 'https://raw.githubusercontent.com/odoo/runbot/refs/heads/17.0/runbot_merge/scripts/git-fw' class PrInfo(typing.TypedDict): @@ -38,8 +42,8 @@ SYNOPSYS \tgit fw (--continue | --skip | --abort | --quit) DESCRIPTION -\tGiven a pull request (by github or mergebot URL, or <owner>/<repo>#<number> -\tshort form), re-execute its forward porting via git-cherry-pick(1), +\tGiven a pull request (by number, <owner>/<repo>#<number> short form, or full +\tgithub or mergebot URL), re-execute its forward porting via git-cherry-pick(1), \tproviding the ability to interactively fix conflicts then push the result \tback onto the forward-port branch. @@ -74,26 +78,43 @@ def redo(pr: str) -> int: exit("not a forward port") parent = info(res["parent"]) repository_url, reflist = ls_remote( - res["repository"], res["target"], parent["target"] + res["repository"], + res["target"], + parent["target"], + res['ref'], ) refs = dict(reflist) - commits_of_interest = [*refs.values(), parent["head"], res["head"]] + commits_of_interest = [*refs.values(), parent["head"], res["head"], res['ref']] if cm := parent["commits_map"]: commits_of_interest.append(cm[""]) + if f'refs/heads/{res["ref"]}' in refs: + devname = res['repository'] + else: + # try to probe a dev repository for the branch + devname = res['repository'].replace('odoo/', 'odoo-dev/') + dev_url, reflist = ls_remote(devname, res['ref']) + if not reflist: + exit(f"unable to find ref {res['ref']!r} in {res['repository']!r} or {devname!r}") + + urls = tuple( + f"{root}{devname}{ext}" + for root in ("https://github.com/", 'git@github.com:') + for ext in ('.git', '') + ) + + for name, url, kind in remotes(): + if kind == 'fetch' and url in urls: + remotename = name + break + else: + exit(f"found {res['ref']!r} in {devname!r} but no configured remote for it") + r = 
run( - [ - "git", - "fetch", - "--dry-run", - "--porcelain", - "--no-tags", - repository_url, - *commits_of_interest, - ], + ["git", "fetch", "--no-tags", "-q", remotename, *commits_of_interest], stdout=DEVNULL, ) if r.returncode: - exit("unable to retrieve commits of interest") + exit(f"unable to retrieve commits of interest from {remotename!r}") # list the source's commits r = run( @@ -115,7 +136,8 @@ def redo(pr: str) -> int: # remap commits to forward port to those which got merged commits = [cm[c] for c in commits] - run(["git", "switch", res["ref"]], check=True) + run(["git", "switch", "--guess", res["ref"]], check=True) + run(["git", "branch", "--set-upstream-to", f"{remotename}/{res['ref']}"], check=True, stdout=DEVNULL) merge_base = run( ["git", "merge-base", refs[f"refs/heads/{res['target']}"], res["head"]], stdout=PIPE, @@ -127,6 +149,9 @@ def redo(pr: str) -> int: def parse_pr(pr: str) -> tuple[str, str]: + if m := re.fullmatch(r'#?(\d+)', pr): + return from_remotes(), m[1] + if m := re.fullmatch(r"(\w+/\w+)#(\d+)", pr): return m[1], m[2] @@ -139,6 +164,36 @@ def parse_pr(pr: str) -> tuple[str, str]: exit(f"failed to find PR information in {pr!r}") +def remotes() -> typing.Iterator[tuple[str, str, str]]: + r = run(["git", "remote", "-v"], check=True, stdout=PIPE, encoding="utf-8") + for line in r.stdout.splitlines(keepends=False): + # a line should be name TAB url SPACE ( kind ) + if m := re.fullmatch( + r''' + (?P<name>[^\t]+) + \t + (?P<url>[^ ]+) + \x20 + \((?P<kind>.+)\) + # promisor filters + (?: \[.*])? 
+ ''', + line, + re.VERBOSE, + ): + yield typing.cast(tuple[str, str, str], m.groups()) + + +def from_remotes() -> str: + for _name, url, _kind in remotes(): + for prefix in ('git@github.com:', 'https://github.com/'): + if url.startswith(prefix): + return url.removeprefix(prefix)\ + .removesuffix('.git')\ + .replace('odoo-dev/', 'odoo/') + exit(f"Found no github remote in:\n{r.stdout}") + + def ls_remote( repository: str, *patterns: str ) -> tuple[str, typing.Iterator[tuple[str, str]]]: @@ -151,7 +206,7 @@ def ls_remote( f"git@github.com:{repository}", ]: r = run( - ["git", "ls-remote", "-q", repository_url, *patterns], + ["git", "-c", "credential.interactive=false", "ls-remote", "-q", repository_url, *patterns], stdout=PIPE, stderr=DEVNULL, ) @@ -178,5 +233,17 @@ def info(url: str) -> PrInfo: exit(f"invalid response (not json): {e}") +def check_version(f): + if not f: + return + + h = hashlib.blake2b(pathlib.Path(f).read_bytes(), usedforsecurity=False) + with urllib.request.urlopen(SCRIPT_URL) as r: + hh = hashlib.blake2b(r.read(), usedforsecurity=False) + if h.digest() != hh.digest(): + print(f"You may want to update {f}:\nA different version is available at {SCRIPT_URL}") + + if __name__ == "__main__": + threading.Thread(target=check_version, args=(__file__,), daemon=True).start() exit(main()) diff --git a/runbot_merge/static/src/js/runbot_merge.js b/runbot_merge/static/src/js/runbot_merge.js index df9095a8d..642606674 100644 --- a/runbot_merge/static/src/js/runbot_merge.js +++ b/runbot_merge/static/src/js/runbot_merge.js @@ -46,38 +46,28 @@ if (document.readyState === "loading") { /* region cross-staging batch highlighting */ window.addEventListener("mouseover", (e) => { - const batch = e.target.closest('li.batch'); + const batch = e.target.closest('[data-batch-id]'); if (!batch) return; // Only trigger if coming from outside this batch const related = e.relatedTarget; if (!related || !batch.contains(related)) { - for (const b of 
document.querySelectorAll(`li.batch[data-batch-id="${batch.dataset.batchId}"]`)) { - b.style.outline = '1px dashed var(--body-color)'; + for (const b of document.querySelectorAll(`[data-batch-id="${batch.dataset.batchId}"]`)) { + b.style.outline = 'thin dashed'; } } }); window.addEventListener("mouseout", (e) => { - const batch = e.target.closest('li.batch'); + const batch = e.target.closest('[data-batch-id]'); if (!batch) return; // Only trigger if leaving to outside this batch const related = e.relatedTarget; if (!related || !batch.contains(related)) { - for (const b of document.querySelectorAll(`li.batch[data-batch-id="${batch.dataset.batchId}"]`)) { + for (const b of document.querySelectorAll(`[data-batch-id="${batch.dataset.batchId}"]`)) { b.style.outline = ''; } } }); /* endregion */ - -/* region dropdowns */ -// If there's an open dropdown and we click outside the dropdown, close the dropdown. -window.addEventListener("click", e => { - const dropdown = document.querySelector('details[name="dropdown"][open]'); - if (dropdown && !dropdown.contains(e.target)) { - dropdown.removeAttribute('open'); - } -}); - window.addEventListener("click", e => { const toggle = e.target.closest('a.dropdown-toggle'); if (toggle) { @@ -96,61 +86,10 @@ window.addEventListener("click", e => { title.classList.toggle('fold'); } }); - -/** - * Only implement flipping up if there's no space below, not the left/right - * toggling and sliding. - * - * TODO: use popper.js instead to get more flexible behaviour? 
- */ -function placeDropdown(details) { - const viewportHeight = document.documentElement.clientHeight; - - const detailsRect = details.getBoundingClientRect(); - const dropDown = details.querySelector(':scope > div'); - const dropdownRect = dropDown.getBoundingClientRect() - - // Amount of clipping in each direction (negative if dropdown is fully inside the viewport) - const clippingBottom = (detailsRect.bottom + dropdownRect.height) - viewportHeight; - // fastpath - if (clippingBottom <= 0 && !dropDown.style.inset) { - return; - } - - const clippingTop = -(detailsRect.top - dropdownRect.height); - let inset; - if (clippingBottom <= 0 || clippingBottom <= clippingTop) { - inset = `${detailsRect.height}px auto auto 0`; - } else { - inset = `auto auto ${detailsRect.height}px 0`; - } - dropDown.style.inset = inset; -} - -window.addEventListener("click", e => { - const summary = e.target.closest('summary'); - const details = summary?.parentNode; - if (details && !details.hasAttribute('open') && details.getAttribute('name') === 'dropdown') { - summary.nextElementSibling.style.visibility = 'hidden'; - } -}); -window.addEventListener("toggle", e => { - if (e.newState === 'open' && e.target.matches('details[name="dropdown"]')) { - placeDropdown(e.target); - } - e.target.querySelector(':scope>div').style.visibility = ''; -}, {capture: true}); - -window.addEventListener('scroll', _ => { - const openDetails = document.querySelector('details[name="dropdown"][open]'); - if (openDetails) { - placeDropdown(openDetails); - } -}); -window.addEventListener('resize', _ => { - const openDetails = document.querySelector('details[name="dropdown"][open]'); - if (openDetails) { - placeDropdown(openDetails); +window.addEventListener('touchstart', e => { + const title = e.target.tagName !== 'A' && e.target.closest('section>section>h2'); + if (title) { + title.classList.toggle('fold'); + return false; } }); -/* endregion */ \ No newline at end of file diff --git 
a/runbot_merge/static/src/runbot_merge.css b/runbot_merge/static/src/runbot_merge.css index 47cfd6bef..8412992b1 100644 --- a/runbot_merge/static/src/runbot_merge.css +++ b/runbot_merge/static/src/runbot_merge.css @@ -436,13 +436,7 @@ ul.stagings { grid-template-columns: repeat(12, minmax( min(11pc + var(--pad1) + var(--pad2), 49vw - var(--gutter-x) * 0.5), 1fr)); grid-template-rows: repeat(2, auto); grid-column-gap: 1px; - /* - This is Not Awesome, but apparently you can not combine auto in one - direction and visible in the other, so you can't have the dropdown - get out of the stagings at the bottom while having the stagings - horizontally scrollable (at least not without JS scrolling). - */ - overflow-x: clip; + overflow-x: auto; &>li.staging { grid-template-rows: subgrid; @@ -472,11 +466,17 @@ ul.stagings { } } } - &>details { - margin: 0.5em 0; - &>summary { - text-indent: 1em hanging; - } + /* + :open is supposed to work on popovertarget (in supporting browser) + but it doesn't seem to work in FF while matching + `@supports selector(:has)`, so FF gets neither version therefore + just use the legacy one + */ + &>button[popovertarget]:before { + content: '▸ '; + } + &:has([popover]:popover-open) > button[popovertarget]:before { + content: "▾ "; } } } @@ -492,38 +492,48 @@ ul.stagings { } } -details[name="dropdown"] { - position: relative; - &>summary { - padding: 0 var(--pad1, 0) 0 var(--pad2, 0); +div.dropdown[popover] { + :has(&) { + anchor-scope: all; + } + button:has(+ &) { + display: inline-block; + background: none; + border: none; cursor: pointer; user-select: none; + + text-align: start; + text-indent: 0.8em hanging; + margin: 0.5em 0; + padding: 0 var(--pad1, 0) 0 var(--pad2, 0); } - &>div { - position: absolute; - z-index: 100; - max-height: 70vh; - min-width: 10rem; - overflow: auto; - color: var(--color-foreground); - background-color: var(--color-background); + position: absolute; + inset: unset; + top: anchor(var(--my-anchor) bottom); + left: 
anchor(var(--my-anchor) left); + position-try-fallbacks: flip-block, flip-inline; - padding: 0.5rem 0; - border: 1px solid transparent; - border-radius: 0.25rem; + max-height: 70vh; + min-width: 10rem; + overflow: auto; - /*inset: 100% 0 auto auto;*/ + color: var(--color-foreground); + background-color: var(--color-background); - & > * { - display: block; - width: 100%; - padding: 0.25rem 1rem; - white-space: nowrap; - color: color-mix(in srgb, var(--link-color) 85%, var(--color-background)); - &:hover { - color: color-mix(in srgb, var(--link-color), var(--color-foreground)); - } + padding: 0.5rem 0; + border: 1px solid transparent; + border-radius: 0.25rem; + + & > * { + display: block; + width: 100%; + padding: 0.25rem 1rem; + white-space: nowrap; + color: color-mix(in srgb, var(--link-color) 85%, var(--color-background)); + &:hover { + color: color-mix(in srgb, var(--link-color), var(--color-foreground)); } } } diff --git a/runbot_merge/tests/test_basic.py b/runbot_merge/tests/test_basic.py index 11efec797..4410f30cc 100644 --- a/runbot_merge/tests/test_basic.py +++ b/runbot_merge/tests/test_basic.py @@ -158,7 +158,7 @@ def test_trivial_flow(env, repo, page, users, config, project, partners, status_ } p = html.fromstring(page('/runbot_merge')) - s = p.cssselect('.staging details[name="dropdown"] a') + s = p.cssselect('.staging div.staging-statuses a') assert len(s) == 2, "not logged so only *required* statuses" for e, status in zip(s, ['legal/cla', 'ci/runbot']): assert set(e.classes) == {'bg-success'} @@ -740,13 +740,13 @@ def test_ff_failure(env, repo, config, page): repo.post_status('staging.master', 'success') env.run_crons() - assert st.reason == 'update is not a fast forward' + assert st.reason == 'rejected (non-fast-forward)' # check that it's added as title on the staging doc = html.fromstring(page('/runbot_merge')) _new, prev = doc.cssselect('li.staging') assert 'bg-gray-lighter' in prev.classes, "ff failure is ~ cancelling" - assert 'fast forward 
failed (update is not a fast forward)' in prev.get('title') + assert 'fast forward failed (rejected (non-fast-forward))' in prev.get('title') assert to_pr(env, prx).staging_id, "merge should not have succeeded" assert repo.commit('staging.master').id != staging.id,\ @@ -1448,8 +1448,12 @@ def test_pr_no_method(self, repo, env, users, config): Commit('B1', tree={'b': '1'}), ) prx = repo.make_pr(title='title', body='body', target='master', head=b1) - repo.post_status(prx.head, 'success') prx.post_comment('hansen r+', config['role_reviewer']['token']) + # wait hook and run crons before status to works around a race in + # runbot mode (with high parallelism) + env.run_crons() + with repo: + repo.post_status(prx.head, 'success') env.run_crons() assert not to_pr(env, prx).staging_id @@ -1464,7 +1468,6 @@ def test_pr_no_method(self, repo, env, users, config): * `rebase-merge` to rebase and merge, using the PR as merge commit message * `rebase-ff` to rebase and fast-forward """.format_map(users)), - (users['user'], "@{user} @{reviewer} unable to stage: missing merge method".format_map(users)), ] def test_pr_method_no_review(self, repo, env, users, config): @@ -2581,6 +2584,7 @@ def test_update_incorrect_commits_count(self, port, env, project, repo, config, with repo: pr.post_comment("hansen r+", config['role_reviewer']['token']) + with repo: repo.post_status(c, 'success') env.run_crons() assert not pr_id.blocked diff --git a/runbot_merge/tests/test_batching.py b/runbot_merge/tests/test_batching.py index 0bf4ed061..ab9c0713f 100644 --- a/runbot_merge/tests/test_batching.py +++ b/runbot_merge/tests/test_batching.py @@ -581,7 +581,7 @@ def test_not_prestage(env, project, repo, users, config): # pytest assertion rewriting tacks the extra info to the user-provided # assertion message, so we need to strip it payload, _ = e.value.args[0].split('\n', 1) - assert json.loads(payload)['status'] == '404' + assert json.loads(payload)['status'] == '422' def test_split_depthfirst(env, 
project, repo, users, config): project.branch_ids.depth_first_splits = True @@ -656,4 +656,17 @@ def test_solve_case(env, project, repo, users, config, on_fail, cutoff): # been picked up, with PR 5+~6 being overflow assert pr_ids[0].error assert all([p.staging_id for p in pr_ids[1:cutoff]]) - assert all([not p.staging_id for p in pr_ids[cutoff:]]) \ No newline at end of file + assert all([not p.staging_id for p in pr_ids[cutoff:]]) + +def test_join_last(env, project, repo, users, config): + project.branch_ids.on_fail = 'join' + with repo: + repo.make_commits(None, Commit('x', tree={'a': 'a'}), ref='heads/master') + pr = _pr(repo, 'y', [{'c': '1'}], user=config['role_user']['token'], reviewer=config['role_reviewer']['token']) + env.run_crons() + + with repo: + repo.post_status('staging.master', 'failure') + env.run_crons() + + assert to_pr(env, pr).error \ No newline at end of file diff --git a/runbot_merge/tests/test_oddities.py b/runbot_merge/tests/test_oddities.py index 67905346f..d5d871441 100644 --- a/runbot_merge/tests/test_oddities.py +++ b/runbot_merge/tests/test_oddities.py @@ -1,3 +1,5 @@ +import datetime + import pytest import requests @@ -490,7 +492,7 @@ def test_staging_push_blocked(env, project, repo, config, users): f"""\ <pre>\ remote: error: GH013: Repository rule violations found for refs/heads/staging.master. -remote: Review all repository rules +remote: Review all repository rules at https://github.com/{repo.name}/rules?ref=refs%2Fheads%2Fstaging.master remote: remote: - Cannot create ref due to creations being restricted. 
remote: @@ -635,4 +637,118 @@ def test_cron_autodisable(env, code, active): assert cron.active cron.trigger() env.run_crons() - assert cron.active == active \ No newline at end of file + assert cron.active == active + +def test_empty_split(env, project, repo, users, config): + b = env['runbot_merge.batch'].create({ + 'target': project.branch_ids.id, + 'merge_date': datetime.datetime.now(), + }) + st = env['runbot_merge.stagings'].create({ + 'target': project.branch_ids.id, + 'active': False, + 'state': 'failure', + 'staging_end': datetime.datetime.now(), + 'staging_batch_ids': [(0, 0, {'runbot_merge_batch_id': b.id})] + }) + env['runbot_merge.split'].create({ + 'target': project.branch_ids.id, + 'staging_id': st.id, + 'batch_ids': [], + 'original_batches': [], + }) + with repo: + [m] = repo.make_commits(None, Commit('initial', tree={'m': 'm'}), ref='heads/master') + + repo.make_commits(m, Commit('thing1', tree={}), ref='heads/other1') + pr1 = repo.make_pr(target='master', head='other1') + repo.post_status(pr1.head, 'success') + pr1.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() + + assert to_pr(env, pr1).staging_id + +def test_create_commits_count( + port, env, project, repo, users, config, +): + with repo: + [m] = repo.make_commits(None, Commit('initial', tree={'m': 'm'}), ref='heads/master') + + [c] = repo.make_commits(m, Commit('thing1', tree={}), ref='heads/other1') + with repo.disable_hooks(): + pr = repo.make_pr(target='master', head=c) + env.run_crons() + + with pytest.raises(TimeoutError): + to_pr(env, pr) + + r = requests.post( + f"http://localhost:{port}/runbot_merge/hooks", + headers={ + "X-Github-Event": "pull_request", + }, + json={ + 'action': 'opened', + 'sender': {'login': users['user']}, + 'repository': {'full_name': repo.name}, + 'pull_request': { + 'number': pr.number, + 'state': 'open', + 'user': {'login': users['user']}, + 'head': {'sha': c, 'label': f'{repo.owner}:other1'}, + 'base': {'ref': 'master', 'repo': 
{'full_name': repo.name}}, + 'title': "c", + 'commits': 0, + 'draft': False, + } + } + ) + r.raise_for_status() + + pr_id = to_pr(env, pr) + assert not pr_id.squash + env.run_crons() + assert pr_id.squash + +def test_sync_commits_count(port, env, project, repo, users, config) -> None: + with repo: + [m] = repo.make_commits(None, Commit('initial', tree={'m': 'm'}), ref='heads/master') + + [c] = repo.make_commits(m, Commit('thing1', tree={}), ref='heads/other1') + pr = repo.make_pr(target='master', head=c) + env.run_crons() + + # simulate github being stupid + r = requests.post( + f"http://localhost:{port}/runbot_merge/hooks", + headers={ + "X-Github-Event": "pull_request", + }, + json={ + 'action': 'labeled', + 'sender': { + 'login': users['user'], + }, + 'repository': { + 'full_name': repo.name, + }, + 'pull_request': { + 'number': pr.number, + 'head': {'sha': c}, + 'title': "c", + 'commits': 0, + 'base': { + 'ref': 'xxx', + 'repo': { + 'full_name': repo.name, + }, + } + } + } + ) + r.raise_for_status() + + pr_id = to_pr(env, pr) + assert not pr_id.squash + env.run_crons() + assert pr_id.squash \ No newline at end of file diff --git a/runbot_merge/tests/test_provisioning.py b/runbot_merge/tests/test_provisioning.py index 104cbadaf..531a831ec 100644 --- a/runbot_merge/tests/test_provisioning.py +++ b/runbot_merge/tests/test_provisioning.py @@ -1,3 +1,4 @@ +import pytest import requests GEORGE = { @@ -108,6 +109,43 @@ def test_duplicates(env, port): 'sub': '43' }]) == [0, 0] +def test_change_login(env, port): + """When a `github_login` is updated on an employee, first the original + github_login is deprovisioned then the new login is provisioned. + + This causes confusion in the ranks, because deprovisioning removes the + two factors we use to find existing partners. 
+ """ + assert provision_user(port, [{ + 'name': "foo", + 'email': 'foo@example.com', + 'github_login': 'foo', + 'sub': '42' + }]) == [1, 0] + + requests.post(f'http://localhost:{port}/runbot_merge/disable_users', json={ + 'jsonrpc': '2.0', + 'id': None, + 'method': 'call', + 'params': {'github_logins': ['foo']}, + }).raise_for_status() + + assert provision_user(port, [{ + 'name': "foo", + 'email': 'foo@example.com', + 'github_login': 'bar', + 'sub': '42' + }]) == [0, 1] + + u = env['res.users'].search([('login', '=', 'foo@example.com')]) + assert u + assert u.active + internal = env.ref('base.group_user') + assert (u.groups_id & internal) == internal + assert u.partner_id.active + assert u.partner_id.github_login == 'bar' + + def test_no_email(env, port): """ Provisioning system should ignore email-less entries """ diff --git a/runbot_merge/tests/test_statuses.py b/runbot_merge/tests/test_statuses.py new file mode 100644 index 000000000..16d74b715 --- /dev/null +++ b/runbot_merge/tests/test_statuses.py @@ -0,0 +1,131 @@ +import json + +import pytest + +from utils import Commit, to_pr, seen + + +def test_optional_statuses(env, project, make_repo, users, setreviewers, config): + repository = make_repo('repo') + env['runbot_merge.repository'].create({ + 'project_id': project.id, + 'name': repository.name, + 'status_ids': [(0, 0, {'context': 'l/int', 'prs': 'optional'})] + }) + setreviewers(*project.repo_ids) + env['runbot_merge.events_sources'].create({'repository': repository.name}) + + with repository: + m = repository.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') + + repository.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') + pr = repository.make_pr(target='master', title='super change', head='change') + env.run_crons() + + # if an optional status is never received then the PR is valid + pr_id = to_pr(env, pr) + assert pr_id.state == 'validated' + + # If a run has started, then the PR is pending (not considered valid), 
this + # limits the odds of merging a PR even though it's not valid, as long as the + # optional status starts running before all the required statuses arrive + # (with a success result). + with repository: + repository.post_status(pr.head, 'pending', 'l/int') + env.run_crons() + assert pr_id.state == 'opened' + + # If the status fails, then the PR is rejected. + with repository: + repository.post_status(pr.head, 'failure', 'l/int') + env.run_crons() + assert pr_id.state == 'opened' + + # re-run the job / fix the PR + with repository: + repository.post_status(pr.head, 'pending', 'l/int') + env.run_crons() + assert pr_id.state == 'opened' + + with repository: + repository.post_status(pr.head, 'success', 'l/int') + env.run_crons() + assert pr_id.state == 'validated' + +def test_incomplete_statuses_request(env, project, make_repo, users, setreviewers, config): + """If "request missing statuses" is set and a PR does not + have at least a `pending` for every required status, send a request.
+ """ + project.request_missing_statuses = True + repo = make_repo('repo') + env['runbot_merge.repository'].create({ + 'project_id': project.id, + 'name': repo.name, + 'status_ids': [(0, 0, {'context': 'l/azy'})] + }) + setreviewers(*project.repo_ids) + env['runbot_merge.events_sources'].create({'repository': repo.name}) + + with repo: + m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') + + [c] = repo.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') + pr = repo.make_pr(target='master', title='super change', head=c) + env.run_crons() + + assert env['saas.calls'].search([]) == env['saas.calls'] + + with repo: + pr.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() + + assert env['saas.calls'].search_read([], ['method', 'url', 'body']) == [{ + 'id': 1, + 'method': 'POST', + 'url': 'https://runbot.odoo.com/runbot/request_ci', + 'body': json.dumps({"pull_requests": [f"{repo.name}#{pr.number}"]}) + }] + +@pytest.mark.parametrize('status', [ + 'success', 'failure', 'pending', 'error', +]) +def test_complete_statuses_norequest( + env, project, make_repo, users, setreviewers, config, status +): + """If *any* status has been sent on the status we don't trigger the + request. 
+ """ + project.request_missing_statuses = True + repo = make_repo('repo') + env['runbot_merge.repository'].create({ + 'project_id': project.id, + 'name': repo.name, + 'status_ids': [(0, 0, {'context': 'l/azy'}), (0, 0, {'context': 'o/ptional', 'prs': 'optional'})] + }) + setreviewers(*project.repo_ids) + env['runbot_merge.events_sources'].create({'repository': repo.name}) + + with repo: + m = repo.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') + + [c] = repo.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') + pr = repo.make_pr(target='master', title='super change', head=c) + repo.post_status(c, status, 'l/azy') + env.run_crons() + + with repo: + pr.post_comment('hansen r+', config['role_reviewer']['token']) + env.run_crons() + + assert env['saas.calls'].search([]) == env['saas.calls'] + if status not in ('error', 'failure'): + assert pr.comments == [ + seen(env, pr, users), + (users['reviewer'], 'hansen r+'), + ] + else: + assert pr.comments == [ + seen(env, pr, users), + (users['reviewer'], 'hansen r+'), + (users['user'], "@{reviewer} you may want to rebuild or fix this PR as it has failed CI.".format_map(users)), + ] \ No newline at end of file diff --git a/runbot_merge/tests/test_statuses_optional.py b/runbot_merge/tests/test_statuses_optional.py deleted file mode 100644 index 7627676ed..000000000 --- a/runbot_merge/tests/test_statuses_optional.py +++ /dev/null @@ -1,49 +0,0 @@ -from utils import Commit, to_pr - - -def test_basic(env, project, make_repo, users, setreviewers, config): - repository = make_repo('repo') - env['runbot_merge.repository'].create({ - 'project_id': project.id, - 'name': repository.name, - 'status_ids': [(0, 0, {'context': 'l/int', 'prs': 'optional'})] - }) - setreviewers(*project.repo_ids) - env['runbot_merge.events_sources'].create({'repository': repository.name}) - - with repository: - m = repository.make_commits(None, Commit('root', tree={'a': '1'}), ref='heads/master') - - 
repository.make_commits(m, Commit('pr', tree={'a': '2'}), ref='heads/change') - pr = repository.make_pr(target='master', title='super change', head='change') - env.run_crons() - - # if an optional status is never received then the PR is valid - pr_id = to_pr(env, pr) - assert pr_id.state == 'validated' - - # If a run has started, then the PR is pending (not considered valid), this - # limits the odds of merging a PR even though it's not valid, as long as the - # optional status starts running before all the required statuses arrive - # (with a success result). - with repository: - repository.post_status(pr.head, 'pending', 'l/int') - env.run_crons() - assert pr_id.state == 'opened' - - # If the status fails, then the PR is rejected. - with repository: - repository.post_status(pr.head, 'failure', 'l/int') - env.run_crons() - assert pr_id.state == 'opened' - - # re-run the job / fix the PR - with repository: - repository.post_status(pr.head, 'pending', 'l/int') - env.run_crons() - assert pr_id.state == 'opened' - - with repository: - repository.post_status(pr.head, 'success', 'l/int') - env.run_crons() - assert pr_id.state == 'validated' diff --git a/runbot_merge/views/runbot_merge_project.xml b/runbot_merge/views/runbot_merge_project.xml index 12b97b027..efae64324 100644 --- a/runbot_merge/views/runbot_merge_project.xml +++ b/runbot_merge/views/runbot_merge_project.xml @@ -64,6 +64,7 @@ <field name="use_mergiraf" widget="boolean_toggle"/> <field name="lateness_limit"/> <field name="uniquifier" widget="boolean_toggle"/> + <field name="request_missing_statuses" widget="boolean_toggle"/> </group> </group> </page> diff --git a/runbot_merge/views/templates.xml b/runbot_merge/views/templates.xml index e7245a658..ffe0eb910 100644 --- a/runbot_merge/views/templates.xml +++ b/runbot_merge/views/templates.xml @@ -23,11 +23,17 @@ </template> <template id="staging-statuses" name="dropdown statuses list of stagings"> - <details name="dropdown" t-if="staging.heads"> - <summary 
t-attf-title="Staged at {{staging.staged_at}}Z for {{round(staging.staging_duration)}}s"> + <t t-if="staging.heads"> + <button t-attf-popovertarget="statuses-{{staging.id}}" + t-attf-style="anchor-name: --anchor-staging-{{staging.id}}" + t-attf-title="Staged at {{staging.staged_at}}Z for {{round(staging.staging_duration)}}s" + > <t t-out="0"/> - </summary> - <div class="staging-statuses"> + </button> + <div class="dropdown staging-statuses" popover="" + t-attf-id="statuses-{{staging.id}}" + t-attf-style="--my-anchor: --anchor-staging-{{staging.id}}" + > <a groups="runbot_merge.group_admin" role="menuitem" t-attf-href="/web#id={{staging.id}}&view_type=form&model=runbot_merge.stagings" @@ -51,7 +57,7 @@ </a> </t> </div> - </details> + </t> </template> <template id="alerts"> @@ -390,17 +396,20 @@ <td class="pr-listing"> <ul> <t t-foreach="staging.batch_ids" t-as="batch"> - <li class="dropdown" t-if="batch.prs"> - <details name="dropdown" style="padding: 0.25rem 1rem"> - <summary t-out="batch.name"/> - <div> - <t t-foreach="batch.prs" t-as="pr"> - <t t-call="runbot_merge.link-pr"> - <t t-set="target">_blank</t> - </t> + <li t-if="batch.prs" t-att-data-batch-id="batch.id"> + <button t-attf-popovertarget="batch-{{batch.id}}" + t-attf-style="anchor-name: --anchor-batch-{{batch.id}}" + t-out="batch.name"/> + <div class="dropdown" popover="" + t-attf-id="batch-{{batch.id}}" + t-attf-style="--my-anchor: --anchor-batch-{{batch.id}}" + > + <t t-foreach="batch.prs" t-as="pr"> + <t t-call="runbot_merge.link-pr"> + <t t-set="target">_blank</t> </t> - </div> - </details> + </t> + </div> </li> </t> </ul>