85 changes: 85 additions & 0 deletions lib/bots_automerge.py
@@ -0,0 +1,85 @@
# This file is part of Cockpit.
#
# Copyright (C) 2025 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.

import logging

from lib.aio.jsonutil import get_int, get_str
from task import github

logger = logging.getLogger(__name__)

# TODO: verify if this is always the same
GITHUB_CI = {
    'login': 'github-actions[bot]',
    'id': 41898282,
}

COCKPITUOUS = {
    'login': 'cockpituous',
    'id': 14330603,
}


def is_ci_bot(api: github.GitHub, pr: int) -> bool:
    author = api.get_author(pr)
    logger.debug("PR author: %s", author)
    login = get_str(author, 'login')
    login_id = get_int(author, 'id')

    return ((login == GITHUB_CI['login'] and login_id == GITHUB_CI['id']) or
            (login == COCKPITUOUS['login'] and login_id == COCKPITUOUS['id']))


def all_checks_pass(api: github.GitHub, commit_hash: str) -> bool:
    statuses = api.statuses(commit_hash)
    logger.debug("Checking statuses for %s: %s", commit_hash, statuses)

    if not statuses:
        logger.info("No statuses found for commit %s", commit_hash)
        return False

    for context in statuses:
        status = statuses[context]
        status_state = get_str(status, 'state')
        logger.debug("Status for context '%s': %s", context, status_state)
        if status_state != 'success':
            return False

    return True


def auto_merge_bots_pr(repo: str, pr: int, sha: str) -> None:
    api = github.GitHub(repo=repo)

    logger.debug("is_ci_bot: %s", is_ci_bot(api, pr))
    # Make sure that the PR was made by cockpituous or github actions
    # if not is_ci_bot(api, pr):
    #     logger.info("PR not made by CI bot, skipping automerge")
    #     return
Code scanning / CodeQL notice on lines +71 to +73: Commented-out code. This comment appears to contain commented-out code.

    # check that all checks are green
    all_pass = all_checks_pass(api, sha)
    logger.debug("all_checks_pass: %s", all_pass)
    if not all_pass:
        logger.info("Not every check has passed, skipping automerge")
        return

    logger.info("All checks green, can automerge")
    # approve the PR; the actual merge is not implemented yet
    api.approve_pr(pr, sha)
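
The TODO above asks whether the hard-coded bot accounts are stable. One way to check is to query the public GitHub users endpoint for each login and compare the returned numeric id against the constants. The sketch below is illustrative only; it is not part of the PR and uses plain urllib rather than the task.github helper.

# Illustrative check for the bot-id TODO; not part of this PR.
import json
import urllib.parse
import urllib.request

def lookup_user_id(login: str) -> int:
    # https://api.github.com/users/{login} reports the account's numeric id
    url = f"https://api.github.com/users/{urllib.parse.quote(login)}"
    with urllib.request.urlopen(url) as resp:
        return int(json.load(resp)['id'])

# compare against the constants defined in lib/bots_automerge.py
assert lookup_user_id('github-actions[bot]') == 41898282
assert lookup_user_id('cockpituous') == 14330603
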
1 change: 1 addition & 0 deletions lib/testmap.py
@@ -302,6 +302,7 @@ def is_valid_context(context: str, repo: str) -> bool:
    image = image_scenario.split('/')[0]
    # if the context specifies a repo, use that one instead
    branch_contexts = tests_for_project(context_repo or repo)
    print(f"CONTEXTS: {branch_contexts}")
    if context_repo:
        # if the context specifies a repo, only look at that particular branch
        try:
34 changes: 25 additions & 9 deletions run-queue
@@ -31,6 +31,9 @@

import pika

from lib.aio.base import SubjectSpecification
from lib.aio.jsonutil import JsonObject, get_int, get_str

Code scanning / CodeQL notice: Unused import. Import of 'JsonObject' is not used. Import of 'get_int' is not used. Import of 'get_str' is not used.
from lib.bots_automerge import auto_merge_bots_pr
from lib.directories import get_images_data_dir
from lib.network import redhat_network
from lib.stores import LOG_STORE
@@ -43,7 +46,7 @@
# as per pika docs
DeliveryTag = int

ConsumeResult = tuple[Sequence[str] | str | None, DeliveryTag | None]
ConsumeResult = tuple[Sequence[str] | str | None, DeliveryTag | None, SubjectSpecification | None]


# Returns a command argv to execute and the delivery tag needed to ack the message
@@ -52,7 +55,7 @@
    # call tests-scan or issue-scan appropriately
    method_frame, _header_frame, message = dq.channel.basic_get(queue='webhook')
    if not method_frame or not message:
        return None, None
        return None, None, None

    body = json.loads(message)
    event = body['event']
@@ -97,9 +100,9 @@
        cmd = ['./issue-scan', '--issues-data', json.dumps(request), '--amqp', dq.address]
    else:
        logging.error('Unknown event type in the webhook queue')
        return None, None
        return None, None, None

    return cmd, method_frame.delivery_tag
    return cmd, method_frame.delivery_tag, None


# Returns a command to execute and the delivery tag needed to ack the message
@@ -119,18 +122,22 @@
        queue = ['public', 'rhel'][random.randrange(2)]
    else:
        # nothing to do
        return None, None
        return None, None, None

    method_frame, _header_frame, message = dq.channel.basic_get(queue=queue)
    if not method_frame or not message:
        return None, None
        return None, None, None

    body = json.loads(message)
    if job := body.get('job'):
        logging.debug("job: %s", job)
        command = ['./job-runner', 'json', json.dumps(job)]
        job_subject = SubjectSpecification(job)
    else:
        logging.debug("message carries no job")
        command = body['command']
        return command, method_frame.delivery_tag
        job_subject = None
    return command, method_frame.delivery_tag, job_subject


def mail_notification(body: str) -> None:
@@ -159,14 +166,14 @@
    opts = parser.parse_args()

    with distributed_queue.DistributedQueue(opts.amqp, ['webhook', 'rhel', 'public', 'statistics']) as dq:
        cmd, delivery_tag = consume_webhook_queue(dq)
        cmd, delivery_tag, job_subj = consume_webhook_queue(dq)
        if not cmd and delivery_tag:
            logging.info("Webhook message interpretation generated no command")
            dq.channel.basic_ack(delivery_tag)
            return 0

        if not cmd:
            cmd, delivery_tag = consume_task_queue(dq)
            cmd, delivery_tag, job_subj = consume_task_queue(dq)
            if not cmd:
                logging.info("All queues are empty")
                return 1
@@ -191,6 +198,15 @@
        if delivery_tag is not None:
            dq.channel.basic_ack(delivery_tag)

        if job_subj is not None:
            logging.debug("automerge candidate: repo=%s pull=%s sha=%s", job_subj.repo, job_subj.pull, job_subj.sha)
            # skip automerge if jobs don't run against a PR
            if job_subj.repo is not None and job_subj.pull is not None and job_subj.sha is not None:
                logging.info("Starting automerge")
                auto_merge_bots_pr(job_subj.repo, job_subj.pull, job_subj.sha)
            else:
                logging.info("Skipping automerge for job: %s", job_subj)

        return 0


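One thing the new automerge hook does not guard against is a GitHub API failure inside auto_merge_bots_pr taking down the whole queue run after the message has already been acknowledged. A defensive wrapper along these lines is purely a sketch, not part of the PR, and would keep the consumer alive while just logging the failure:

        # Sketch only: shield the queue consumer from automerge failures.
        if job_subj is not None and None not in (job_subj.repo, job_subj.pull, job_subj.sha):
            try:
                auto_merge_bots_pr(job_subj.repo, job_subj.pull, job_subj.sha)
            except Exception:
                # the message is already acked; a failed automerge should not fail the run
                logging.exception("automerge of %s PR #%s failed", job_subj.repo, job_subj.pull)
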
25 changes: 24 additions & 1 deletion task/github.py
@@ -336,8 +336,11 @@ def statuses(self, revision: str) -> Mapping[str, JsonObject]:
            data = self.get_obj(f"commits/{revision}/status?page={page}&per_page={count}")
            count = 0
            page += 1
            print(f"DATA: {data}")
            for status in get_dictv(data, "statuses", ()):
                context = get_str(status, "context")
                print(f"CONTEXT: {context}")
                print(f"REPO: {self.repo}")
                if is_valid_context(context, self.repo) and context not in result:
                    result[context] = status
                    count += 1
@@ -413,10 +416,30 @@ def issue_comments(self, number: int) -> Sequence[JsonObject]:
            count = len(comments)
        return result

    def get_pr_info(self, pr: int) -> JsonObject:
        return self.get_obj(f"pulls/{pr}", {})

    def get_head(self, pr: int) -> str | None:
        pull = self.get_obj(f"pulls/{pr}", {})
        pull = self.get_pr_info(pr)
        return get_str(get_dict(pull, "head", {}), "sha", None)

    def get_author(self, pr: int) -> JsonObject:
        pull = self.get_pr_info(pr)
        print(f"PR INFO: {pull}")
        return get_dict(pull, "user", {})

    def approve_pr(self, pr: int, sha: str) -> None:
        # https://docs.github.com/en/rest/pulls/reviews?apiVersion=2022-11-28#create-a-review-for-a-pull-request
        # the free-text review message goes in 'body'; 'comments' is an array of
        # inline review comment objects in this API
        data = {
            'commit_id': sha,
            'event': 'APPROVE',
            'body': 'So cool'
        }
        rw = self.post(f'pulls/{pr}/reviews', data)
        print(f"post {rw}")

    # let's not write the merge code yet :)


class Checklist:
    # NB: GitHub sends `body: null` for issues with empty bodies
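
For the merge step that the last comment defers, the REST endpoint is PUT /repos/{owner}/{repo}/pulls/{number}/merge, which accepts an optional sha to guard against the branch moving and a merge_method. A possible shape is sketched below; the method name merge_pr is invented here, and whether the GitHub helper exposes a put() wrapper analogous to post() is an assumption, so treat this as illustrative rather than the eventual implementation.

    def merge_pr(self, pr: int, sha: str, method: str = 'merge') -> None:
        # https://docs.github.com/en/rest/pulls/pulls#merge-a-pull-request
        # Passing 'sha' makes GitHub refuse the merge if the branch head moved
        # after the checks were evaluated. Assumes a put() helper analogous to
        # post(); adjust to whatever low-level request method the class offers.
        data = {
            'sha': sha,
            'merge_method': method,
        }
        self.put(f'pulls/{pr}/merge', data)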