Skip to content

Instantly share code, notes, and snippets.

@smiklosovic
Last active February 26, 2024 09:35
Show Gist options
  • Save smiklosovic/ac632fc496ed7a26a0a81eec0169c201 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python3
import webbrowser
import subprocess
import hashlib
import argparse
import json
import requests
import jq
import os
import time
import schedule
import string
import random
from datetime import datetime, timezone
from pprint import pformat
def read_organisation_and_repo():
    """Return the (organisation, repository) pair, read from a local '.cici' file when present."""
    # Without an override file, fall back to the hard-coded defaults.
    if not os.path.isfile('.cici'):
        return "instaclustr", "cassandra"
    with open('.cici', 'r') as dot_cici:
        lines = dot_cici.readlines()
    # First line: organisation, second line: repository (newlines stripped).
    return lines[0].replace("\n", ""), lines[1].replace("\n", "")
def resolve_slug(template):
    """Fill a URL template's two slots with the configured organisation and repository."""
    return template.format(*read_organisation_and_repo())
# CircleCI v2 API endpoints. The {} slots are filled once with (org, repo) via
# resolve_slug(); the %s slots are filled per request with pipeline/workflow/job ids.
pipeline_url = resolve_slug("https://circleci.com/api/v2/project/github/{}/{}/pipeline")
workflow_url = "https://circleci.com/api/v2/pipeline/%s/workflow"
jobs_url = "https://circleci.com/api/v2/workflow/%s/job"
workflow_cancel = "https://circleci.com/api/v2/workflow/%s/cancel"
tests_url = resolve_slug("https://circleci.com/api/v2/project/github/{}/{}/%s/tests")
trigger_url = "https://circleci.com/api/v2/workflow/{}/approve/{}"
workflow_links_url = resolve_slug("https://app.circleci.com/pipelines/github/{}/{}")
# Auth headers — the placeholder tokens must be replaced by the user before running.
default_headers = {'Circle-Token': 'PUT YOUR OWN', 'Accept': 'application/json', 'Content-Type': 'application/json'}
auth_headers = {'authorization': 'Basic PUT YOUR OWN:', 'Accept': 'application/json', 'Content-Type': 'application/json'}
# Mutable module-level state, populated by parse_arguments()/main().
verbose = False
args = None
git_branch = None
def parse_arguments():
    """Parse command-line arguments for the CircleCI helper.

    Also mirrors the --verbose flag into the module-level `verbose` global.
    Returns the argparse.Namespace of parsed options.
    """
    # Bug fix: the original assigned `verbose = parsed.verbose` to a *local*,
    # leaving the module global forever False; `global` makes it take effect.
    global verbose
    # (Removed an unused call to read_organisation_and_repo() whose result
    # was never referenced.)
    parser = argparse.ArgumentParser(description="CircleCi helper")
    parser.add_argument('--branch', '-b', help='git branch')
    parser.add_argument('--workflow', '-w', help='display only this workflow')
    parser.add_argument('--trigger-workflow', '-t', dest='trigger_workflow', nargs='+')
    parser.add_argument('--cancel', '-c', dest='cancel_workflow', nargs='+')
    parser.add_argument('--monitor', '-m', help='monitor the build for branch', action='store_true')
    parser.add_argument('--trigger-job', dest='trigger_job', nargs='+')
    parser.add_argument('--verbose', '-v', help='verbose mode', action='store_true')
    parser.add_argument('--jira', dest='jira_summary', action='store_true')
    parser.add_argument('--jira-silent', dest='jira_silent', action='store_true')
    parser.add_argument('--ticket', '-o', help='open ticket in a browser', action='store_true')
    parsed = parser.parse_args()
    verbose = parsed.verbose
    return parsed
def resolve_git_branch(git_branch):
    """Resolve the branch to operate on.

    When no branch is given, ask git for the currently checked-out branch.
    Otherwise normalise the name by prefixing "CASSANDRA-" when missing.
    """
    # Idiom fix: `is None` instead of `== None`; guard clauses over nesting.
    if git_branch is None:
        return os.popen("git rev-parse --abbrev-ref HEAD").read().replace("\n", "")
    if not git_branch.startswith("CASSANDRA"):
        return "CASSANDRA-" + git_branch
    return git_branch
###################################
def get_pipeline_id(branch):
    """Return the id of the newest pipeline that ran for the given branch."""
    response = requests.get(pipeline_url, headers=default_headers).json()
    query = "[.items[] | select(.vcs.branch == \"%s\")][0].id" % branch
    return jq.compile(query).input_text(json.dumps(response)).first()
def get_pipeline_number(branch):
    """Return the sequential number of the newest pipeline that ran for the given branch."""
    response = requests.get(pipeline_url, headers=default_headers).json()
    query = "[.items[] | select(.vcs.branch == \"%s\")][0].number" % branch
    return jq.compile(query).input_text(json.dumps(response)).first()
def get_workflows(pipeline):
    """Return one JSON line per workflow of the pipeline: {id, name, status}."""
    response = requests.get(workflow_url % pipeline, headers=default_headers).json()
    query = "[.items[]][]|{id: .id, name: .name, status: .status}"
    return jq.compile(query).input_text(json.dumps(response)).text().splitlines()
def cancel_workflow(git_branch, workflow):
    """Cancel the named workflow of the branch's newest pipeline; return the API response as JSON text."""
    workflow_id = get_workflow_id(get_pipeline_id(git_branch), workflow)
    response = requests.post(workflow_cancel % workflow_id, headers=default_headers).json()
    return json.dumps(response)
def get_workflow_id(pipeline, workflow_name):
    """Return the id of the first non-canceled workflow with the given name in the pipeline."""
    response = requests.get(workflow_url % pipeline, headers=default_headers).json()
    query = "[.items[] | select(.status != \"canceled\" and .name == \"" + workflow_name + "\")][].id"
    return jq.compile(query).input_text(json.dumps(response)).first()
def get_jobs(workflow):
    """Bucket the workflow's build jobs by status.

    Returns a dict of JSON texts keyed by 'failed', 'success', 'running',
    'not_running', plus 'started_pre_commit' — the approval job that kicks
    off the pre-commit tests (or JSON null when absent).
    """
    jobs = json.dumps(requests.get(jobs_url % workflow, headers=default_headers).json())

    def bucket(status):
        # JSON text of the list of build jobs currently in `status`.
        query = "[.items[]|select(.type==\"build\" and .status==\"%s\")]" % status
        return jq.compile(query).input_text(jobs).text()

    started = jq.compile("[.items[]|select(.name==\"start_pre-commit_tests\")][0]").input_text(jobs).text()
    return {
        'failed': bucket("failed"),
        'success': bucket("success"),
        'running': bucket("running"),
        'not_running': bucket("not_running"),
        'started_pre_commit': started,
    }
def get_job_id(workflow, job_name):
    """Return the id of the named job within the workflow."""
    jobs = json.dumps(requests.get(jobs_url % workflow, headers=default_headers).json())
    query = "[.items[]|select(.name==\"%s\")][].id" % job_name
    return jq.compile(query).input_text(jobs).first()
def get_job_approval_id(workflow, job_name):
    """Return the approval_request_id of the named (hold) job within the workflow."""
    jobs = json.dumps(requests.get(jobs_url % workflow, headers=default_headers).json())
    query = "[.items[]|select(.name==\"%s\")][].approval_request_id" % job_name
    return jq.compile(query).input_text(jobs).first()
def get_success_jobs(workflow):
    """Return the JSON text of all successful build jobs of the workflow.

    Bug fix: the original fetched the job list twice (duplicated line) and
    fell off the end without a return, so it always returned None. It now
    returns the 'success' bucket, mirroring the filter used in get_jobs().
    """
    jobs = json.dumps(requests.get(jobs_url % workflow, headers=default_headers).json())
    query = "[.items[]|select(.type==\"build\" and .status==\"success\")]"
    return jq.compile(query).input_text(jobs).text()
def get_failing_tests(job):
    """Return the JSON text of all failed test cases of the given job number."""
    response = requests.get(tests_url % job, headers=default_headers).json()
    query = "[.items[]|select(.result == \"failure\")]"
    return jq.compile(query).input_text(json.dumps(response)).text()
def trigger_job(git_branch, workflow, starting_job):
    """Approve the given hold job in the named workflow of the branch's newest pipeline."""
    workflow_id = get_workflow_id(get_pipeline_id(git_branch), workflow)
    approval_id = get_job_approval_id(workflow_id, starting_job)
    response = requests.post(trigger_url.format(workflow_id, approval_id), headers=default_headers).json()
    return json.dumps(response)
def get_assignee_from_jira(ticket):
    """Return the display name of the ticket's assignee, or None when unassigned."""
    # Fix: the original round-tripped the parsed response through
    # json.loads(json.dumps(r)) — a no-op copy; use the parsed dict directly.
    data = requests.get("https://issues.apache.org/jira/rest/api/latest/issue/" + ticket).json()
    if data['fields']['assignee']:
        return data['fields']['assignee']['displayName']
    return None
def get_reviewers_from_jira(ticket):
    """Return the ticket's reviewers as a comma-separated string, or None when there are none.

    customfield_12313420 is the "Reviewers" field on the ASF JIRA instance
    (as used by the original code).
    """
    # Fix: removed the no-op json.loads(json.dumps(r)) round-trip.
    data = requests.get("https://issues.apache.org/jira/rest/api/latest/issue/" + ticket).json()
    reviewers = data['fields']['customfield_12313420']
    if reviewers:
        return ', '.join([reviewer['displayName'] for reviewer in reviewers])
    return None
def get_summary_from_jira(ticket):
    """Return the summary (title) of the given JIRA ticket."""
    # Fix: removed the no-op json.loads(json.dumps(r)) round-trip.
    data = requests.get("https://issues.apache.org/jira/rest/api/latest/issue/" + ticket).json()
    return data['fields']['summary']
def open_ticket(ticket):
    """Open the given JIRA ticket in the default web browser."""
    url = "https://issues.apache.org/jira/browse/%s" % ticket
    webbrowser.open(url)
# Utils #################################
def cici_print(message, outputFile = None):
    """Print the message to stdout, or append it to outputFile when one is given."""
    if outputFile is None:
        print(message)
        return
    with open(outputFile, 'a') as sink:
        print(message, file = sink)
def color_print(text, color, colorful, silent, outputFile = None):
    """Print text, optionally wrapped in an ANSI color escape.

    silent suppresses all output; when outputFile is given the text is
    appended there via cici_print and color is disabled (no ANSI escapes
    in files).
    """
    if silent:
        return
    use_color = colorful and outputFile is None
    if use_color:
        print(f"\033[{color}m{text}\033[0m")
    elif outputFile is None:
        print(text)
    else:
        cici_print(text, outputFile)
# Thin wrappers binding one ANSI color code each (31..37) to color_print.
def red(text, colorful = True, silent = False, outputFile = None):
    color_print(text, 31, colorful, silent, outputFile)
def green(text, colorful = True, silent = False, outputFile = None):
    color_print(text, 32, colorful, silent, outputFile)
def yellow(text, colorful = True, silent = False, outputFile = None):
    color_print(text, 33, colorful, silent, outputFile)
def blue(text, colorful = True, silent = False, outputFile = None):
    color_print(text, 34, colorful, silent, outputFile)
def white(text, colorful = True, silent = False, outputFile = None):
    color_print(text, 37, colorful, silent, outputFile)
def parse_time(started, stopped):
    """Format the span between two ISO-8601 UTC timestamps ("%Y-%m-%dT%H:%M:%SZ").

    Returns "Mm Ss" for spans under an hour, otherwise "Hh Mm Ss".
    """
    start_time = datetime.strptime(started, "%Y-%m-%dT%H:%M:%SZ")
    end_time = datetime.strptime(stopped, "%Y-%m-%dT%H:%M:%SZ")
    # Bug fix: timedelta.seconds discards whole days, so any span over 24h
    # wrapped around; total_seconds() counts the full duration.
    total = int((end_time - start_time).total_seconds())
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    if hours == 0:
        return "{}m {}s".format(minutes, seconds)
    return "{}h {}m {}s".format(hours, minutes, seconds)
def since_date(started):
    # Elapsed time from `started` until now (UTC), formatted by parse_time.
    now_time = datetime.strftime(datetime.now().astimezone(timezone.utc), "%Y-%m-%dT%H:%M:%SZ")
    return parse_time(started, now_time)
def contains_timeout(line):
    """True when the line carries one of the known test-timeout markers."""
    timeout_markers = (
        "java.util.concurrent.TimeoutException",
        "Timeout occurred. Please note the time in the report does not reflect the time until the timeout",
    )
    return any(marker in line for marker in timeout_markers)
# Pipeline printing ######################################
def print_pipelines(git_branch, verbose, jira_summary, silent = False, workflow_only = None, outputFile = None):
    """Render the status of every workflow and job of the branch's newest pipeline.

    jira_summary formats output as JIRA markup ({noformat} blocks, [text|url]
    links); silent limits output to a pass/fail summary of the pre-commit
    workflows; workflow_only restricts the report to one workflow; outputFile
    appends the report to that file instead of printing to stdout.
    """
    pipeline_id = get_pipeline_id(git_branch)
    pipeline_number = get_pipeline_number(git_branch)
    workflow_links = {}  # workflow name -> CircleCI web-UI link
    if jira_summary or silent:
        # JIRA header. NOTE(review): written without outputFile, so it goes to
        # stdout even when the rest of the report is directed to a file — confirm intended.
        cici_print("[%s|https://github.com/instaclustr/cassandra/tree/%s]" % (git_branch, git_branch))
        cici_print("{noformat}")
    workflow_failures = {}  # workflow name -> {job name: [failed "class method" strings]}
    for workflow in get_workflows(pipeline_id):
        workflow_json = json.loads(workflow)
        workflow_name = workflow_json["name"]
        workflow_failed = False  # NOTE(review): written below but never read
        if workflow_only is not None and workflow_name != workflow_only:
            continue
        jobs = get_jobs(workflow_json["id"])
        failing_jobs = json.loads(jobs['failed'])
        success_jobs = json.loads(jobs['success'])
        running_jobs = json.loads(jobs['running'])
        not_running_jobs = json.loads(jobs['not_running'])
        started_pre_commit = json.loads(jobs['started_pre_commit'])
        workflow_links[workflow_name] = workflow_links_url + "/{}/workflows/{}".format(pipeline_number, workflow_json["id"])
        if workflow_json["status"] == "canceled":
            # Canceled workflows get a one-line entry only.
            blue("{:<49}{:>10}".format(workflow_name, workflow_json["status"]), colorful = not jira_summary, silent = silent, outputFile = outputFile)
            continue
        if started_pre_commit == None:
            blue("{:<49}".format(workflow_name), colorful = not jira_summary, silent = silent, outputFile = outputFile)
        else:
            workflow_start = since_date(started_pre_commit['started_at'])  # NOTE(review): computed but unused
            blue("{:<48}".format(workflow_name), colorful = not jira_summary, silent = silent, outputFile = outputFile)
        # Per-job lines: finished jobs show their duration, running jobs the
        # elapsed time so far, queued jobs "pending".
        for success_job in success_jobs:
            green(" ✓ {:<45}{:>10}".format(success_job["name"], parse_time(success_job["started_at"], success_job["stopped_at"])), colorful = not jira_summary, silent = silent, outputFile = outputFile)
        for running_job in running_jobs:
            yellow(" {:<45}{:>10}".format(running_job["name"], since_date(running_job['started_at'])), colorful = not jira_summary, silent = silent, outputFile = outputFile)
        for not_running in not_running_jobs:
            white(" {:<45}{:>10}".format(not_running["name"], "pending"), colorful = not jira_summary, silent = silent, outputFile = outputFile)
        failing_jobs_map = {}  # job name -> [failed "class method" strings]
        for failing_job in failing_jobs:
            workflow_failed = True
            red(" ✕ {:<45}{:>10}".format(failing_job["name"], parse_time(failing_job["started_at"], failing_job["stopped_at"])), colorful = not jira_summary, silent = silent, outputFile = outputFile)
            failing_tests = json.loads(get_failing_tests(failing_job["job_number"]))
            workflow_failed_tests = []
            for failing_test in failing_tests:
                class_name = failing_test["classname"]
                # Repeated-run test names carry a "-<n>" suffix; keep the base name.
                method_name = failing_test["name"].split('-')[0]
                workflow_failed_tests.append(class_name + " " + method_name)
                message_lines = failing_test["message"].splitlines()
                timeouted = False
                if len(message_lines) > 0:
                    timeouted = contains_timeout(message_lines[0])
                # Timeouts are tagged explicitly unless verbose already prints the full message.
                if timeouted and not verbose:
                    yellow(" {} {} TIMEOUTED".format(class_name, method_name), colorful = not jira_summary, silent = silent, outputFile = outputFile)
                else:
                    yellow(" {} {}".format(class_name, method_name), colorful = not jira_summary, silent = silent, outputFile = outputFile)
                if verbose:
                    # Full failure message, line by line.
                    for failure_message in message_lines:
                        red(" %s" % failure_message, colorful = not jira_summary, silent = silent, outputFile = outputFile)
            failing_jobs_map.update({failing_job["name"]: workflow_failed_tests})
        workflow_failures.update({workflow_name: failing_jobs_map})
    if silent:
        # Condensed summary, pre-commit workflows only: PASSED line or the
        # failing jobs with their failed tests.
        for wf, jf in workflow_failures.items():
            if wf != "java17_pre-commit_tests" and wf != "java11_pre-commit_tests":
                continue
            if len(jf) == 0:
                cici_print("PASSED " + wf, outputFile = outputFile)
                continue
            cici_print(wf, outputFile = outputFile)
            for job, failures in jf.items():
                cici_print(" %s" % job, outputFile = outputFile)
                for failure in failures:
                    cici_print(" %s" % failure, outputFile = outputFile)
    if jira_summary or silent:
        # Close the JIRA {noformat} block opened above.
        cici_print("{noformat}", outputFile)
        cici_print("", outputFile)
    # Trailing list of links to each workflow in the CircleCI UI.
    for workflow, link in workflow_links.items():
        if jira_summary or silent:
            cici_print("[{}|{}]".format(workflow, link), outputFile)
        else:
            cici_print("{:<25} {:>10}".format(workflow, link), outputFile)
###########################################################
# Approval (hold) job that kicks off each pre-commit workflow.
start_jobs_by_workflow = {
    'java17_pre-commit_tests': 'start_pre-commit_tests',
    'java11_pre-commit_tests': 'start_pre-commit_tests',
    'java8_pre-commit_tests': 'start_pre-commit_tests'
}
# All approval (hold) jobs known per workflow; used to validate --trigger-job
# and --cancel arguments and printed by dump_workflows().
approval_jobs_by_workflow = {
    'java17_pre-commit_tests': [
        'start_pre-commit_tests',
        'j17_dtests_vnode'
    ],
    'java11_pre-commit_tests': [
        'start_pre-commit_tests',
        'start_j17_unit_tests_repeat',
        'j17_dtests_vnode'
    ],
    'java17_separate_tests': [
        'start_j17_utests_trie_repeat',
        'start_j17_utests_trie',
        'start_j17_utests_system_keyspace_directory_repeat',
        'start_j17_utests_system_keyspace_directory',
        'start_j17_utests_stress',
        'start_j17_utests_oa_repeat',
        'start_j17_utests_oa',
        'start_j17_utests_long',
        'start_j17_utests_fqltool',
        'start_j17_utests_compression_repeat',
        'start_j17_utests_compression',
        'start_j17_utests_cdc_repeat',
        'start_j17_utests_cdc',
        'start_j17_unit_tests_repeat',
        'start_j17_unit_tests',
        'start_j17_jvm_dtests_vnode',
        'start_j17_jvm_dtests',
        'start_j17_dtests_vnode',
        'start_j17_dtests_offheap',
        'start_j17_dtests_large_vnode',
        'start_j17_dtests_large',
        'start_j17_dtests',
        'start_j17_cqlshlib_tests',
        'start_j17_cqlshlib_cython_tests',
        'start_j17_cqlsh_tests',
        'start_j17_cqlsh-dtests-offheap',
        'start_j17_build'
    ],
    'java11_separate_tests': [
        'start_j11_build',
        'start_j11_unit_tests_repeat',
        'j17_dtests_vnode'
    ],
    'java8_pre-commit_tests': [
        'start_pre-commit_tests'
    ]
}
def valid_workflow(value):
    """True when the given name is a known workflow."""
    # Values in the dict are always non-None lists, so membership is equivalent
    # to the original get(...) is not None check.
    return value in approval_jobs_by_workflow
def invalid_workflow(value):
    """Inverse of valid_workflow: True when the name is not a known workflow."""
    return not valid_workflow(value)
def dump_workflows():
    """Pretty-print the known workflows and their approval jobs as JSON."""
    print(json.dumps(approval_jobs_by_workflow, indent = 2))
##################################
# Monitoring state used by monitor_build()'s scheduled task: JOB_DONE flips
# once two consecutive status snapshots hash identically; OLD_HASH holds the
# previous snapshot's MD5.
JOB_DONE = False
OLD_HASH = ""
def calculate_md5(file_path):
    """Return the hex MD5 digest of the file, read in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(file_path, 'rb') as handle:
        chunk = handle.read(4096)
        while chunk:
            digest.update(chunk)
            chunk = handle.read(4096)
    return digest.hexdigest()
def generate_random_string(length):
    """Return a random alphanumeric string of the given length."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
def kill_monitoring_if_running(branch):
    """Kill a previously started monitor for this branch and remove its pid file.

    The monitor's pid is stored in /tmp/<branch> by write_build_manifest.
    """
    manifest = '/tmp/%s' % branch
    if not os.path.exists(manifest):
        return
    with open(manifest, 'r') as handle:
        for line in handle:
            # Only the first recorded pid is killed.
            subprocess.run(['kill', '-9', line.strip()], stderr=subprocess.DEVNULL)
            break
    os.remove(manifest)
def write_build_manifest(branch):
    """Append this process's pid to the branch's monitor manifest in /tmp."""
    manifest = '/tmp/%s' % branch
    with open(manifest, 'a') as handle:
        print(os.getpid(), file = handle)
def remove_build_manifest(branch):
    """Delete the branch's monitor manifest in /tmp, if it exists."""
    manifest = '/tmp/%s' % branch
    if os.path.exists(manifest):
        os.remove(manifest)
def monitor_build(git_branch, args):
    # Poll the build every 20 minutes until two consecutive rendered status
    # snapshots hash identically (i.e. nothing changed), then notify the user.
    # Any previously running monitor for the same branch is killed first.
    kill_monitoring_if_running(git_branch)
    write_build_manifest(git_branch)
    def task():
        global JOB_DONE
        global OLD_HASH
        # Render the current pipeline status into a throw-away file and hash it.
        outputFile = "/tmp/%s" % generate_random_string(10)
        print_pipelines(git_branch, args.verbose, args.jira_summary, args.jira_silent, outputFile = outputFile)
        fileHash = calculate_md5(outputFile)
        if OLD_HASH == fileHash:
            # Unchanged since the last poll: consider the build finished and
            # unschedule this task.
            JOB_DONE = True
            return schedule.CancelJob
        else:
            OLD_HASH = fileHash
        # NOTE(review): only reached on the "changed" path — the snapshot file
        # of the final (unchanged) poll is left behind in /tmp.
        os.remove(outputFile)
    schedule.every(20).minutes.do(task)
    while not JOB_DONE:
        schedule.run_pending()
        time.sleep(1)
    remove_build_manifest(git_branch)
    notify_me(git_branch)
def notify_me(branch):
    # Announce a finished build: desktop notification, spoken announcement
    # (Slovak text-to-speech, muting other audio while it plays) and a Signal
    # note-to-self. Branch names are assumed to look like
    # CASSANDRA-<ticket>[-<version>] — TODO confirm for branches without a dash.
    split = branch.split("-")
    voice_branch = "trank"  # phonetic Slovak spelling of "trunk" for the TTS engine
    if len(split) == 3:
        voice_branch = split[2]
    text_branch = voice_branch
    if voice_branch != "trank" and voice_branch != "trunk":
        # Space out the characters so the version number is read digit by digit.
        voice_branch = ' '.join(voice_branch)
    text_ticket = split[1]
    voice_ticket = ' '.join(text_ticket)  # NOTE(review): computed but never used
    summary = get_summary_from_jira("CASSANDRA-{}".format(text_ticket))
    # Slovak: "Build for <branch> for ticket <ticket> is finished (<summary>)".
    text_message = "Build pre {} pre ticket {} je dokončený ({})".format(text_branch, text_ticket, summary)
    subprocess.run(["notify-send", "-u", "normal", "%s finished" % branch, "%s" % summary], check=True)
    # Synthesize the spoken announcement to an mp3 in /tmp.
    subprocess.run(["google_speech", "-l", "sk", "-o", "/tmp/%s.mp3" % branch, "Bild pre {} pre tiket {} je dokončený.".format(voice_branch, text_ticket)])
    time.sleep(1)
    # Collect the ids of all active PulseAudio sink inputs.
    audio_ids = subprocess.run("pacmd list-sink-inputs | grep index | tr -s \" \" | cut -d\" \" -f3", shell = True, capture_output = True, text = True).stdout.split('\n')
    # Mute everything else before playing the announcement.
    for i in audio_ids:
        sink_id = "{}".format(i.replace('\n', ''))
        if sink_id.isspace() or sink_id == "":
            continue
        subprocess.run(["pacmd", "set-sink-input-mute", sink_id, "1"])
    time.sleep(1)
    subprocess.run(["ffplay", "-v", "0", "-nodisp", "-autoexit", "/tmp/%s.mp3" % branch])
    time.sleep(1)
    # Unmute everything again.
    for i in audio_ids:
        sink_id = "{}".format(i.replace('\n', ''))
        if sink_id.isspace() or sink_id == "":
            continue
        subprocess.run(["pacmd", "set-sink-input-mute", sink_id, "0"])
    subprocess.run(["rm", "/tmp/%s.mp3" % branch])
    # Signal "note to self" with the message and a link to the ticket.
    subprocess.run(["signal-cli", "-a", "+421907366768", "send", "--note-to-self", "--notify-self", "-m", text_message], stderr=subprocess.DEVNULL)
    subprocess.run(["signal-cli", "-a", "+421907366768", "send", "--note-to-self", "--notify-self", "-m", "https://issues.apache.org/jira/browse/" + branch], stderr=subprocess.DEVNULL)
####################################
def main():
    # CLI entry point: parse arguments, resolve the branch, then dispatch to
    # exactly one of the supported actions.
    args = parse_arguments()
    git_branch = resolve_git_branch(args.branch)
    if args.verbose and not args.jira_summary:
        # assumes the branch looks like CASSANDRA-<ticket>[...] — TODO confirm
        # behavior for branch names without a dash (e.g. trunk).
        jira = "CASSANDRA-" + git_branch.split('-')[1]
        print("Assignee : %s" % get_assignee_from_jira(jira))
        print("Reviewers: %s" % get_reviewers_from_jira(jira))
    if args.workflow is not None:
        # Report a single workflow only.
        print_pipelines(git_branch, args.verbose, args.jira_summary, workflow_only = args.workflow)
    elif args.trigger_workflow is not None:
        # Approve the starting job of each requested workflow; unknown
        # workflows are silently skipped. Optionally keep monitoring after.
        for workflow in args.trigger_workflow:
            starting_job = start_jobs_by_workflow.get(workflow)
            if starting_job is not None:
                trigger_job(git_branch, workflow, starting_job)
        if args.monitor:
            monitor_build(git_branch, args)
    elif args.trigger_job is not None:
        # Approve one specific hold job: --trigger-job <workflow> <job>.
        workflow = args.trigger_job[0]
        job = args.trigger_job[1]
        jobs_list = approval_jobs_by_workflow.get(workflow)
        if jobs_list is None or job not in jobs_list:
            # Unknown combination: show what is available instead.
            dump_workflows()
        else:
            print(trigger_job(git_branch, workflow, job))
    elif args.cancel_workflow is not None:
        for workflow in args.cancel_workflow:
            if valid_workflow(workflow):
                print(cancel_workflow(git_branch, workflow))
            else:
                dump_workflows()
    elif args.monitor:
        monitor_build(git_branch, args)
    elif args.ticket:
        open_ticket("CASSANDRA-" + git_branch.split('-')[1])
    else:
        # Default action: print the full pipeline status.
        print_pipelines(git_branch, args.verbose, args.jira_summary, args.jira_silent)
if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment