@Mytherin
Last active March 6, 2023 08:46
Scripts to cancel in-progress GitHub Actions workflow runs, or rerun failed ones, belonging to a PR with a specific title
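Both scripts shell out to the GitHub CLI, so gh must be installed and authenticated, and the duckdb and pandas packages must be importable. A typical invocation might look like the following (the file names cancel_workflows.py and rerun_workflows.py are illustrative, not part of the gist):

python cancel_workflows.py --title "Fix join order" --max_workflows 500
python rerun_workflows.py --title "Fix join order"
python rerun_workflows.py --title "Fix join order" --rerun_cancelled 1

Passing --title master to the cancel script targets all in-progress push runs instead of a specific PR.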
import subprocess
import duckdb
import os
import pandas as pd
import argparse
parser = argparse.ArgumentParser(description='Cancel all workflows related to a PR.')
parser.add_argument('--max_workflows', dest='max_workflows',
                    action='store', help='The maximum number of workflows to look at (starting from the latest)', default=200)
parser.add_argument('--title', dest='title',
                    action='store', help='The title of the PR for which we want to cancel workflows (or part of the title) - or "master" for all pushes', required=True)
args = parser.parse_args()
nlimit = args.max_workflows
query = args.title
proc = subprocess.Popen(['gh', 'run', 'list', '--json', 'displayTitle,databaseId,status,conclusion,headSha,event', f'--limit={nlimit}'], stdout=subprocess.PIPE)
text = proc.stdout.read().decode('utf8')
df = pd.read_json(text)
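# df now holds one row per workflow run: gh returns a JSON array of objects with the
# requested fields, e.g. (illustrative values only):
#   [{"displayTitle": "...", "databaseId": 4341234567, "status": "in_progress",
#     "conclusion": "", "headSha": "abc123", "event": "pull_request"}, ...]
# duckdb.query() below can reference the pandas DataFrame df directly by name.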
# Find queued or in-progress runs: either all pushes to master, or runs whose PR title matches the query
if query == 'master':
    result = duckdb.query("select databaseId from df WHERE status IN ('queued', 'in_progress') AND event='push'").fetchall()
else:
    result = duckdb.query(f"select databaseId from df WHERE status IN ('queued', 'in_progress') AND displayTitle LIKE '%{query}%'").fetchall()
if len(result) == 0:
    print(f"No workflows found in the latest {nlimit} workflows that contain the text {query}.\nPerhaps try running with a higher --max_workflows parameter?")
    exit(1)
# Cancel each matching run through the GitHub CLI
for databaseId in [x[0] for x in result]:
    os.system(f'gh run cancel {databaseId}')
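The second script reruns failed workflow runs instead of cancelling them: it looks up the head commit of the most recent run whose title matches, then reruns every failed run (and, with --rerun_cancelled, every cancelled run) for that commit.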
import subprocess
import duckdb
import os
import pandas as pd
import argparse
parser = argparse.ArgumentParser(description='Rerun failed workflows from a PR.')
parser.add_argument('--max_workflows', dest='max_workflows',
                    action='store', help='The maximum number of workflows to look at (starting from the latest)', default=200)
parser.add_argument('--title', dest='title',
                    action='store', help='The title of the PR for which we want to rerun workflows (or part of the title)', required=True)
parser.add_argument('--rerun_cancelled', dest='rerun_cancelled',
                    action='store', help='Whether to also rerun cancelled workflows in addition to failed ones', default=False)
args = parser.parse_args()
nlimit = args.max_workflows
query = args.title
proc = subprocess.Popen(['gh', 'run', 'list', '--json', 'displayTitle,databaseId,status,conclusion,headSha', f'--limit={nlimit}'], stdout=subprocess.PIPE)
text = proc.stdout.read().decode('utf8')
df = pd.read_json(text)
result = duckdb.query(f"select headSha from df where displayTitle LIKE '%{query}%' limit 1").fetchall()
if len(result) == 0:
print(f"No workflows found in the latest {nlimit} workflows that contain the text {query}.\nPerhaps try running with a higher --max_workflows parameter?")
exit(1)
headSha = result[0][0]
result = duckdb.query(f"select databaseId from df where conclusion IN ('failure', 'cancelled') AND displayTitle LIKE '%{query}%' and headSha='{headSha}'").fetchall()
if len(result) == 0:
print(f"Found runs that match the text {query} but no failing or cancelled runs were found")
for databaseId in [x[0] for x in result]:
os.system(f'gh run rerun {databaseId}')