
@AntoineDao
Created September 22, 2021 07:30
Script to batch update the ladybug-tools GitHub Actions workflows

Automating CI Script Updates

We have run into issues when a CI template that is copied across hundreds of repositories needs to be updated. More specifically, our workflows depended on an unpinned version of semantic-release, which eventually caused issues across all of them.

To resolve this, we needed to list all repositories in a given GitHub organization and update their CI workflows.
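
The actual change applied to each workflow is small: pin the semantic-release call and drop the deprecated coveralls step. The clean_file function in the script below does this with two regular expressions; the snippet here just shows the pinning substitution on its own, against a hypothetical workflow excerpt, so it is clear what the regex does:

import re

# Hypothetical excerpt from a workflow file that calls semantic-release unpinned.
step = "run: npx semantic-release\n"

# Pin the call to a known-good major version (same regex as clean_file below).
pinned = re.sub(r"(npx semantic-release)(\s)", "\\1@^17.0.0\\2", step)
print(pinned)  # run: npx semantic-release@^17.0.0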

Dependencies

This script requires the following dependencies to be installed (a quick preflight check is sketched after the list):

  • Python 3
  • Git command line executable
  • Github CLI
  • A valid GitHub token that has access to the organization and its repositories, exported as GH_TOKEN
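
The check below is not part of the original script; it is a minimal sketch of how you might verify these prerequisites before running it, assuming the token is exported as GH_TOKEN:

import os
import shutil
import sys

# Hypothetical preflight check: confirm the executables and token are available.
missing = [tool for tool in ("git", "gh") if shutil.which(tool) is None]
if missing:
    sys.exit(f"Missing executables: {', '.join(missing)}")
if not os.getenv("GH_TOKEN"):
    sys.exit("GH_TOKEN is not set")
print("All dependencies found.")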

How It Works

The script, reproduced in full at the bottom of this gist, operates in a straightforward manner (a short usage note follows the list):

  1. List all repositories in the organization and clone them
  2. Update any files in the .github/ folder of each repository using the delete_project_manager and clean_file functions
  3. Create a new branch called bulk-fix-ci
  4. Add and commit the changes
  5. Push the bulk-fix-ci branch to the repository on GitHub
  6. Create a pull request from the bulk-fix-ci branch to the master branch
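
Running it is a single command once the token is in place; the file name below is just a placeholder for wherever you saved the gist:

GH_TOKEN=<your token> python3 update_ci.py

The script creates a tmp/ folder in the working directory and clones every repository into it.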

Issues

There is one minor but annoying issue I couldn't resolve: the gh CLI keeps asking to confirm which repository to create the PR on for every repository it processes...
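
A possible workaround, which I have not tested, is to pass the branch explicitly so gh does not have to guess where the pull request should come from. The variant below swaps the PR call inside add_commit_and_push (see the script below) for one that sets --head; treat it as an assumption rather than a confirmed fix.

# Untested variant of the PR call in add_commit_and_push; repo_name, cwd and
# ORGANIZATION_NAME come from the surrounding function. Passing --head explicitly
# may stop gh from prompting for the repository to create the PR on.
subprocess.call(
    ["gh", "pr", "create",
     "--repo", f"{ORGANIZATION_NAME}/{repo_name}",
     "--head", "bulk-fix-ci",
     "--title", "ci(actions): bulk fix buggy actions",
     "--body", "Bulk CI fix to be reviewed by @chriswmackey"],
    cwd=cwd)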

import shutil
import os
import urllib.request
import json
import subprocess
import pathlib
import re

ACCESS_TOKEN = os.getenv('GH_TOKEN', "")
ORGANIZATION_NAME = 'ladybug-tools'


def get_organization_repositories(cursor: str = None):
    """Fetch a page of non-fork, unlocked repositories for the organization via the GraphQL API."""
    endpoint = "https://api.github.com/graphql"
    query_with_variables = """query ($login: String!, $endCursor: String) {
      repositoryOwner(login: $login) {
        repositories(first: 50, after: $endCursor, isFork: false, isLocked: false) {
          totalCount
          pageInfo {
            startCursor
            endCursor
            hasNextPage
            hasPreviousPage
          }
          nodes {
            name
          }
        }
      }
    }
    """
    jsondata = json.dumps({
        'query': query_with_variables,
        'variables': {'login': ORGANIZATION_NAME, 'endCursor': cursor},
    })
    req = urllib.request.Request(url=endpoint, method='POST', data=jsondata.encode('utf-8'))
    req.add_header('Authorization', f'Bearer {ACCESS_TOKEN}')
    response = urllib.request.urlopen(req)
    return json.loads(response.read().decode('utf-8'))


def clone_repository(repo_name: str) -> pathlib.Path:
    """Clone a repository into the local ./tmp folder and return its path."""
    clone_url = f"https://{ACCESS_TOKEN}@github.com/{ORGANIZATION_NAME}/{repo_name}.git"
    os.makedirs('tmp', exist_ok=True)
    repo_path = pathlib.Path(f'tmp/{repo_name}').absolute()
    try:
        subprocess.call(["git", "clone", clone_url, repo_path.as_posix()])
    except Exception as err:
        print(err)
    return repo_path


def add_commit_and_push(repo_path: pathlib.Path):
    """Commit the workflow changes to a bulk-fix-ci branch, push it and open a pull request."""
    cwd = repo_path.as_posix()
    repo_name = os.path.basename(os.path.normpath(repo_path))
    subprocess.call(["git", "checkout", "-b", "bulk-fix-ci"], cwd=cwd)
    subprocess.call(["git", "add", "."], cwd=cwd)
    subprocess.call(
        ["git", "commit", "-m", 'ci(actions): update workflows from templates'], cwd=cwd)
    push_url = f"https://{ACCESS_TOKEN}@github.com/{ORGANIZATION_NAME}/{repo_name}.git"
    subprocess.call(["git", "push", push_url, "bulk-fix-ci"], cwd=cwd)
    subprocess.call(
        ["gh", "pr", "--repo", f"{ORGANIZATION_NAME}/{repo_name}", "create",
         "--title", "ci(actions): bulk fix buggy actions",
         "--body", "Bulk CI fix to be reviewed by @chriswmackey"],
        cwd=cwd)


def delete_project_manager(repo_path: pathlib.Path):
    """Delete any project-manager workflow file found in .github/ or .github/workflows/."""
    file_names = ['project-manager.yml', 'project-manager.yaml']
    workflows_paths = [repo_path.joinpath('.github', 'workflows'), repo_path.joinpath('.github')]
    for workflow_path in workflows_paths:
        for file_name in file_names:
            path = workflow_path.joinpath(file_name)
            if path.exists():
                print(f"Found and deleting: {path.as_posix()}")
                os.remove(path.as_posix())


def has_ci(repo_path: pathlib.Path):
    """Return True if the repository has a ci.yaml workflow file."""
    workflows_paths = [repo_path.joinpath('.github', 'workflows'), repo_path.joinpath('.github')]
    for workflow_path in workflows_paths:
        if not workflow_path.exists():
            continue
        for file in os.listdir(workflow_path.as_posix()):
            if file.endswith('ci.yaml'):
                return True
    return False


def clean_file(repo_path: pathlib.Path):
    """Pin semantic-release and remove the coveralls step in every workflow file."""
    workflows_paths = [repo_path.joinpath('.github', 'workflows'), repo_path.joinpath('.github')]
    for workflow_path in workflows_paths:
        if not workflow_path.exists():
            continue
        for file in os.listdir(workflow_path.as_posix()):
            file_path = workflow_path.joinpath(file)
            if file_path.is_dir():
                continue
            with open(file_path.as_posix(), 'r') as f:
                content = f.read()
            content = re.sub(r"(npx semantic-release)(\s)",
                             "\\1@^17.0.0\\2", content)
            content = re.sub(r"coverage report\s*coveralls",
                             'echo "Coveralls is no longer supported"', content)
            with open(file_path.as_posix(), 'w') as f:
                f.write(content)


if __name__ == '__main__':
    # Page through the GraphQL API to collect every repository name in the organization.
    has_more = True
    cursor = None
    repositories = []
    while has_more:
        res = get_organization_repositories(cursor=cursor)
        page_info = res['data']['repositoryOwner']['repositories']['pageInfo']
        repos = res['data']['repositoryOwner']['repositories']['nodes']
        for repo in repos:
            repositories.append(repo['name'])
        has_more = page_info['hasNextPage']
        cursor = page_info['endCursor']

    # Clone every repository into the local ./tmp folder.
    for repo in repositories:
        path = clone_repository(repo)
        # Use this section if you have already cloned certain
        # repositories but only want to delete/re-clone the ones
        # that have a ".github/workflows/ci.yaml" file
        # if has_ci(path):
        #     shutil.rmtree(path.as_posix())
        #     clone_repository(repo)

    # Update the workflows in every cloned repository and open a pull request.
    for repo in os.listdir('tmp'):
        path = pathlib.Path('tmp', repo).absolute()
        delete_project_manager(path)
        clean_file(path)
        add_commit_and_push(path)