@apurvam
Last active February 13, 2019 22:05
Get all open PRs in every confluentinc repository whose name matches a regex.
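A typical invocation looks like the following (the script name open_prs.py is illustrative, alice and bob are placeholder GitHub usernames, and the author filter is optional):

    python open_prs.py --repos 'cc.*' --token <github-oauth-token> --authors alice bob

The script prints one comma-separated row per open PR, under the header "repo, created at, updated at, user, title, url".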
#!/usr/bin/python
from urllib2 import Request, urlopen, URLError
import sys
from argparse import ArgumentParser
import json
import re
from dateutil.parser import parse
MAX_RESULTS = 50

def get_all_repos(regex, token):
    """Return the names of all repos in the confluentinc org whose names match the regex."""
    pattern = re.compile(regex)
    matching_repos = []
    # GitHub's "page" query parameter is 1-based.
    page = 1
    while True:
        url = "https://api.github.com/orgs/confluentinc/repos?type=private&page=%d&per_page=%d" % (page, MAX_RESULTS)
        request = Request(url)
        request.add_header("Accept", "application/vnd.github.symmetra-preview+json")
        request.add_header("Authorization", "token " + token)
        try:
            response = urlopen(request)
        except URLError as e:
            if hasattr(e, 'reason'):
                print("We failed to reach the API server. Reason: " + str(e.reason))
            elif hasattr(e, 'code'):
                print("The github server couldn't fulfill the request. Error code: " + str(e.code))
            sys.exit(1)
        repos = json.loads(response.read())
        # Collect matching names before the last-page check so the final
        # (partial) page is not dropped.
        matching_repos.extend(filter(pattern.match, [repo.get('name') for repo in repos]))
        if len(repos) < MAX_RESULTS:
            # We have reached the last page since we didn't fill the quota.
            break
        page = page + 1
    return matching_repos

def print_repo_pulls(repo, author_list, token):
    """Print one CSV row per open pull request in the given confluentinc repo."""
    date_format = '%m/%d/%Y %H:%M:%S'
    page = 1
    while True:
        url = "https://api.github.com/repos/confluentinc/%s/pulls?page=%d&per_page=%d" % (repo, page, MAX_RESULTS)
        request = Request(url)
        request.add_header("Accept", "application/vnd.github.symmetra-preview+json")
        request.add_header("Authorization", "token " + token)
        try:
            response = urlopen(request)
        except URLError as e:
            if hasattr(e, 'reason'):
                print("We failed to reach the API server. Reason: " + str(e.reason))
            elif hasattr(e, 'code'):
                print("The github server couldn't fulfill the request. Error code: " + str(e.code))
            sys.exit(1)
        pulls = json.loads(response.read())
        # Remember the unfiltered page size: filtering by author must not end
        # pagination early.
        num_pulls = len(pulls)
        if author_list:
            pulls = filter(lambda pull: pull['user']['login'] in author_list, pulls)
        for pull_request in pulls:
            created = parse(pull_request['created_at']).strftime(date_format)
            updated = parse(pull_request['updated_at']).strftime(date_format)
            print "%s, %s, %s, %s, %s, %s" % (repo,
                                              created,
                                              updated,
                                              pull_request['user']['login'],
                                              pull_request['title'],
                                              pull_request['html_url'])
        if num_pulls < MAX_RESULTS:
            # We have reached the last page since we didn't fill the quota.
            break
        page = page + 1

def main():
    parser = ArgumentParser()
    parser.add_argument("-r", "--repos", dest="repo_regex", required=True,
                        help="The name pattern for repos whose pull requests you want to list, e.g. 'cc.*'")
    parser.add_argument("-t", "--token", dest="github_token", required=True,
                        help="The github oauth token to use for authentication")
    parser.add_argument("-a", "--authors", dest="author_list", required=False, nargs='+',
                        help="Only return open PRs by these authors (github user names)")
    args = parser.parse_args()
    repos = get_all_repos(args.repo_regex, args.github_token)
    print "repo, created at, updated at, user, title, url"
    for repo in repos:
        print_repo_pulls(repo, args.author_list, args.github_token)


if __name__ == "__main__":
    main()
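A note on the repo filter: get_all_repos applies the pattern with re.match, which anchors the regex only at the start of each repository name, so a pattern like 'cc.*' selects every repo whose name begins with "cc". A minimal sketch of that behavior under Python 2, using hypothetical repo names:

    import re

    # Hypothetical repository names; pattern.match only tests the prefix.
    names = ['ccloud-ui', 'cc-docs', 'kafka-connect']
    pattern = re.compile('cc.*')
    print filter(pattern.match, names)   # -> ['ccloud-ui', 'cc-docs']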