Last active
January 11, 2024 02:19
-
-
Save kei-p/67972096650215c546dd64ed4a912cf6 to your computer and use it in GitHub Desktop.
Generates a CSV list of merged pull requests for a repository.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# HOW TO USE:
# ruby gen_pull_request_csv.rb ownername/repo 2024-01-01
require 'json'
require 'csv'
# GitHub personal access token used for API auth.
# ENV.fetch raises KeyError at startup if GH_TOKEN is unset — fail fast
# rather than sending unauthenticated requests later.
@token = ENV.fetch('GH_TOKEN')
# Performs an authenticated GET against the GitHub REST API by shelling
# out to curl (stderr suppressed) and parsing the JSON response.
#
# path - request path beginning with '/', appended to https://api.github.com
#
# Returns the parsed response body as a Hash/Array.
def run_api(path)
  command_lines = [
    'curl -L',
    '-H "Accept: application/vnd.github+json"',
    "-H \"Authorization: Bearer #{@token}\"",
    '-H "X-GitHub-Api-Version: 2022-11-28"',
    "https://api.github.com#{path} 2> /dev/null"
  ]
  # Join with shell line continuations so the command matches the original
  # heredoc byte-for-byte (trailing newline included).
  command = command_lines.join(" \\\n") + "\n"
  JSON.parse(`#{command}`)
end
# Executes a GraphQL query against the GitHub API by shelling out to curl.
#
# query     - GraphQL document (String); whitespace is collapsed to single
#             spaces so the document embeds safely in a one-line JSON payload.
# variables - Hash of GraphQL variables.
#
# Returns the parsed JSON response as a Hash.
def run_graphql(query, variables)
  data = {
    # BUG FIX: the original called query.gsub("\s\+", ' '). In a
    # double-quoted Ruby string "\s" is a space and "\+" is "+", so that
    # matched the literal text " +" — not the intended /\s+/ regex — and
    # whitespace was never collapsed. Use the real regex.
    query: query.gsub(/\s+/, ' ').strip,
    variables: variables
  }
  # NOTE(review): the payload is wrapped in single quotes for the shell;
  # a single quote inside the token or query would break the command —
  # acceptable for a personal script, but worth hardening if reused.
  call = <<~CURL
    curl -L \
    -H "Accept: application/vnd.github+json" \
    -H "Authorization: Bearer #{@token}" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    -X POST \
    -d '#{data.to_json}' \
    https://api.github.com/graphql 2> /dev/null
  CURL
  JSON.parse(`#{call}`)
end
# Loads every merged pull request for +repo+ created on or after
# +created_after+ (YYYY-MM-DD), paging through the GraphQL search API
# 100 items at a time.
#
# Returns an Array of node Hashes (string keys) as returned by the API.
# Raises RuntimeError with the API error payload if the search fails,
# instead of crashing with an opaque NoMethodError on nil.
def load_all_pull_requests(repo:, created_after:)
  all_items = []
  end_cursor = nil
  attempt = 1
  search_query = "is:pr is:merged repo:#{repo} created:>=#{created_after}"
  loop do
    query = <<~GQL
      query loadPullRequest($query: String!, $after: String) {
        search(type: ISSUE, query: $query, first: 100, after: $after) {
          nodes {
            __typename
            ... on PullRequest {
              id
              title
              state
              author {
                login
              }
              closedAt
              baseRefName
              headRefName
            }
          }
          pageInfo {
            endCursor
            hasNextPage
          }
        }
      }
    GQL
    sleep 1 # crude client-side rate limiting between pages
    puts "load - attempts #{attempt}"
    result = run_graphql(query, {
      query: search_query,
      after: end_cursor,
    })
    search = result.dig('data', 'search')
    # Bad token, rate limiting, or a malformed query yields an 'errors'
    # payload and no 'data' — fail loudly rather than NoMethodError on nil.
    raise "GraphQL search failed: #{(result['errors'] || result).inspect}" if search.nil?
    all_items.concat(search.fetch('nodes'))
    page_info = search.fetch('pageInfo')
    return all_items unless page_info['hasNextPage']
    end_cursor = page_info['endCursor']
    attempt += 1
  end
end
# --- main ------------------------------------------------------------------
# Fetches merged PRs, filters to those merged into release-candidate
# (excluding the release PR itself and dependabot), and writes a CSV.
abort('usage: ruby gen_pull_request_csv.rb owner/repo YYYY-MM-DD') if ARGV.size < 2

puts "load pull request with #{ARGV[0..1].join(",")}"
pull_requests = load_all_pull_requests(repo: ARGV[0], created_after: ARGV[1])
  .reject do
    # NOTE(review): dependabot's API login is commonly 'dependabot[bot]' —
    # confirm this exact string against a real payload.
    _1.dig('headRefName') == 'release-candidate' \
      || _1.dig('author', 'login') == 'dependabot' \
      || _1.dig('baseRefName') != 'release-candidate'
  end
  .map do |item|
    {
      # Strip an optional leading "[review] " tag from the title.
      title: item.dig('title').sub(/^\[review\]\s+/, ''),
      state: item.dig('state'),
      author_login: item.dig('author', 'login'),
      closed_at: item.dig('closedAt')
    }
  end
  .sort_by { _1[:closed_at] }

# Guard the empty case: the original called pull_requests.first.keys,
# which raises NoMethodError on nil when no PRs match.
if pull_requests.empty?
  puts 'no pull requests matched; nothing written'
else
  CSV.open("./gh_pull_requests.csv", "wb") do |csv|
    csv << pull_requests.first.keys
    pull_requests.each do |row|
      csv << row.values
    end
  end
  puts "out: ./gh_pull_requests.csv"
end
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment