Created
June 12, 2012 19:01
-
-
Save aheld/2919437 to your computer and use it in GitHub Desktop.
Quick-and-dirty sprint-metrics calculations using the JIRA REST API
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
require 'pp'
require 'rubygems'
require 'open-uri'
require "uri"
require "net/http"
require 'openssl'
require 'json'
require 'digest/sha1'
require 'set'
# Must be lowercase: `require 'Base64'` raises LoadError on
# case-sensitive filesystems (Linux).
require 'base64'
# Date.parse is used below but 'date' was never required explicitly; it
# only worked when another library happened to load it transitively.
require 'date'

# WARNING: redefining VERIFY_PEER globally disables SSL certificate
# verification for the entire process (and emits an "already initialized
# constant" warning). Tolerable for a throwaway metrics script only.
OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE

# metric name => { project key => story points }, filled in by pb_project.
@points_by_project = Hash.new
# Aggregate story points (customfield_10003) per project key and store the
# result in @points_by_project under +metric+.
#
# metric - String label for the aggregation (e.g. "committed").
# data   - Hash parsed from a JIRA /search response; each issue is expected
#          to carry fields.project.key and fields.customfield_10003.
def pb_project(metric, data)
  totals = {}
  data["issues"].group_by { |issue| issue["fields"]["project"]["key"] }.each do |project, issues|
    # group_by yields each project key exactly once, so the original
    # has_key? pre-seeding was redundant. `|| 0` tolerates issues whose
    # story-points field is nil instead of crashing the whole run.
    totals[project] = issues.inject(0) { |sum, issue| sum + (issue["fields"]["customfield_10003"] || 0) }
  end
  @points_by_project[metric] = totals
end
# Fetch a JIRA REST API v2 resource, caching the parsed JSON response on
# disk so repeated runs do not re-hit the server.
#
# api - String API path under /rest/api/2/ (e.g. "search").
# qs  - String query string (caller is responsible for URL-encoding).
#
# Returns the parsed JSON body as a Hash.
def getData(api, qs = "")
  # Cache key covers the full request, so distinct queries get distinct files.
  key = Digest::SHA1.hexdigest(api + qs)
  cache_file = "metrics/cache-#{key}.json"
  # On a cache hit, return immediately: the original needlessly rewrote the
  # cache file (and needlessly built the URL) even when it never hit the
  # network. File.exist? replaces the deprecated File.exists?.
  return JSON.parse(File.read(cache_file)) if File.exist?(cache_file)

  url = "https://#{HOSTNAME}.atlassian.net/rest/api/2/#{api}?#{qs}"
  # ARGV[0]/ARGV[1] are the JIRA username and password for HTTP Basic auth.
  res = open(url,
             "Authorization" => "Basic " + Base64.strict_encode64("#{ARGV[0]}:#{ARGV[1]}")) { |f|
    JSON.parse(f.read)
  }

  # Only write the cache after a fresh fetch; create the cache directory on
  # demand (the original raised Errno::ENOENT when metrics/ was missing).
  Dir.mkdir("metrics") unless File.directory?("metrics")
  File.open(cache_file, "w") do |f|
    f.write(JSON.pretty_generate(res))
  end
  return res
end
# Sprint date comes from the command line (e.g. "20120521"); fix versions in
# JIRA are named after the sprint date in YYYYMMDD form.
sprint_date = Date.parse(ARGV[2])
# NOTE(review): the original conditional here was dead code — both branches
# (sprint_date + 0 and sprint_date) produced the same value, so the commit
# date is simply the sprint date.
commit_date = sprint_date

# Hoist the repeated date formatting: the fix-version label and the quoted
# commit day used inside "was ... ON" clauses.
sprint_version = sprint_date.strftime("%Y%m%d")
commit_day = commit_date.strftime("\"%Y/%m/%d\"")

# JQL for each sprint metric. "Story Points" is customfield_10003.
committed_jql = 'fixversion = ' + sprint_version + ' and fixversion was ' + sprint_version + ' ON ' + commit_day +
  ' AND "Story Points" is not EMPTY AND status not in (canceled, "on hold")'
completed_jql = 'fixversion = ' + sprint_version + ' and fixversion was ' + sprint_version + ' ON ' + commit_day +
  ' and "Story Points" is not EMPTY and resolution != Unresolved and status != Canceled'
delivered_jql = 'fixversion = ' + sprint_version +
  ' and "Story Points" is not EMPTY and resolution != Unresolved and status != Canceled'
number_of_issues_jql = 'fixversion = ' + sprint_version +
  ' and "Story Points" is not EMPTY and resolution != Unresolved and status != Canceled'
delivered_failed_qa_jql = 'fixversion = ' + sprint_version +
  ' and status was "Failed QA"' +
  ' AND "Story Points" is not EMPTY AND resolution != Unresolved and status != Canceled'
total_jql = 'fixversion = ' + sprint_version +
  ' and "Story Points" is not EMPTY and status != Canceled'
in_progress_jql = 'fixversion = ' + sprint_version + ' AND "Story Points" is not EMPTY and status in ("Failed QA", "In Progress", "In Development")'
# Sum of story points (customfield_10003) across all issues of a /search
# response. Extracted because the same inject appeared three times below.
def story_points(res)
  res["issues"].inject(0) { |sum, item| sum + item["fields"]["customfield_10003"] }
end

sprint_label = sprint_date.strftime("%Y%m%d")

# NOTE(review): URI::encode (a.k.a. URI.escape) is deprecated and removed in
# Ruby 3.0; it leaves '&' and '=' unescaped, which is why encoding the
# appended &fields=/&maxResults= parameters along with the JQL happens to
# work. Left as-is to preserve behavior on the old Ruby this targets.
res = getData("search", "jql=" + URI::encode(committed_jql + '&fields=customfield_10003&fields=project' + '&maxResults=400'))
pb_project("committed", res)
puts sprint_label + " committed: " + story_points(res).to_s
committed_by_project = res["issues"].group_by { |x| x["fields"]["project"]["key"] } # NOTE(review): unused below
puts sprint_label + " count: " + res["issues"].length.to_s

# Final issue count: everything pointed and not canceled.
res = getData("search", "jql=" + URI::encode(total_jql + '&maxResults=400'))
puts sprint_label + " final count: " + res["issues"].length.to_s

# Points completed out of the committed set.
res = getData("search", "jql=" + URI::encode(completed_jql + '&fields=customfield_10003&fields=project' + '&maxResults=400'))
pb_project("completed", res)
puts sprint_label + " completed: " + story_points(res).to_s

# Points delivered (resolved, regardless of when they joined the sprint).
# The totals section below relies on `res` still holding this response.
res = getData("search", "jql=" + URI::encode(delivered_jql + '&fields=customfield_10003&fields=project' + '&maxResults=400'))
pb_project("delivered", res)
puts sprint_label + " delivered: " + story_points(res).to_s
#pp @points_by_project

# Print a tab-separated cross-tab of @points_by_project: one row per metric
# (committed / completed / delivered), one column per project key seen in
# any metric, 0 where a metric has no points for a project.
@headers = Set.new
@points_by_project.each_value do |totals|
  @headers = @headers.merge(totals.keys)
end
@headers = @headers.to_a.sort

puts " \t" + @headers.join("\t")
@points_by_project.each do |metric, totals|
  row = metric + "\t"
  @headers.each do |project|
    points = totals.has_key?(project) ? totals[project] : 0
    row << points.to_s << "\t"
  end
  puts row
end
# NOTE: `res` still holds the "delivered" search response from above, so
# these totals are over delivered stories.
total_stories = Float(res["issues"].length)
puts "total " + total_stories.to_s

res = getData("search", "jql=" + URI::encode(delivered_failed_qa_jql + '&fields=customfield_10003' + '&maxResults=400'))
total_stories_with_failure = Float(res["issues"].length)
puts "total w/ fail #{total_stories_with_failure}"
# Guard the zero-denominator case: 0.0/0.0 is NaN and (100 * NaN).round
# raises FloatDomainError, killing the script for an empty sprint.
if total_stories > 0
  puts "Stories with one QA pass " + (100 * (total_stories - total_stories_with_failure) / total_stories).round.to_s + "%"
else
  puts "Stories with one QA pass n/a (no delivered stories)"
end

res = getData("search", "jql=" + URI::encode(in_progress_jql + '&fields=customfield_10003' + '&maxResults=400'))
puts sprint_date.strftime("%Y%m%d") + " Stories in Progress: " + res["issues"].length.to_s

# Scope creep: stories whose fix version was set to this sprint only after
# the commit date (i.e. added mid-sprint).
all_stories_jql = "fixversion was " + sprint_date.strftime("%Y%m%d") + ' AFTER ' + commit_date.strftime("\"%Y/%m/%d\"") +
  ' AND fixVersion was not ' + sprint_date.strftime("%Y%m%d") + ' before ' + (commit_date + 1).strftime("\"%Y/%m/%d\"") +
  ' AND "Story Points" is not EMPTY AND status != canceled ORDER BY status DESC, project ASC, key DESC'
puts all_stories_jql
res = getData("search", "jql=" + URI::encode(all_stories_jql + '&fields=customfield_10003' + '&maxResults=400'))
puts "Total points added in sprint " + res["issues"].inject(0) { |sum, item| sum + item["fields"]["customfield_10003"] }.to_s
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment