|
#!/usr/bin/env ruby |
|
# frozen_string_literal: true |
|
|
|
require 'csv' |
|
require 'json' |
|
require 'fileutils' |
|
|
|
# Load the test matrix: data.json supplies the pages to hit, the VU (virtual
# user) counts to try, and the cool-down period between runs.
data = JSON.parse(File.read('data.json'))

page_names = data['pages'].map { |page| page['name'] }
vuss = data['vus']

# Every page x VU combination is exercised twice: once plain and once against
# the GraphQL variant. Order matters for the run loop below: for each page,
# each VU count yields the plain scenario immediately followed by its
# _graphql counterpart.
scenarios = page_names.product(vuss).flat_map do |page_name, vus|
  base = "#{page_name}_#{vus}"
  [base, "#{base}_graphql"]
end
|
|
|
FileUtils.mkdir_p('summaries')

# Run each scenario through k6, exporting its summary JSON for the
# amalgamation step below.
scenarios.each_with_index do |scenario, index|
  puts "\n#{Time.now} Starting scenario: #{scenario}"

  # Use the env-hash + argv form of Kernel#system instead of backticks:
  # scenario names originate in data.json, so interpolating them into a
  # shell string risks injection/quoting bugs. This form never invokes a
  # shell, and k6's output streams to the console instead of being
  # silently discarded.
  ok = system({ 'SCENARIO' => scenario },
              'k6', 'run', 'k6.js',
              '--summary-export', "summaries/#{scenario}_summary.json")
  # Surface failures rather than ignoring the exit status — a missing or
  # empty summary file would otherwise crash the CSV step with no context.
  warn "#{Time.now} k6 exited with failure for scenario: #{scenario}" unless ok

  puts "#{Time.now} Finished scenario: #{scenario}"

  # Cool down between runs so scenarios don't bleed into each other;
  # skip the sleep after the final scenario.
  sleep(data['sleepPeriod']) unless index == scenarios.size - 1
end
|
|
|
# Maps each CSV column name to the Hash#dig path that extracts it from a k6
# --summary-export JSON document. Order here defines CSV column order.
# Note: %w happily contains parentheses, so the percentile keys fit too.
OUTPUT_LOOKUP = {
  'request_count'        => %w[metrics http_reqs count],
  'request_duration_avg' => %w[metrics http_req_duration avg],
  'request_duration_p90' => %w[metrics http_req_duration p(90)],
  'request_duration_p95' => %w[metrics http_req_duration p(95)],
  # "passes" on http_req_failed really is the failure count — k6 loves the
  # weird double negative of a failure pass.
  'failed_request_count' => %w[metrics http_req_failed passes]
}.freeze
|
|
|
# Amalgamate every exported k6 summary into a single CSV, one row per
# page/VU/GraphQL combination, with one column per OUTPUT_LOOKUP metric.
CSV.open('summaries/data_amalgam.csv', 'w') do |csv|
  # Header row: identifying columns first, then the metric columns in
  # OUTPUT_LOOKUP order.
  csv << ['page_name', 'vus', 'graphql?'] + OUTPUT_LOOKUP.keys

  page_names.each do |page_name|
    vuss.each do |vus|
      [false, true].each do |graphql|
        # The GraphQL variant only differs by its filename suffix.
        suffix = graphql ? '_graphql' : ''
        summary_path = "summaries/#{page_name}_#{vus}#{suffix}_summary.json"
        summary = JSON.parse(File.read(summary_path))

        metric_values = OUTPUT_LOOKUP.values.map { |dig_path| summary.dig(*dig_path) }
        csv << [page_name, vus, graphql] + metric_values
      end
    end
  end
end