jenkins-collect.py — Collect results from builds of the voltha-scale-measurements jobs
"""
Script to get data from voltha-scale-measurements Jenkins builds
Example usage:
python jenkins-collect.py
(and follow the prompts)
"""
import csv
from urllib import urlopen  # Python 2 only; moved to urllib.request in Python 3
from datetime import date
import pprint  # only used by the commented-out debug line in collect()
import json
import logging


class JenkinsWalker:
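    """Walks a set of Jenkins jobs, downloads the timing artifacts of their
    most recent builds and exports the aggregated averages to a CSV file."""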
    def __init__(self, jenkins_base_url, jenkins_job_name, jobs, jobs_info, jobs_to_collect):
        self.jenkins_base_url = jenkins_base_url
        self.jenkins_job_name = jenkins_job_name
        self.jobs_partial_name = jobs
        self.jobs_info = jobs_info
        self.jobs_to_collect = jobs_to_collect
        self.jobs = []
        self.job_hash_map = {}
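
    # Expand the partial job names into full Jenkins job names and
    # initialize an empty result entry for each of them.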
    def build_job_hash_map(self):
        for job in self.jobs_partial_name:
            full_name = self.get_job_name(job)
            self.jobs.append(full_name)
            self.job_hash_map[full_name] = {
                "build-range": "",
                "last-build": None,
                "total-builds": 0,
                "successful-builds": 0,
                "voltha-onu-results": [],
                "voltha-onu-avg": None,
                "voltha-flows-results": [],
                "voltha-flows-avg": None,
                "onos-ports-results": [],
                "onos-ports-avg": None,
                "onos-flows-results": [],
                "onos-flows-avg": None,
            }
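
    # Compose a full job name from the common prefix and a partial name.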
    def get_job_name(self, partial):
        return "{}-{}".format(self.jenkins_job_name, partial)
    def get_last_build(self, job_name):
        # e.g. http://jenkins.opencord.org/job/voltha-scale-measurements-periodic-1-64-1000ms/lastBuild/api/json
        try:
            last_build = urlopen("%s/%s/lastBuild/api/json" % (self.jenkins_base_url, job_name)).read().strip('\n')
            return int(json.loads(last_build)["id"])
        except ValueError:
            # a non-existing job returns an HTML error page, which fails to parse as JSON
            return "unknown"
    def compute_last_build_for_all_jobs(self):
        for job in self.jobs:
            self.job_hash_map[job]["last-build"] = self.get_last_build(job)
    def fetch_results(self, job_name, first_build):
        for j in range(first_build, first_build + self.jobs_to_collect):
            logging.debug("Fetching results for build %s #%s" % (job_name, j))
            self.job_hash_map[job_name]["total-builds"] += 1
            # check that the build was successful
            build_status = urlopen(
                "%s/%s/%s/api/json" % (self.jenkins_base_url, job_name, j)).read().strip('\n')
            if json.loads(build_status)["result"] != "SUCCESS":
                logging.warning("Build %s #%s failed" % (job_name, j))
                continue
            self.job_hash_map[job_name]["successful-builds"] += 1
            # get the VOLTHA devices activation time
            voltha_devices = urlopen("%s/%s/%s/artifact/voltha-devices-time-num.txt" %
                                     (self.jenkins_base_url, job_name, j)).read().strip('\n')
            self.job_hash_map[job_name]["voltha-onu-results"].append(int(voltha_devices))
            # get the ONOS ports discovery time
            onos_ports = urlopen(
                "%s/%s/%s/artifact/onos-ports-time-num.txt" % (self.jenkins_base_url, job_name, j)).read().strip('\n')
            self.job_hash_map[job_name]["onos-ports-results"].append(int(onos_ports))
            # if the job checks for flows, collect those numbers too
            if self.jobs_info[job_name]['flows']:
                voltha_flows = urlopen(
                    "%s/%s/%s/artifact/voltha-flows-time-num.txt" % (self.jenkins_base_url, job_name, j)) \
                    .read().strip('\n')
                self.job_hash_map[job_name]["voltha-flows-results"].append(int(voltha_flows))
                onos_flows = urlopen(
                    "%s/%s/%s/artifact/onos-flows-time-num.txt" % (self.jenkins_base_url, job_name, j)) \
                    .read().strip('\n')
                self.job_hash_map[job_name]["onos-flows-results"].append(int(onos_flows))
    def collect(self):
        self.build_job_hash_map()
        self.compute_last_build_for_all_jobs()
        for job_name in self.jobs:
            logging.info("Processing job %s" % job_name)
            if self.job_hash_map[job_name]["last-build"] == "unknown":
                # this job does not exist on the Jenkins server
                logging.warning("Skipping job %s as it does not exist" % job_name)
                continue
            # collect the last `jobs_to_collect` builds, ending with the most recent one
            first_job = self.job_hash_map[job_name]["last-build"] + 1 - self.jobs_to_collect
            self.job_hash_map[job_name]["build-range"] = "From %s to %s" % (
                first_job, self.job_hash_map[job_name]["last-build"])
            self.fetch_results(job_name, first_job)
        self.compute_average()
        # pprint.pprint(self.job_hash_map)
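
    # Average the collected numbers for every job; jobs whose builds all
    # failed are marked as "failed", non-existing jobs accordingly.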
    def compute_average(self):
        for k in self.job_hash_map:
            # VOLTHA devices average (float() avoids Python 2 integer division)
            if len(self.job_hash_map[k]["voltha-onu-results"]) > 0:
                self.job_hash_map[k]["voltha-onu-avg"] = \
                    float(sum(self.job_hash_map[k]["voltha-onu-results"])) \
                    / len(self.job_hash_map[k]["voltha-onu-results"])
            elif self.job_hash_map[k]["last-build"] == "unknown":
                self.job_hash_map[k]["voltha-onu-avg"] = "job does not exist"
            else:
                self.job_hash_map[k]["voltha-onu-avg"] = "failed"
            # ONOS ports average
            if len(self.job_hash_map[k]["onos-ports-results"]) > 0:
                self.job_hash_map[k]["onos-ports-avg"] = \
                    float(sum(self.job_hash_map[k]["onos-ports-results"])) \
                    / len(self.job_hash_map[k]["onos-ports-results"])
            elif self.job_hash_map[k]["last-build"] == "unknown":
                self.job_hash_map[k]["onos-ports-avg"] = "job does not exist"
            else:
                self.job_hash_map[k]["onos-ports-avg"] = "failed"
            # VOLTHA flows average
            if len(self.job_hash_map[k]["voltha-flows-results"]) > 0:
                self.job_hash_map[k]["voltha-flows-avg"] = \
                    float(sum(self.job_hash_map[k]["voltha-flows-results"])) \
                    / len(self.job_hash_map[k]["voltha-flows-results"])
            else:
                self.job_hash_map[k]["voltha-flows-avg"] = "unknown"
            # ONOS flows average
            if len(self.job_hash_map[k]["onos-flows-results"]) > 0:
                self.job_hash_map[k]["onos-flows-avg"] = \
                    float(sum(self.job_hash_map[k]["onos-flows-results"])) \
                    / len(self.job_hash_map[k]["onos-flows-results"])
            else:
                self.job_hash_map[k]["onos-flows-avg"] = "unknown"
    def export(self, csv_file):
        csv_columns = [
            'Name',
            'Total ONUs',
            'OLTs',
            'PON Ports',
            'ONUs per PON port',
            'Delay',
            'Has Flows',
            'Average Devices in VOLTHA (s)', 'Average Ports in ONOS (s)',
            'Average Flows in VOLTHA (s)', 'Average Flows in ONOS (s)',
            'Notes']
        with open(csv_file, mode='w') as f:
            writer = csv.DictWriter(f, fieldnames=csv_columns)
            try:
                writer.writeheader()
                for job in self.jobs:
                    jf = self.jobs_info[job]
                    data = {
                        'Name': job,
                        'Total ONUs': jf["olts"] * jf["pons"] * jf["onus"],
                        'OLTs': jf["olts"],
                        'PON Ports': jf["pons"],
                        'ONUs per PON port': jf["onus"],
                        'Delay': jf["delay"],
                        'Has Flows': "Y" if jf["flows"] else "N",
                        'Average Devices in VOLTHA (s)': self.job_hash_map[job]["voltha-onu-avg"],
                        'Average Ports in ONOS (s)': self.job_hash_map[job]["onos-ports-avg"],
                        'Average Flows in VOLTHA (s)': self.job_hash_map[job]["voltha-flows-avg"],
                        'Average Flows in ONOS (s)': self.job_hash_map[job]["onos-flows-avg"],
                        'Notes': "Total Builds: %s \n Successful Builds: %s \n Build range: %s" %
                                 (self.job_hash_map[job]["total-builds"],
                                  self.job_hash_map[job]["successful-builds"],
                                  self.job_hash_map[job]["build-range"]),
                    }
                    writer.writerow(data)
                print("Data stored in %s" % csv_file)
            except IOError:
                print("I/O Error while writing %s" % csv_file)
if __name__ == "__main__":
# TODO make logging configurable
logging.basicConfig(level=0)
print("This tool will collect the results of the voltha-scale-measurements-periodic jobs"
" and output a .csv file containing the results")
jobs_to_collect = int(input("How many jobs do you want to collect? "))
# jobs_to_collect = 2
output_file = "jenkins-%s.csv" % date.today().strftime("%m-%d-%Y")
# job name in jenkins
jenkins_base_url = "http://jenkins.opencord.org/job"
jenkins_job_name = "voltha-scale-measurements-periodic"
    jobs = [
        "1-1-200ms",
        "1-128-200ms",
        "1-64-200ms",
        "10-20-200ms",
        "10-20-200ms-with-flows",
        "16-32-200ms",
        "16-64-200ms",
        "2-OLTs-10-10-200ms",
        "4-32-200ms",
        "4-64-200ms",
        "8-16-200ms",
        "8-32-200ms",
    ]
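
    # topology parameters for each job: number of OLTs, PON ports per OLT,
    # ONUs per PON port, the configured delay (e.g. 200ms), and whether
    # the job also provisions flows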
    jobs_info = {
        "%s-1-1-200ms" % jenkins_job_name: {"olts": 1, "pons": 1, "onus": 1, "delay": "200ms", "flows": False},
        "%s-1-128-200ms" % jenkins_job_name: {"olts": 1, "pons": 1, "onus": 128, "delay": "200ms", "flows": False},
        "%s-1-64-200ms" % jenkins_job_name: {"olts": 1, "pons": 1, "onus": 64, "delay": "200ms", "flows": False},
        "%s-10-20-200ms" % jenkins_job_name: {"olts": 1, "pons": 10, "onus": 20, "delay": "200ms", "flows": False},
        "%s-10-20-200ms-with-flows" % jenkins_job_name: {"olts": 1, "pons": 10, "onus": 20, "delay": "200ms", "flows": True},
        "%s-16-32-200ms" % jenkins_job_name: {"olts": 1, "pons": 16, "onus": 32, "delay": "200ms", "flows": False},
        "%s-16-64-200ms" % jenkins_job_name: {"olts": 1, "pons": 16, "onus": 64, "delay": "200ms", "flows": False},
        "%s-2-OLTs-10-10-200ms" % jenkins_job_name: {"olts": 2, "pons": 10, "onus": 10, "delay": "200ms", "flows": False},
        "%s-4-32-200ms" % jenkins_job_name: {"olts": 1, "pons": 4, "onus": 32, "delay": "200ms", "flows": False},
        "%s-4-64-200ms" % jenkins_job_name: {"olts": 1, "pons": 4, "onus": 64, "delay": "200ms", "flows": False},
        "%s-8-16-200ms" % jenkins_job_name: {"olts": 1, "pons": 8, "onus": 16, "delay": "200ms", "flows": False},
        "%s-8-32-200ms" % jenkins_job_name: {"olts": 1, "pons": 8, "onus": 32, "delay": "200ms", "flows": False},
    }
    jw = JenkinsWalker(
        jenkins_base_url,
        jenkins_job_name,
        jobs,
        jobs_info,
        jobs_to_collect
    )
    jw.collect()
    jw.export(output_file)