Skip to content

Instantly share code, notes, and snippets.

@nvgoldin
Created February 22, 2017 19:31
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save nvgoldin/17a438274f91e271472bea80239e5323 to your computer and use it in GitHub Desktop.
Compare jenkins job average execution time relative to a build
import numpy as np
import jenkins
import getpass
import time
"""
compare a jenkins job average execution time relative to a build
advisable to have: https://wiki.jenkins-ci.org/display/JENKINS/Metrics+Plugin
"""
def get_server(auth):
    """Open a new client connection to the Jenkins master described by *auth*."""
    server = jenkins.Jenkins(**auth)
    return server
def _fetch_build(auth, server, job_name, idx, retries=5):
    """Fetch build *idx* of *job_name*, retrying on transient API errors.

    Each retry sleeps one second and reconnects with a fresh client.
    Returns ``(build, server)`` — the (possibly re-created) client is
    handed back to the caller — or ``(None, server)`` when all retries
    are exhausted.
    """
    while True:
        try:
            return server.get_build_info(job_name, idx, depth=2), server
        except Exception:
            # narrowed from a bare `except:` so Ctrl-C still interrupts;
            # treat any API/network error as transient and retry
            if retries == 0:
                return None, server
            retries -= 1
            time.sleep(1)
            server = get_server(auth)


def collect_metrics(auth, job_name, start, end):
    """Collect and print timing statistics for builds ``start..end-1``.

    For each successful build, records ``[execution_ms, queuing_ms]``
    taken from the Metrics plugin's ``TimeInQueueAction``; falls back to
    the plain build duration (queue time = NaN) when the plugin data is
    absent.  Builds that cannot be fetched or parsed are skipped.

    Returns the collected data as an ``(n, 2)`` numpy array of
    milliseconds (an empty array when no successful build was found).
    """
    print('jobs {0}..{1}, collecting data...'.format(start, end))
    metrics = []
    server = get_server(auth)
    for idx in range(start, end):
        build, server = _fetch_build(auth, server, job_name, idx)
        if build is None:
            continue
        if build['result'] != 'SUCCESS':
            continue
        try:
            metric = next(
                action for action in build['actions']
                if action['_class'] ==
                'jenkins.metrics.impl.TimeInQueueAction'
            )
            metrics.append(
                [
                    metric['totalDurationMillis'] -
                    metric['queuingDurationMillis'],
                    metric['queuingDurationMillis']
                ]
            )
        except StopIteration:
            # no build metrics plugin: fall back to the raw duration
            metrics.append([build['duration'], np.nan])
        except (KeyError, TypeError):
            # malformed action entry (missing keys / wrong shape): skip.
            # Narrowed from a bare `except:` which hid real errors.
            continue
    metrics = np.array(metrics)
    if metrics.size == 0:
        # Guard: `metrics[:, 0]` on an empty 1-D array raises IndexError.
        print('jobs {0}..{1}, found 0 successful builds'.format(start, end))
        return metrics
    # convert milliseconds -> minutes
    to_minutes = 10**3 * 60
    avg_exe = np.nanmean(metrics[:, 0]) / to_minutes
    std_exe = np.nanstd(metrics[:, 0]) / to_minutes
    avg_queue = np.nanmean(metrics[:, 1]) / to_minutes
    top_10_max = np.sort(metrics[:, 0])[-10:] / to_minutes
    top_10_min = np.sort(metrics[:, 0])[:10] / to_minutes
    max_exec = np.nanmax(metrics[:, 0]) / to_minutes
    min_exec = np.nanmin(metrics[:, 0]) / to_minutes
    print(
        'jobs {0}..{1}, found {2} successful builds'.
        format(start, end, metrics.shape[0])
    )
    print('average execution time: {0:.3} minutes'.format(avg_exe))
    print('stddev execution time: {0:.3} minutes'.format(std_exe))
    print('average time in queue: {0:.3} minutes'.format(avg_queue))
    print('max execution time: {0:.3} minutes'.format(max_exec))
    print('min execution time: {0:.3} minutes'.format(min_exec))
    np.set_printoptions(precision=3, suppress=True)
    print('top 10 longest execution time(minutes): {0}'.format(top_10_max))
    print('top 10 shortest execution time(minutes): {0}'.format(top_10_min))
    return metrics
def main():
    """Interactively compare build timings before and after a cutoff build.

    Prompts for a job name, a cutoff build number, and a padding (window
    size), then prints timing statistics for the `padding` builds on
    each side of the cutoff.
    """
    job_name = 'test-repo_ovirt_experimental_master'
    job_name = input('job_name [{0}]: '.format(job_name)) or job_name
    cutoff_build = 5357
    cutoff_build = input('cutoff build [{0}]: '.format(cutoff_build)
                         ) or cutoff_build
    cutoff_build = int(cutoff_build)
    padding = 10
    padding = input('padding [{0}]: '.format(padding)) or padding
    padding = int(padding)
    auth = {
        'url': 'http://jenkins.ovirt.org',
        'username': input('username:'),
        'password': getpass.getpass(),
        'timeout': 10
    }
    server = get_server(auth)
    jobs = server.get_job_info(job_name, fetch_all_builds=True)
    builds = sorted(build['number'] for build in jobs['builds'])
    # Explicit validation instead of `assert`, which is stripped under -O.
    if cutoff_build not in builds:
        raise SystemExit(
            'cutoff build {0} not found in job {1}'.format(
                cutoff_build, job_name
            )
        )
    print('fetched %s jobs metadata' % len(builds))
    # stats for the window just before the cutoff ...
    collect_metrics(
        auth, job_name, max(min(builds), cutoff_build - padding), cutoff_build
    )
    print('*' * 50)
    # ... and for the window just after it
    collect_metrics(
        auth, job_name, cutoff_build, min(cutoff_build + padding, max(builds))
    )


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment