@apevec
Last active June 11, 2018 14:15
Determine maximum number of concurrently running Zuul jobs
# Copied and slightly modified from https://github.com/openstack-infra/zuul/blob/master/zuul/driver/sql/sqlconnection.py
# Copyright 2014 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
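# Standalone helper, not part of Zuul itself: point it at the database that
# Zuul's SQL reporter writes to (the dburi is taken from the first command
# line argument) and it prints the total number of builds found plus the
# maximum number of jobs that were running at the same time.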
import logging

import sqlalchemy as sa
import sqlalchemy.pool
from sqlalchemy.sql import select

BUILDSET_TABLE = 'zuul_buildset'
BUILD_TABLE = 'zuul_build'
class Report:
    log = logging.getLogger("zuul.Report")

    def __init__(self, dburi):
        self.dburi = dburi
        self.engine = None
        self.connection = None
        self.tables_established = False
        self.table_prefix = ""
        try:
            self.engine = sa.create_engine(self.dburi)
            self.zuul_buildset_table, self.zuul_build_table \
                = self._setup_tables()
            self.tables_established = True
        except sa.exc.NoSuchModuleError:
            self.log.exception(
                "The required module for the dburi dialect isn't available. "
                "SQL connection %s will be unavailable." % self.dburi)
        except sa.exc.OperationalError:
            self.log.exception(
                "Unable to connect to the database or establish the required "
                "tables. Reporter %s is disabled" % self)
    def _setup_tables(self):
        metadata = sa.MetaData()

        zuul_buildset_table = sa.Table(
            self.table_prefix + BUILDSET_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('zuul_ref', sa.String(255)),
            sa.Column('pipeline', sa.String(255)),
            sa.Column('project', sa.String(255)),
            sa.Column('branch', sa.String(255)),
            sa.Column('change', sa.Integer, nullable=True),
            sa.Column('patchset', sa.String(255), nullable=True),
            sa.Column('ref', sa.String(255)),
            sa.Column('oldrev', sa.String(255)),
            sa.Column('newrev', sa.String(255)),
            sa.Column('ref_url', sa.String(255)),
            sa.Column('result', sa.String(255)),
            sa.Column('message', sa.TEXT()),
            sa.Column('tenant', sa.String(255)),
        )

        zuul_build_table = sa.Table(
            self.table_prefix + BUILD_TABLE, metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('buildset_id', sa.Integer,
                      sa.ForeignKey(self.table_prefix +
                                    BUILDSET_TABLE + ".id")),
            sa.Column('uuid', sa.String(36)),
            sa.Column('job_name', sa.String(255)),
            sa.Column('result', sa.String(255)),
            sa.Column('start_time', sa.DateTime()),
            sa.Column('end_time', sa.DateTime()),
            sa.Column('voting', sa.Boolean),
            sa.Column('log_url', sa.String(255)),
            sa.Column('node_name', sa.String(255)),
        )

        return zuul_buildset_table, zuul_build_table
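    # Select only the columns the concurrency scan needs, ordered by id
    # (assumed to roughly match start order; see the note above the main loop).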
    def query_build(self):
        build = self.zuul_build_table
        query = select([
            build.c.uuid,
            build.c.job_name,
            build.c.voting,
            build.c.node_name,
            build.c.start_time,
            build.c.end_time]).select_from(build)
        return query.order_by(build.c.id)
        # return query.limit(100).order_by(build.c.id)
    def get_sorted_builds(self):
        """Return a list of builds ordered by id."""
        builds = []
        with self.engine.begin() as conn:
            for row in conn.execute(self.query_build()):
                build = dict(row)
                # Compute run duration
                if row.start_time and row.end_time:
                    build['duration'] = (row.end_time -
                                         row.start_time).total_seconds()
                builds.append(build)
        return builds
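# Script part (runs at module level): connect using the dburi given on the
# command line, then scan the builds Zuul recorded.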
from itertools import filterfalse
import sys

try:
    dburi = sys.argv[1]
except IndexError:
    dburi = "mysql+pymysql://zuul:XXX@localhost/zuul"

report = Report(dburi)
# START Determine maximum number of concurrently running Zuul jobs
global CURRENT_TIME


def has_finished(item):
    return item['end_time'] < CURRENT_TIME


running = []
max_running = 0
n = 0
# Assumption: builds in the running order (id or start_time)
# build: {'uuid': '3c2b8e6839b6426e88e9df0b050de6d3', 'job_name': 'linters', 'voting': True, 'node_name': None, 'start_time': datetime.datetime(2017, 7, 25, 9, 55, 1), 'end_time': datetime.datetime(2017, 7, 25, 9, 55, 6), 'duration': 5.0}
for build in report.get_sorted_builds():
    # skip invalid records
    if build['start_time'] is None or build['end_time'] is None:
        continue
    CURRENT_TIME = build['start_time']
    # remove jobs which finished
    running[:] = filterfalse(has_finished, running)
    running.append(build)
    max_running = max(max_running, len(running))
    n = n + 1

print("Total %d jobs, max. concurrently running %d" % (n, max_running))