Generate test data for InfluxDB (line protocol)
# Generate random InfluxDB line-protocol records that mimic CI job metrics.
import random
import time


def get_data():
    d = {
        # Tags: job metadata
        'branch': random.choice(["master"] * 3 + ["pike", "ocata"]),
        'cloud': random.choice(["rdo-cloud", "rh1", "infra1", "infra2"]),
        'pipeline': random.choice(["check"] * 2 + ["periodic", "gate"]),
        'toci_jobtype': random.choice(['ovb-ha-oooq'] * 2 + ['multinode-2ctlr-featureset032', 'multinode-1ctlr-featureset016']),
        # Fields: durations (seconds) and log size
        'job_duration': random.randrange(7200, 10800),
        'logs_size': random.randrange(10, 27),
        'testenv_prepare': random.randrange(300, 700),
        'zuul_host_prepare': random.randrange(270, 440),
        'quickstart_prepare': random.randrange(250, 400),
        'undercloud_install': random.randrange(1200, 2200),
        'prepare_images': random.randrange(600, 1000),
        'images_update': random.randrange(800, 1200),
        'images_build': random.randrange(2000, 2800),
        'containers_prepare': random.randrange(1200, 2200),
        'overcloud_deploy': random.randrange(3900, 7200),
        'pingtest': random.randrange(150, 300),
        'tempest_run': random.randrange(240, 400),
        'undercloud_reinstall': random.randrange(800, 1800),
        'overcloud_delete': random.randrange(150, 300),
        'overcloud_upgrade': random.randrange(1200, 2400),
        'undercloud_upgrade': random.randrange(800, 1100),
        # Timestamp: a random second within the last 24 hours
        'timestamp': random.randrange(int(time.time()) - 86400, int(time.time())),
    }
    # InfluxDB line protocol: measurement,tag=value,... field=value,... timestamp
    data = "job,branch={branch},cloud={cloud},pipeline={pipeline},toci_jobtype={toci_jobtype} job_duration={job_duration},logs_size={logs_size},testenv_prepare={testenv_prepare},zuul_host_prepare={zuul_host_prepare},quickstart_prepare={quickstart_prepare},undercloud_install={undercloud_install},prepare_images={prepare_images},images_update={images_update},images_build={images_build},containers_prepare={containers_prepare},overcloud_deploy={overcloud_deploy},pingtest={pingtest},tempest_run={tempest_run},undercloud_reinstall={undercloud_reinstall},overcloud_delete={overcloud_delete},overcloud_upgrade={overcloud_upgrade},undercloud_upgrade={undercloud_upgrade} {timestamp}".format(**d)
    return data


# Write 300 random points, one line-protocol record per line.
with open("/tmp/data.txt", "w") as f:
    for _ in range(300):
        data = get_data()
        f.write(data + "\n")
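

# Optional sketch (not part of the original flow): the same points could be
# POSTed straight to InfluxDB's /write endpoint instead of going through
# /tmp/data.txt and curl. Assumes Python 3, InfluxDB on localhost:8086 and an
# existing "mydb" database (see the curl commands below).
def post_to_influxdb(points, url="http://localhost:8086/write?db=mydb&precision=s"):
    import urllib.request  # local import so the script above keeps working unchanged
    # One line-protocol record per line, exactly like the file written above.
    payload = "\n".join(points).encode("utf-8")
    req = urllib.request.Request(url, data=payload)  # data=... makes it a POST
    with urllib.request.urlopen(req) as resp:
        return resp.status  # InfluxDB answers 204 when the points are accepted

# Example call (commented out so the script still runs without a live InfluxDB):
# post_to_influxdb(get_data() for _ in range(300))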
# And in bash:
# mkdir grafana influxdb
# docker run --rm --name grafana -p 80:80 -p 3000:3000 -v $PWD/grafana:/var/lib/grafana:z grafana/grafana
# docker run --rm --name influxdb -p 8083:8083 -p 8086:8086 -v $PWD/influxdb:/var/lib/influxdb:z influxdb:1.4-alpine
#
# To drop the database:
# curl -i -XPOST http://localhost:8086/query --data-urlencode "q=DROP DATABASE mydb"
#
# To create the database, load the generated data and query it:
# curl -i -XPOST http://localhost:8086/query --data-urlencode "q=CREATE DATABASE mydb"
# curl -i -XPOST 'http://localhost:8086/write?db=mydb&precision=s' --data-binary @/tmp/data.txt
# curl -G 'http://localhost:8086/query?db=mydb' --data-urlencode 'q=SELECT * FROM "job"' | less
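#
# An example aggregate query of the kind a Grafana panel would use over this data
# (a sketch: the field/tag names come from the script above, the 1h/24h windows are arbitrary):
# curl -G 'http://localhost:8086/query?db=mydb' --data-urlencode 'q=SELECT mean("overcloud_deploy") FROM "job" WHERE time > now() - 24h GROUP BY time(1h), "branch"'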