@paul
Created January 4, 2012 17:14
# A sample Gemfile
source "http://rubygems.org"
gem "rbench"
gem "mongomatic"
gem "bson_ext"
gem "activerecord"
gem "sequel"
gem "pg"
gem "awesome_print"
gem "progress_bar"
require 'sequel'
require 'logger'
require 'ap'
require 'progress_bar'
DB = Sequel.connect('postgres://localhost/tycho-bench')
puts "Creating postgres tables"
DB << %{ DROP TABLE IF EXISTS metrics }
DB << %{
  CREATE TABLE metrics (
    id serial PRIMARY KEY,
    account_id INTEGER NOT NULL,
    name VARCHAR NOT NULL,
    latest_datapoint_at TIMESTAMP NOT NULL,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL
  )
}
DB << %{ CREATE UNIQUE INDEX ON metrics (account_id, name) }
DB << %{ CREATE INDEX ON metrics (account_id, name, latest_datapoint_at DESC) }
DB << %{ CREATE INDEX ON metrics (account_id, updated_at DESC) }
%w[daily monthly yearly].each do |period|
table = "#{period}_datapoints"
DB << %{ DROP TABLE IF EXISTS #{table} }
DB << %{
CREATE TABLE #{table} (
metric_id INTEGER NOT NULL,
timestamp TIMESTAMP NOT NULL,
count DECIMAL NOT NULL,
sum DECIMAL NOT NULL,
sum_sq DECIMAL NOT NULL
)
}
DB << %{ CREATE UNIQUE INDEX ON #{table} (metric_id, timestamp DESC) }
end
puts "creating mongo indexes"
require 'mongo'
conn = Mongo::Connection.new
conn.drop_database("tycho-bench")
M = conn.db("tycho-bench")
M['metrics'].create_index [[:account_id, Mongo::ASCENDING], [:name, Mongo::ASCENDING]], :unique => true
M['metrics'].create_index [[:account_id, Mongo::ASCENDING], [:name, Mongo::ASCENDING], [:latest_datapoint_at, Mongo::DESCENDING]]
M['metrics'].create_index [[:account_id, Mongo::ASCENDING], [:updated_at, Mongo::DESCENDING]]
%w[daily monthly yearly].each do |period|
  # Datapoint documents use abbreviated keys (m = metric_id, t = timestamp,
  # and later c/s/q = count/sum/sum_sq).
  coll = M["#{period}_datapoints"]
  coll.create_index [["m", 1], ["t", -1]]
end
# Build the timestamp series: one day at 1-minute resolution, one month at
# 1-hour resolution, and one year at 1-day resolution.
time = Time.utc(2011, 1, 1, 0, 0, 0)
end_time = Time.utc(2011, 1, 2, 0, 0, 0)
daily_times = [time]
until time > end_time
  time += 60
  daily_times << time
end

time = Time.utc(2011, 1, 1, 0, 0, 0)
end_time = Time.utc(2011, 2, 1, 0, 0, 0)
monthly_times = [time]
until time > end_time
  time += 60*60
  monthly_times << time
end

time = Time.utc(2011, 1, 1, 0, 0, 0)
end_time = Time.utc(2012, 1, 1, 0, 0, 0)
yearly_times = [time]
until time > end_time
  time += 24*60*60
  yearly_times << time
end
puts "inserting 1000 metrics & datapoints in postgres"
bar = ProgressBar.new 1000
1000.times do |i|
t = Time.now
DB[:metrics].insert(:account_id => 0, :name => "example.com/cpu/#{i}/idle", :latest_datapoint_at => t, :created_at => t, :updated_at => t)
DB[:daily_datapoints].multi_insert(daily_times.map { |time|
{:metric_id => i, :timestamp => time, :count => 1, :sum => 1, :sum_sq => 1}
})
DB[:monthly_datapoints].multi_insert(monthly_times.map { |time|
{:metric_id => i, :timestamp => time, :count => 1, :sum => 1, :sum_sq => 1}
})
DB[:yearly_datapoints].multi_insert(yearly_times.map { |time|
{:metric_id => i, :timestamp => time, :count => 1, :sum => 1, :sum_sq => 1}
})
bar.increment!
end
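# Not part of the original benchmark: a quick sanity check showing the kind of
# read the (metric_id, timestamp DESC) index above is built for -- the ten most
# recent daily datapoints for a single metric (metric_id 1 is just an example).
recent_pg = DB[:daily_datapoints].where(:metric_id => 1).reverse_order(:timestamp).limit(10).all
ap recent_pg.first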
puts "inserting 1000 metrics & datapoints in mongo"
bar = ProgressBar.new 1000
1000.times do |i|
t = Time.now
M['metrics'].insert(:account_id => 0, :name => "example.com/cpu/#{i}/idle", :latest_datapoint_at => t, :created_at => t, :updated_at => t)
M['daily_datapoints'].insert(daily_times.map { |time|
{:m => i, :t => time, :c => 1, :s => 1, :q => 1}
})
M['monthly_datapoints'].insert(monthly_times.map { |time|
{:m => i, :t => time, :c => 1, :s => 1, :q => 1}
})
M['yearly_datapoints'].insert(yearly_times.map { |time|
{:m => i, :t => time, :c => 1, :s => 1, :q => 1}
})
bar.increment!
end
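# Likewise for MongoDB (not part of the original benchmark): the compound
# [m, t] index supports the same "latest N datapoints for one metric" read.
recent_mongo = M['daily_datapoints'].find({:m => 1}, :sort => [['t', -1]], :limit => 10).to_a
ap recent_mongo.first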
puts "Postgres Sizes"
puts
sizes = DB[%{SELECT relname AS "relation", pg_relation_size(C.oid) AS "size"
             FROM pg_class C
             LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace)
             WHERE nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
             ORDER BY pg_relation_size(C.oid) DESC
             LIMIT 20;}]
puts "name".ljust(50) + ' ' + "size".rjust(12)
sizes.each do |item|
  puts item[:relation].ljust(50) + ' ' + ("%0.2f KB" % (item[:size].to_f / 1024)).rjust(12)
end
total = sizes.inject(0) { |sum, item| sum + item[:size] }
puts "TOTAL".rjust(50) + ' ' + ("%0.2f KB" % (total.to_f / 1024)).rjust(12)
puts "MongoDB Sizes"
puts
stats = M.collections.map { |c| c.stats }
stats.each do |stat|
  puts stat["ns"].ljust(50) + ' ' + ("%0.2f KB" % (stat["storageSize"].to_f / 1024)).rjust(12)
  stat["indexSizes"].each do |name, size|
    puts name.rjust(50) + ' ' + ("%0.2f KB" % (size.to_f / 1024)).rjust(12)
  end
end
puts "Total: "
s = M.stats
puts "storageSize " + ("%0.2f KB" % (s["storageSize"].to_f / 1024)).rjust(12)
puts "indexSize " + ("%0.2f KB" % (s["indexSize"].to_f / 1024)).rjust(12)
puts "grand total " + ("%0.2f KB" % ((s["indexSize"] + s["storageSize"]).to_f / 1024)).rjust(12)