@hayduke19us
Last active February 18, 2018 23:55
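
# Benchmark sketch comparing two ways of pushing RESOURCES simulated uploads:
# a thread-pooled SuperUploader (bounded SizedQueue plus a monitor condition
# variable) versus a plain sequential loop. Each "upload" just sleeps for
# BLOCKING_TIME seconds, and elapsed times are appended to
# cloudinary_uploader_stats.csv.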
require 'byebug'
require 'benchmark/ips'
require 'csv'
require 'monitor' # MonitorMixin and new_cond come from the monitor stdlib

# Number of simulated uploads and how long each one blocks, in seconds.
RESOURCES = 30
BLOCKING_TIME = 3
class SuperUploader
  THREAD_MAX = 10

  attr_reader :outgoing, :thread_max, :queue, :results, :threads, :switch

  def initialize(outgoing)
    @outgoing = outgoing
    @thread_max = [outgoing.count, THREAD_MAX].min
    @queue = SizedQueue.new @thread_max
    @sysexit = false
    @results = []

    # Fixed-size pool of worker slots. The array doubles as the monitor that
    # guards the condition variable signalled whenever a slot frees up.
    @threads = Array.new(@thread_max).extend(MonitorMixin)
    @switch = threads.new_cond
  end

  # A slot is open if it was never used or its thread has already finished.
  def open_slot?(t)
    t.nil? || !t.status
  end
  def execute
    Thread.abort_on_exception = true

    # Consumer: pulls items off the queue and hands each one to a free slot.
    uploader = Thread.new do
      loop do
        break if @sysexit && queue.empty?

        index = nil
        threads.synchronize do
          # Block until at least one slot frees up, then claim the last open one.
          switch.wait_until { threads.any? { |t| open_slot? t } }
          index = threads.rindex { |t| open_slot? t }
        end

        outgoing_item = queue.deq
        # A nil item means the queue was closed after draining; loop back so the
        # break condition above can fire instead of spawning a worker for nil.
        next if outgoing_item.nil?

        threads[index] = Thread.new(outgoing_item) do |item|
          sleep(BLOCKING_TIME) # simulate a blocking upload
          results.push item
          threads.synchronize { switch.signal }
        end
      end
    end

    # Producer: feeds the bounded queue, then flags shutdown.
    scheduler = Thread.new do
      outgoing.each do |o|
        queue.enq o
        threads.synchronize { switch.signal }
      end
      @sysexit = true
      queue.close # wakes a deq that raced past the break check on an empty queue
    end

    scheduler.join
    uploader.join
    threads.each { |t| t&.join }
  end
end
ITERATIONS = 1
BENCH_ARGS = { iterations: ITERATIONS }

# Thread-pooled run: RESOURCES items, at most THREAD_MAX concurrent "uploads".
multi = Benchmark.ips(quiet: true) do |b|
  b.config BENCH_ARGS
  b.report('multi') { SuperUploader.new((1..RESOURCES).to_a).execute }
end

# Sequential baseline: the same items, one blocking "upload" at a time.
single = Benchmark.ips(quiet: true) do |b|
  b.config BENCH_ARGS
  b.report('single') do
    results = []
    SuperUploader.new((1..RESOURCES).to_a).outgoing.each do |o|
      sleep(BLOCKING_TIME)
      results.push o # mirror the bookkeeping the threaded path does
    end
  end
end
# Append one row per run; the header names the units explicitly.
unless File.exist?('cloudinary_uploader_stats.csv')
  ::CSV.open('cloudinary_uploader_stats.csv', 'w') do |csv|
    csv << %w(resources threads io_blocking_seconds elapsed_seconds)
  end
end

::CSV.open('cloudinary_uploader_stats.csv', 'a') do |csv|
  # Entry#microseconds is the measured wall time for the run; convert to seconds.
  csv << [RESOURCES, 1, BLOCKING_TIME, single.entries.first.microseconds.fdiv(1_000_000)]
  csv << [RESOURCES, SuperUploader::THREAD_MAX, BLOCKING_TIME, multi.entries.first.microseconds.fdiv(1_000_000)]
end
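
# Rough expectation, not a measured result: with RESOURCES = 30, THREAD_MAX = 10
# and BLOCKING_TIME = 3, the sequential pass sleeps 30 * 3 = 90 s, while the
# pooled pass needs about (30 / 10) * 3 = 9 s plus scheduling overhead.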