Apply random deep style filters to a bunch of images
#!/usr/bin/env ruby
#
# Takes an arbitrary number of files as arguments
# and randomly generates PER_IMAGE filtered versions
# of each image while using each filter an equal
# number of times (approx).
#
# It uses batch inputs to minimize API calls,
# but you should still expect at least one API call per filter.
#
# Example usage:
#
# export ALGORITHMIA_API_KEY=sim.....
# ./random-filter ~/Pictures/*.jpg
#
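# Requires the algorithmia gem (gem install algorithmia)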
require 'algorithmia'
require 'pathname'
require 'pp'
# Change this to set the number of filters to apply to each image
PER_IMAGE = 4
# Filters that just never seemed to appeal to me
IGNORE_FILTERS = %w(plentiful alien_goggles colorful_blocks green_zuma aqua cinnamon_rolls really_hot space_pizza purp_paper)
ALL_FILTERS = %w(alien_goggles aqua blue_brush blue_granite bright_sand cinnamon_rolls clean_view colorful_blocks colorful_dream crafty_painting creativity crunch_paper dark_rain dark_soul deep_connections dry_skin far_away gan_vogh gred_mash green_zuma hot_spicy neo_instinct oily_mcoilface plentiful post_modern purp_paper purple_pond purple_storm rainbow_festival really_hot sand_paper smooth_ride space_pizza spagetti_accident sunday yellow_collage yellow_paper) - IGNORE_FILTERS
all_paths = ARGV
client = Algorithmia.client(ENV['ALGORITHMIA_API_KEY'])
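# Hosted data collection used for uploads and rendered output ('.my' refers to your own user collection)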
data_dir_uri = 'data://.my/test'
data_dir = client.dir(data_dir_uri)
# Pre-upload any files that aren't already uploaded
# So that we can use them in multiple algo calls
# without re-uploading them
dir_listing = data_dir.each_file.map{|f| f.basename}.to_a
all_paths.each do |p|
  basename = Pathname.new(p).basename
  unless dir_listing.include?(basename.to_s)
    puts "Uploading #{basename}"
    data_dir.put_file(p)
  end
end
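# Helper: build the output filename for a given source image and filter,
# e.g. "cat.jpg" + "gan_vogh" => "cat-gan_vogh.jpg"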
def generated_filename(source, filter)
  "#{Pathname.new(source).basename('.*')}-#{filter}.jpg"
end
# Generate pairings of filters with paths such that
# each image has PER_IMAGE unique filters
# and each filter gets assigned to approx same number of images
# Also, avoid re-generating images from previous runs
filter_paths = {}
filter_set = []
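# Re-list the collection so outputs generated in previous runs get skipped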
dir_listing = data_dir.each_file.map{|f| f.basename}.to_a
all_paths.shuffle.each do |p|
  redoes = 0
  PER_IMAGE.times do
    filter_set = ALL_FILTERS.shuffle if filter_set.empty?
    f = filter_set.pop
    if !dir_listing.include?(generated_filename(p, f))
      filter_paths[f] ||= []
      filter_paths[f] << p
    elsif redoes < ALL_FILTERS.length
      # Output already exists; try another filter (bounded so this can't loop forever)
      redoes += 1
      redo
    end
  end
end
# Now call the algorithm for each filter-path pairing
# since the algorithm allows batch operation per-filter
filter_paths.each do |filter, paths|
  next if paths.empty?
  src_paths = paths.map do |p|
    basename = Pathname.new(p).basename
    data_dir.file(basename).data_uri
  end
  save_paths = paths.map do |p|
    "#{data_dir_uri}/#{generated_filename(p, filter)}"
  end
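  # Batch payload: parallel lists of source images and save destinations, plus the filter name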
  data = {
    images: src_paths,
    savePaths: save_paths,
    filterName: filter,
  }
  puts "\n### Rendering #{src_paths.length} images with #{filter} filter..."
  # Retry transient failures a few times, dumping the request on the final attempt
  for i in 0..3
    begin
      algo = client.algo('deeplearning/DeepFilter/0.5.7')
      algo.set_timeout(300)
      resp = algo.pipe(data)
      resp.result['savePaths'].each { |p| puts p }
      break
    rescue => e
      puts "Error: #{e.inspect}"
      if i == 3
        pp data
      else
        puts "Retrying..."
      end
    end
  end
end