Useful AWS S3 methods to upload, download, delete, and recursively check for existing files
# OVERWRITE_AWS_CONFIG is used to create an S3 client that points at the production environment even when these methods are run in development
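# For illustration only (hypothetical keys; the real constant is expected to be
# defined elsewhere in the app), an aws-sdk v1 config hash could look like:
#   OVERWRITE_AWS_CONFIG = {
#     access_key_id:     ENV['PROD_AWS_ACCESS_KEY_ID'],
#     secret_access_key: ENV['PROD_AWS_SECRET_ACCESS_KEY']
#   }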
require 'fileutils'
module S3Utils
  # Returns true as soon as one file is found under dir_name, recursing into sub-folders.
  def self.files_exists_in?(dir_name)
    bucket = AWS::S3.new(OVERWRITE_AWS_CONFIG).buckets[APP_CONF[:s3][:bucket]]
    bucket.as_tree(prefix: "#{dir_name}").children.each do |obj|
      if obj.branch?
        return true if self.files_exists_in?(obj.prefix)
      else
        next if obj.key.last == '/' # skip the "folder" placeholder object
        return true
      end
    end
    false
  end

  # Returns the keys of the files sitting directly under s3_path (non-recursive).
  def self.list_files_in(s3_path)
    bucket = AWS::S3.new(OVERWRITE_AWS_CONFIG).buckets[APP_CONF[:s3][:bucket]]
    files = []
    bucket.as_tree(prefix: "#{s3_path}").children.each do |obj|
      unless obj.branch?
        next if obj.key.last == '/' # skip the "folder" placeholder object
        files << obj.key
      end
    end
    files
  end

  # Recursively downloads every file under s3_path into Rails.root/log/<local_dir_name>,
  # mirroring the S3 folder structure locally.
  def self.download_files(s3_path, recur_num=1, local_dir_name=nil)
    local_dir_name ||= s3_path.split('/').last
    dir_path = "#{Rails.root}/log/#{local_dir_name}"
    FileUtils.rm_rf dir_path if Dir.exists? dir_path
    Dir.mkdir dir_path
    bucket = AWS::S3.new(OVERWRITE_AWS_CONFIG).buckets[APP_CONF[:s3][:bucket]]
    puts "#{' '*recur_num*2}--> In folder #{s3_path}"
    bucket.as_tree(prefix: "#{s3_path}").children.each do |obj|
      if obj.branch?
        self.download_files(obj.prefix, recur_num+1, "#{local_dir_name}/#{obj.prefix.split('/').last}")
      else
        next if obj.key.last == '/' # skip the "folder" placeholder object
        puts "#{' '*(recur_num*2+2)}--> Downloading #{obj.key} to #{dir_path.gsub(Rails.root.to_s, '')}/#{obj.key.split('/').last}"
        File.open("#{dir_path}/#{obj.key.split('/').last}", 'wb') do |file|
          bucket.objects[obj.key].read do |chunk|
            file.write(chunk)
          end
        end
      end
    end
  end

  # Uploads every file found under local_path to s3_path, keeping the relative paths.
  def self.upload_files(local_path, s3_path)
    bucket = AWS::S3.new(OVERWRITE_AWS_CONFIG).buckets[APP_CONF[:s3][:bucket]]
    local_path = local_path+'/' unless local_path.last == '/'
    Dir.glob("#{local_path}**/**").each do |file|
      next if File.directory? file
      file_name = file.gsub(local_path, '')
      puts " --> Uploading #{file_name} to s3://#{bucket.name}/#{s3_path}/#{file_name}"
      bucket.objects["#{s3_path}/#{file_name}"].write(file: file)
    end
  end

  # Recursively deletes every file under s3_path.
  def self.delete_files(s3_path, recur_num=1)
    bucket = AWS::S3.new(OVERWRITE_AWS_CONFIG).buckets[APP_CONF[:s3][:bucket]]
    puts "#{' '*recur_num*2}--> In folder #{s3_path}"
    bucket.as_tree(prefix: "#{s3_path}").children.each do |obj|
      if obj.branch?
        self.delete_files(obj.prefix, recur_num+1)
      else
        next if obj.key.last == '/' # skip the "folder" placeholder object
        puts "#{' '*(recur_num*2+2)}--> Deleting #{obj.key}"
        AWS::S3::Client.new(OVERWRITE_AWS_CONFIG).delete_object(bucket_name: APP_CONF[:s3][:bucket], key: obj.key)
      end
    end
  end

  # Downloads the whole S3 directory dir_name, yields to the given block, then
  # optionally deletes the local copy and the S3 files once the block returns.
  def self.process_files_for(dir_name, delete_s3_files=true, delete_local_files=true)
    puts "Processing #{dir_name}..."
    dir_path = "#{Rails.root}/log/#{dir_name.split('/').last}"
    S3Utils.download_files(dir_name)
    yield
    if Dir.exists?(dir_path) and delete_local_files
      puts " --> Deleting local directory #{dir_path}"
      FileUtils.rm_rf(dir_path)
    end
    if delete_s3_files
      puts " --> Deleting s3 directory #{dir_name}"
      S3Utils.delete_files(dir_name)
    end
  end
end
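
# Example usage (a sketch -- the S3 prefix and block body are hypothetical, and it
# assumes OVERWRITE_AWS_CONFIG plus APP_CONF[:s3][:bucket] are configured as above):
#
#   S3Utils.upload_files("#{Rails.root}/tmp/exports", 'exports/2015-04-28')
#
#   if S3Utils.files_exists_in?('exports/2015-04-28')
#     S3Utils.process_files_for('exports/2015-04-28') do
#       Dir.glob("#{Rails.root}/log/2015-04-28/*").each { |f| puts f }
#     end
#   end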