Secure S3 Storage for Tddium
#! /usr/bin/env ruby
# Copyright (c) 2012 Solano Labs All Rights Reserved
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This script enables encrypted upload/download of files from S3.
# The intended use case is for moving large objects between your
# local environment and Tddium without polluting your git repository.
#
# We use OpenSSL with AES 192 in CBC mode to provide privacy
# http://www.schneier.com/blog/archives/2009/07/another_new_aes.html
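#
# For reference, objects are written by the stock openssl(1) enc command,
# so a ciphertext fetched by other means can be decrypted by hand with
# something like the following (file names are placeholders; openssl
# prompts for the passphrase):
#
#   openssl enc -d -aes-192-cbc -md sha1 -in fixtures.tar.gz.enc -out fixtures.tar.gz
#
# The -md sha1 option must match the digest this script passes to openssl
# when deriving the key from the passphrase.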
#
# KNOWN LIMITATIONS:
# 1. Confidentiality only (no integrity protection, e.g. an HMAC)
# 2. Plain text is not compressed before encryption
# 3. Symmetric encryption only (in fact AES 192 in CBC mode only)
#
# FUTURE:
# 1. Add an integrity/authentication tag (e.g. HMAC of the ciphertext)
# 2. Encrypt and sign with public key
# 3. Consider generating key from pass phrase with scrypt/bcrypt/PBKDF2
#
# There are two pre-shared secrets:
#
# 1. Passphrase used to derive the symmetric key
# Generate the passphrase in your environment and use Tddium config
# variables to inject it into the build via an environment variable
# 2. An AWS identity
# Generate an IAM identity with access only to the S3 bucket with the
# objects needed by the build. You may wish to create two IAM
# identities: one with read/write privileges in your environment and
# one that is read-only for use in Tddium. In either case, send the
# AWS region, secret key id, and access key via a Tddium config variable.
# An example IAM policy is sketched below.
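#
# For illustration, a read-only IAM policy of the kind described above
# might look like the following sketch (the bucket name "my-tddium-objects"
# is a placeholder; the read/write identity would also need actions such
# as s3:PutObject and s3:AbortMultipartUpload):
#
# {
#   "Statement": [
#     {
#       "Effect": "Allow",
#       "Action": ["s3:GetObject", "s3:ListBucket"],
#       "Resource": [
#         "arn:aws:s3:::my-tddium-objects",
#         "arn:aws:s3:::my-tddium-objects/*"
#       ]
#     }
#   ]
# }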
#
# Environment Variables (example usage below):
# 1. TDDIUM_S3_REGION - AWS S3 region, defaults to us-east-1
# 2. TDDIUM_S3_KEY_ID - AWS S3 secret key ID
# 3. TDDIUM_S3_SECRET - AWS S3 secret access key
# 4. TDDIUM_S3_PASSPHRASE - OpenSSL AES passphrase
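#
# Example usage (a sketch: the script name "s3store.rb" and the bucket
# "my-tddium-objects" are placeholders; the bucket is taken from the first
# label of the URL's host):
#
#   export TDDIUM_S3_KEY_ID=AKIA...
#   export TDDIUM_S3_SECRET=...
#   export TDDIUM_S3_PASSPHRASE='long random passphrase'
#
#   # encrypt and upload a fixture archive
#   ./s3store.rb store fixtures.tar.gz \
#       s3://my-tddium-objects.s3.amazonaws.com/fixtures.tar.gz.enc
#
#   # download and decrypt it again (e.g. inside a Tddium worker)
#   ./s3store.rb fetch \
#       s3://my-tddium-objects.s3.amazonaws.com/fixtures.tar.gz.enc fixtures.tar.gz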
require 'fog'
require 'uri'
require 'thor'
require 'yaml'
require 'digest/md5'
require 'highline/import'

class S3Store
  # Build a Fog S3 connection from the optional config hash, falling back
  # to the TDDIUM_S3_* environment variables for anything not supplied.
  def initialize(config)
    @config = config || {:aws => {}}
    aws = @config[:aws]
    region = aws[:region] || ENV['TDDIUM_S3_REGION'] || 'us-east-1'
    aws_key_id = aws[:aws_access_key_id] || ENV['TDDIUM_S3_KEY_ID']
    aws_secret = aws[:aws_secret_access_key] || ENV['TDDIUM_S3_SECRET']
    if aws_key_id.nil? || aws_secret.nil? then
      raise "no AWS keys specified"
    end
    @creds = {
      :provider => 'AWS',
      :region => region,
      :aws_access_key_id => aws_key_id,
      :aws_secret_access_key => aws_secret
    }
    @store = Fog::Storage.new(@creds)
  end

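  # The CLI below can hand in a config hash loaded from a YAML file (-c).
  # Given the symbol keys read above, such a file would look something like
  # this sketch (credentials are placeholders):
  #
  #   :aws:
  #     :region: us-east-1
  #     :aws_access_key_id: AKIA...
  #     :aws_secret_access_key: ...
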
  # Start "openssl enc" to encrypt the file at path; returns a read pipe
  # that yields the ciphertext.  The passphrase travels over a dedicated
  # pipe (-pass fd:N) so it never appears on the command line.
  def openssl_encrypt(path, passphrase)
    ctl = IO.pipe
    fd = "fd:#{ctl[0].fileno}"
    cipher = '-aes-192-cbc'
    args = ['enc', '-e', cipher, '-md', 'sha1', '-pass', fd, '-in', path]
    return openssl_run(passphrase, true, ctl, args)
  end

  # Start "openssl enc" to decrypt into the file at path; returns a write
  # pipe that accepts the ciphertext.
  def openssl_decrypt(path, passphrase)
    ctl = IO.pipe
    fd = "fd:#{ctl[0].fileno}"
    cipher = '-aes-192-cbc'
    args = ['enc', '-d', cipher, '-md', 'sha1', '-pass', fd, '-out', path]
    return openssl_run(passphrase, false, ctl, args)
  end

  # Fork a detached openssl child (double fork plus setsid) wired to two
  # pipes: ctl carries the passphrase to openssl's -pass fd:N descriptor,
  # and data carries the ciphertext (openssl's stdout when encrypting, its
  # stdin when decrypting).  Returns the parent's end of the data pipe.
  def openssl_run(passphrase, encrypt, ctl, args)
    data = IO.pipe
    pid = Kernel.fork
    if pid.nil? then
      # First child: start a new session and fork again, so openssl is not
      # a direct child of the calling process.
      Process.setsid
      pid = Kernel.fork
      Kernel.exit(0) unless pid.nil?
      ctl[1].close
      devnull = File.open('/dev/null', 'w')
      $stderr.reopen(devnull)
      if encrypt then
        # openssl reads the plaintext itself (-in) and writes ciphertext
        # to stdout, which the parent captures from the data pipe.
        data[0].close
        $stdin.reopen(File.open('/dev/null', 'r'))
        $stdout.reopen(data[1])
      else
        # openssl reads ciphertext from stdin (the data pipe) and writes
        # plaintext to the output file itself (-out).
        data[1].close
        $stdin.reopen(data[0])
        $stdout.reopen(devnull)
      end
      # Map ctl[0] to itself so the -pass fd:N descriptor survives exec;
      # pipes are close-on-exec by default on Ruby >= 2.0.
      Kernel.exec('openssl', *args, ctl[0] => ctl[0])
    end
    # Parent: keep only the end of the data pipe we need ...
    result_fd = nil
    if encrypt then
      data[1].close
      result_fd = data[0]
    else
      data[0].close
      result_fd = data[1]
    end
    # ... and feed openssl the passphrase over the control pipe.
    ctl[0].close
    ctl[1].puts(passphrase)
    ctl[1].puts(passphrase)
    ctl[1].close
    return result_fd
  end

  # Stream the (already encrypted) data from file to S3 with the multipart
  # upload API, 8 MiB at a time, so the object never has to fit in memory.
  # Returns the total number of bytes uploaded.
  def upload_multipart(file, bucket, obj)
    rv = @store.initiate_multipart_upload(bucket, obj)
    id = rv.body['UploadId']
    parts = []
    chunk = 1
    total_data = 0
    chunk_size = 8388608   # 8 MiB; S3 parts (except the last) must be >= 5 MiB
    while true do
      data = file.read(chunk_size)
      break if data.nil?
      rv = @store.upload_part(bucket, obj, id, chunk, data)
      parts.push(rv.headers['ETag'])
      chunk += 1
      total_data += data.length
    end
    rv = @store.complete_multipart_upload(bucket, obj, id, parts)
    return total_data
  end

  # Encrypt file_path with passphrase and upload the ciphertext to the
  # bucket/key named by s3_url (the bucket is the first label of the host).
  def upload_secure(file_path, s3_url, passphrase)
    s3_uri = URI.parse(s3_url)
    s3_path = s3_uri.path.sub(/^\//, '')
    s3_bucket = s3_uri.host.sub(/[.].*/, '')
    s3_dir = @store.directories.get(s3_bucket)
    if s3_dir == nil then
      raise "missing bucket #{s3_bucket}"
    end
    pipe = openssl_encrypt(file_path, passphrase)
    upload_multipart(pipe, s3_bucket, s3_path)
  end

  # Fetch the object named by s3_url and decrypt it to file_path.
  def download_secure(s3_url, file_path, passphrase)
    s3_uri = URI.parse(s3_url)
    if s3_uri.path.nil? || s3_uri.host.nil? then
      raise "#{s3_uri.inspect} does not look like a valid s3 URL"
    end
    s3_path = s3_uri.path.sub(/^\//, '')
    s3_bucket = s3_uri.host.sub(/[.].*/, '')
    s3_dir = @store.directories.get(s3_bucket)
    if s3_dir == nil then
      raise "missing bucket #{s3_bucket}"
    end
    pipe = openssl_decrypt(file_path, passphrase)
    # Stream the object in chunks straight into the decrypting openssl child.
    s3_dir.files.get(s3_path) do |chunk, remaining, total|
      pipe.write(chunk)
    end
    pipe.close
  end

  # Read n bytes from /dev/urandom and return them hex-encoded; used to
  # generate a passphrase when none is supplied.
  def self.urandom(n)
    data = File.read('/dev/urandom', n)
    return data.unpack('H*').shift
  end

  def self.roundup(x, y)
    return ((x + y - 1) / y) * y
  end
end
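
# S3Store can also be driven directly from Ruby; a minimal sketch, assuming
# the TDDIUM_S3_* environment variables above are set and that the bucket
# and file names are placeholders:
#
#   store = S3Store.new(nil)
#   store.upload_secure('fixtures.tar.gz',
#                       's3://my-tddium-objects.s3.amazonaws.com/fixtures.tar.gz.enc',
#                       ENV['TDDIUM_S3_PASSPHRASE'])
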
class S3StoreCmd < Thor
  desc "store <file> <url>", "store a file in s3"
  method_option :passprompt, :aliases => '-P', :type => :boolean
  method_option :passphrase, :aliases => '-p', :type => :string, :default => nil
  method_option :config, :aliases => '-c', :type => :string, :default => nil
  def store(file_path, s3_url)
    set_shell
    config = YAML.load_file(options[:config]) if options[:config]
    passphrase = YAML.load_file(options[:passphrase]) if options[:passphrase]
    if passphrase.nil? then
      if options[:passprompt] then
        passphrase = HighLine.new.ask('Passphrase: ') { |q| q.echo = "*" }
      elsif ENV.member?('TDDIUM_S3_PASSPHRASE')
        passphrase = ENV['TDDIUM_S3_PASSPHRASE']
      else
        # No passphrase anywhere: generate a random one and print it so the
        # object can still be decrypted later.
        passphrase = S3Store.urandom(32)
        puts "Passphrase is: #{passphrase}"
      end
    end
    store = S3Store.new(config)
    store.upload_secure(file_path, s3_url, passphrase)
  end

desc "fetch <url> [file]", "fetch a file in s3"
method_option :passphrase, :aliases => '-p', :type => :string, :default => nil
method_option :config, :aliases => '-c', :type => :string, :default => nil
def fetch(s3_url, *args)
set_shell
file_path = args.first
s3_uri = URI.parse(s3_url)
s3_path = s3_uri.path.sub(/^\//, '')
file_path = File.basename(s3_path) if file_path.nil?
config = YAML.load_file(options[:config]) if options[:config]
passphrase = YAML.load_file(options[:passphrase]) if options[:passphrase]
passphrase ||= ENV['TDDIUM_S3_PASSPHRASE']
passphrase ||= HighLine.ask('Passphrase: ') { |q| q.echo = "*" }
store = S3Store.new(config)
store.download_secure(s3_url, file_path, passphrase)
end
  protected

  # Fall back to Thor's plain shell when stdout or stderr is not a tty.
  def set_shell
    if !$stdout.tty? || !$stderr.tty? then
      @shell = Thor::Shell::Basic.new
    end
  end
end

S3StoreCmd.start