@tader
Created October 25, 2013 13:05
#!/usr/bin/env ruby
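#
# Usage (a sketch; the filename is whatever you save this gist as, and the
# arguments are the optional overrides read from ARGV below):
#   ./build_bosh_stemcells.rb [region] [ubuntu_ami] [flavor]
# Assumes BOSH_AWS_ACCESS_KEY_ID, BOSH_AWS_SECRET_ACCESS_KEY,
# BOSH_VPC_SUBDOMAIN and BOSH_VPC_DOMAIN are exported in the environment.
#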
require 'fog'
require 'yaml'
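# Ubuntu packages needed to build BOSH's native gem extensions (assumption:
# libxml2/libxslt for nokogiri, libpq for pg, libsqlite3 for sqlite3, libmysqlclient for mysql2)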
PACKAGES = 'build-essential git-core ruby1.9.1 ruby1.9.1-dev libxml2-dev libxslt1-dev libpq-dev libsqlite3-dev zlib1g-dev libmysqlclient-dev libpgsql-ruby1.9.1'
region = ARGV[0] || 'eu-west-1'
ami = ARGV[1] || 'ami-ce7b6fba' # this needs to be an Ubuntu ami (I use 12.04)
flavor = ARGV[2] || 'c1.medium' # c1.medium has more CPU power and is a little quicker
# Helper functions
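#   run - execute a command on the remote server over SSH and echo its stdout
#   log - print a progress message prefixed with a green '#'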
def run(server, command)
  log "Running -> #{command}\n"
  server.ssh(command).each { |x| puts ">> #{x.stdout}" }
end

def colorize(text, color_code)
  "\e[#{color_code}m#{text}\e[0m"
end

def red(text);   colorize(text, 31); end
def green(text); colorize(text, 32); end
def log(text);   print("#{green("#")} #{text}"); end
log "Creating server\n"
ec2 = Fog::Compute.new({
  :provider              => 'AWS',
  :region                => region,
  :aws_access_key_id     => ENV['BOSH_AWS_ACCESS_KEY_ID'],
  :aws_secret_access_key => ENV['BOSH_AWS_SECRET_ACCESS_KEY']
})
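# servers.bootstrap boots the AMI and blocks until the instance answers over SSH,
# logging in as 'ubuntu' with the local ~/.ssh/id_rsa key pair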
server = ec2.servers.bootstrap(
  :image_id         => ami,
  :flavor_id        => flavor,
  :private_key_path => '~/.ssh/id_rsa',
  :public_key_path  => '~/.ssh/id_rsa.pub',
  :username         => 'ubuntu'
)
run(server, "sudo apt-get update") # Update packages in apt
sleep 5 # Without this pause I was getting odd errors about missing packages!
run(server, "sudo apt-get -y install #{PACKAGES}") # Install all pre-requisite Ubuntu packages
run(server, "echo 'install: --no-rdoc --no-ri' > ~/.gemrc") # Skip gem docs :-)
run(server, 'sudo gem install bundler') # Install bundler
run(server, 'git clone git://github.com/cloudfoundry/bosh.git') # Clone bosh
run(server, 'cd bosh && bundle install --deployment') # Bundle install for bosh
log "Publishing MicroBOSH stemcell\n"
# Download the latest stemcell for MicroBOSH from us-east-1
run(server, 'curl http://bosh-jenkins-artifacts.s3.amazonaws.com/last_successful_micro-bosh-stemcell-aws.tgz -o /tmp/microbosh-stemcell-aws.tgz')
# Run the 'artifacts:candidates:publish' rake task to publish the tgz as an AMI in our own region.
# The last step in this rake task is expected to fail!
run(server, "cd bosh && sudo AWS_ACCESS_KEY_ID=\"#{ENV['BOSH_AWS_ACCESS_KEY_ID']}\" AWS_SECRET_ACCESS_KEY=\"#{ENV['BOSH_AWS_SECRET_ACCESS_KEY']}\" bundle exec rake artifacts:candidates:publish['/tmp/microbosh-stemcell-aws.tgz']")
# Extract stemcell.MF from the light stemcell; it contains the new AMI id
run(server, "cd /tmp && tar xvzf light-microbosh-stemcell-aws.tgz stemcell.MF")
log "Looking up new AMI\n"
# Read the AMI from the YAML file and tell the user what to export
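# stemcell.MF looks roughly like this (illustrative only; other fields are omitted):
#   ---
#   cloud_properties:
#     ami:
#       eu-west-1: ami-xxxxxxxx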
yaml = server.ssh("cat /tmp/stemcell.MF")[0].stdout
stemcell = YAML.load(yaml)
if stemcell.is_a? Hash
  ami = stemcell['cloud_properties']['ami'][region]
  log "Run 'export BOSH_OVERRIDE_MICRO_STEMCELL_AMI=#{ami}'\n"
end
log "Publishing BOSH stemcell\n"
# Download the latest stemcell for BOSH from us-east-1
run(server, 'curl http://bosh-jenkins-artifacts.s3.amazonaws.com/last_successful_bosh-stemcell-aws.tgz -o /tmp/bosh-stemcell-aws.tgz')
# Run the 'artifacts:candidates:publish' rake task to publish the tgz as an AMI in our own region.
# The last step in this rake task is expected to fail!
run(server, "cd bosh && sudo AWS_ACCESS_KEY_ID=\"#{ENV['BOSH_AWS_ACCESS_KEY_ID']}\" AWS_SECRET_ACCESS_KEY=\"#{ENV['BOSH_AWS_SECRET_ACCESS_KEY']}\" bundle exec rake artifacts:candidates:publish['/tmp/bosh-stemcell-aws.tgz']")
# This time, upload the light stemcell to the bosh-blobstore bucket created earlier
# in the process; we need the aws-s3 gem for this
run(server, "sudo gem i aws-s3")
blob_store = "#{ENV['BOSH_VPC_SUBDOMAIN']}-#{ENV['BOSH_VPC_DOMAIN'].gsub(/\./,'-')}-bosh-blobstore"
s3_endpoint = "s3-#{region}.amazonaws.com"
# This script, which is interpreted on the server, uploads the stemcell to the bucket and then reports back its URL
cmd = <<-cmd
require "aws/s3"
require "uri"
include AWS::S3
AWS::S3::DEFAULT_HOST.replace "#{s3_endpoint}"
AWS::S3::Base.establish_connection!(:access_key_id => "#{ENV['BOSH_AWS_ACCESS_KEY_ID']}", :secret_access_key => "#{ENV['BOSH_AWS_SECRET_ACCESS_KEY']}")
S3Object.store("light-bosh-stemcell-aws.tgz", open("/tmp/light-bosh-stemcell-aws.tgz"), "#{blob_store}", :access => :public_read, :content_type => "application/x-compressed")
uri = URI(S3Object.find("light-bosh-stemcell-aws.tgz", "#{blob_store}").url)
puts "\#{uri.scheme}://\#{uri.host}\#{uri.path}"
cmd
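# Wrap the script for a one-shot run with `ruby -e` on the server. Note that the heredoc
# above must not contain single quotes, since the whole script is placed inside shell single quotes.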
cmd = "ruby -e '#{cmd}'"
# Run the script
url_for_stemcell_lite = server.ssh(cmd)[0].stdout.strip
# Report back
log "Now run the following before proceding with install;\n"
puts "export BOSH_OVERRIDE_MICRO_STEMCELL_AMI=#{ami}"
puts "export BOSH_OVERRIDE_LIGHT_STEMCELL_URL=#{url_for_stemcell_lite}"
# Destroy the server, we're done with it!
log "Destroying server\n"
server.destroy