namespace :cache do
  desc "Clears Rails cache"
  task :clear => :environment do
    Rails.cache.clear
  end
end
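
# Uploads compiled assets to cloud storage via FogCloudAssets (fog_cloud_assets.rb).
# Typically run together with precompilation, as the deploy script below does:
#   bundle exec rake assets:precompile cloud_assets:sync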
namespace :cloud_assets do
  task :sync do
    #headers = -> file {
    #  if file =~ /opensans|fontawesome/i || file =~ /angular\/templates/i
    #    { 'access-control-allow-origin' => "*" }
    #  else
    #    {}
    #  end
    #}

    # Disabled for Rails 4
    #Rails.application.initialize!(:assets)
    #pins = Dir.chdir(Rails.public_path) { Dir["assets/pins/**/**"] }
    #angular_templates = Dir.chdir(Rails.public_path) { Dir["assets/angular/templates/**/**"] }

    require 'fog_cloud_assets'
    ca = FogCloudAssets.new(
      storage: {
        provider: 'AWS',
        aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
      },
      container: ENV['AWS_BUCKET'],
      #ignore: %r|pins/.*-[a-f0-9]{32}\.|,
      #always_upload: pins + angular_templates,
      #headers: headers,
      reupload: ENV['CLOUD_ASSETS_REUPLOAD'],
      remote_delete: ENV['CLOUD_ASSETS_REMOTE_DELETE'])
    ca.sync
  end
end

# Description:
#   Deploy apps.
#
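# Configuration:
#   HUBOT_JENKINS_URL, HUBOT_JENKINS_BUILD_TOKEN - Jenkins base URL and build token
#   HUBOT_<APP>_JENKINS_URL - optional per-app Jenkins URL override
#   HUBOT_<APP>_JOB - Jenkins job that performs the deploy
#   HUBOT_<APP>_APP - Heroku app name passed to the Jenkins job
#   HUBOT_<APP>_ACL - "everyone" or a comma-separated list of allowed e-mail addresses
#   HUBOT_<APP>_DEFAULT_BRANCH - branch deployed when none is given (defaults to "master")
#   HUBOT_ROOMS - comma-separated rooms; the first one receives deploy notifications
#   (<APP> is PRODUCTION or STAGING; names are derived from the app list below.)
#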
# Commands:
#   hubot deploy to staging - Deploy the github "master" branch to staging. Run migrations.
#   hubot deploy to production - Deploy the github "prod" branch to production. Run migrations.
#   hubot deploy <branch> to <production|staging> - Deploy a specific branch to staging/production. Run migrations.
#   hubot quick deploy to staging - Deploy the github "master" branch to staging. Don't run migrations.
#   hubot quick deploy to production - Deploy the github "prod" branch to production. Don't run migrations.
#   hubot quick deploy <branch> to <production|staging> - Deploy a specific branch to staging/production. Don't run migrations.
#   hubot disable deploys to <production|staging> <reason> - Temporarily disable deploys due to <reason>.
#   hubot enable deploys to <production|staging> - Re-enable deploys.

process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0" # Avoids DEPTH_ZERO_SELF_SIGNED_CERT error for self-signed certs

APPS = ['production', 'staging']
JENKINS_JOBS = (process.env["HUBOT_#{app.replace(/[ ]/g, '_')}_JOB".toUpperCase()] for app in APPS)

module.exports = (robot) ->
  deployData = ->
    robot.brain.data.deploy or= {
      deploy_start_times: {},
      deploys_disabled: {}
    }

  console.log "Known jenkins jobs: ", JENKINS_JOBS

  markDeployStarted = (url) ->
    deployData().deploy_start_times[url] = Date.now()

  cleanupDeployData = (url) ->
    delete deployData().deploy_start_times[url]

  deployTime = (url) ->
    t = deployData().deploy_start_times[url]
    return unless t
    elapsed_msecs = Date.now() - t
    if elapsed_msecs < 60000 # 1 min
      "#{Math.round(elapsed_msecs / 1000)}s"
    else
      "#{(elapsed_msecs / 60000).toFixed(1)}m"

  allowedToDeploy = (user, app) ->
    username = user.email_address
    acl = process.env["HUBOT_#{app}_ACL".toUpperCase()] || ''
    acl == 'everyone' or acl.split(",").indexOf(username) >= 0

  areDeploysDisabled = (app) -> deployData().deploys_disabled[app]

  enableDeploys = (app) ->
    deployData().deploys_disabled or= {}
    deployData().deploys_disabled[app] = null

  disableDeploys = (app, user, reason) ->
    deployData().deploys_disabled or= {}
    deployData().deploys_disabled[app] =
      disabler: user
      time: (new Date()).toString()
      reason: reason

  ######
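
  # Endpoint Jenkins posts build status to; the JSON payload is expected to carry
  # name, build.parameters, build.phase, build.status and build.url (the shape the
  # Jenkins Notification plugin sends).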
  robot.router.post "/hubot/jenkins_status", (req, res) ->
    data = req.body
    res.end "OK"
    message = null
    #console.log data
    if data.name in JENKINS_JOBS
      app = data.build.parameters.APP
      deployment = if data.build.parameters.QUICK == 'true'
        "Quick deployment"
      else
        "Deployment"
      if data.build.phase == 'STARTED'
        message = "#{deployment} of #{app} started"
        markDeployStarted(data.build.url)
      else if data.build.phase == 'FINISHED' || data.build.phase == 'FINALIZED'
        message = "#{deployment} of #{app} finished. #{data.build.status}"
        time = deployTime(data.build.url)
        if time
          cleanupDeployData(data.build.url)
          message += ", #{time}"
    if message
      message += " (#{data.build.full_url})"
      #user = robot.userForId 'broadcast'
      #user.room = process.env.HUBOT_CAMPFIRE_ROOMS.split(",")[0]
      #user.type = 'groupchat'
      #robot.send user, message
      room = process.env.HUBOT_ROOMS.split(",")[0]
      robot.messageRoom room, message
robot.respond RegExp("(enable|disable) deploys? to (#{APPS.join('|')})(.*)", "i"), (msg) -> | |
app = msg.match[2].replace(/[ ]/g, "_") | |
reason = msg.match[3] | |
user = msg.message.user | |
unless allowedToDeploy(user, app) | |
msg.send "Hey, #{user.name}, you are not allowed to deploy to #{app}!" | |
console.log "Don't allow", user, "to deploy", app | |
return | |
if msg.match[1] == 'enable' | |
enableDeploys(app) | |
msg.send "Deploys to #{app} enabled" | |
else | |
disableDeploys(app, user.name, reason) | |
msg.send "Deploys to #{app} temporarily disabled" | |
robot.respond RegExp("(quick )?deploy( [\\w\\-/_.]+)? to (#{APPS.join('|')})(.*)", "i"), (msg) -> | |
app = msg.match[3].replace(/[ ]/g, '_') | |
heroku_app = process.env["HUBOT_#{app}_APP".toUpperCase()] | |
job = process.env["HUBOT_#{app}_JOB".toUpperCase()] | |
unless heroku_app and job | |
msg.send "We have no #{app} app yet, sorry!" | |
return | |
user = msg.message.user | |
if disabled_info = areDeploysDisabled(app) | |
msg.send "Hey, #{user.name}, deploys are disabled by #{disabled_info.disabler} at #{disabled_info.time}, reason is #{disabled_info.reason}" | |
return | |
unless allowedToDeploy(user, app) | |
msg.send "Hey, #{user.name}, you are not allowed to deploy to #{app}!" | |
console.log "Don't allow", user, "to deploy", app | |
return | |
default_branch = process.env["HUBOT_#{app}_DEFAULT_BRANCH".toUpperCase()] or 'master' | |
branch = (msg.match[2] or default_branch).trim() | |
params = | |
token: process.env.HUBOT_JENKINS_BUILD_TOKEN | |
job: job | |
BRANCH: branch | |
APP: heroku_app | |
DEPLOYER: user.name | |
EXTRA: msg.match[4] | |
if msg.match[1] | |
params.QUICK = 'true' | |
quick = ' QUICK' | |
else | |
params.QUICK = 'false' | |
quick = '' | |
url_base = process.env["HUBOT_#{app}_JENKINS_URL".toUpperCase()] or process.env.HUBOT_JENKINS_URL | |
url = "#{url_base}/buildByToken/buildWithParameters" | |
console.log "Making request to #{url} with #{params}" | |
msg | |
.http(url) | |
.query(params) | |
.get() (err, res, body) -> | |
if err | |
msg.send "Jenkins says: #{err}" | |
else if res.statusCode == 302 or body.trim() == 'Scheduled.' or res.statusCode == 201 | |
msg.send "Triggered#{quick} #{heroku_app} deploy (branch #{branch})" | |
else | |
msg.send "Jenkins says: #{res.statusCode} #{body}" |
#!/bin/bash
set -e
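
# Jenkins deploy script. It expects the build parameters sent by the hubot deploy
# script above (APP, BRANCH, DEPLOYER, EXTRA, QUICK) plus the standard Jenkins
# variables GIT_COMMIT, BUILD_NUMBER and BUILD_ID.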

git_push() {
  git push -f "git@heroku.com:$APP.git" heroku_tmp:master
}

set_extra_flags() {
  if [[ "$EXTRA" =~ 'reupload assets' ]]; then
    export CLOUD_ASSETS_REUPLOAD=1
  fi
  if [[ "$EXTRA" =~ 'recompile assets' ]]; then
    export CLOUD_ASSETS_RECOMPILE=1
  fi
  if [[ "$EXTRA" =~ 'cleanup assets' ]]; then
    export CLOUD_ASSETS_REMOTE_DELETE=1
  fi
  if [[ "$EXTRA" =~ 'skip heroku' ]]; then
    export SKIP_HEROKU=1
  fi
  if [[ "$EXTRA" =~ 'clear cache' ]]; then
    export CLEAR_CACHE=1
  fi
}
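
# EXTRA is the free-form text after the app name in the chat command; e.g.
# (illustrative) "hubot deploy to production reupload assets skip heroku" sets
# CLOUD_ASSETS_REUPLOAD and SKIP_HEROKU for this build.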

compile_assets() {
  # Skip precompilation when assets have not changed: compare the last commit that
  # touched app/assets or vendor/assets with the SHA recorded by the previous build.
  current_sha=`git log -n 1 --pretty=format:%H app/assets vendor/assets`
  if [[ -f public/assets/CURRENT_SHA && `cat public/assets/CURRENT_SHA` == $current_sha && "$CLOUD_ASSETS_REUPLOAD" == '' && "$CLOUD_ASSETS_RECOMPILE" == '' ]]; then
    echo "Assets did not change (SHA $current_sha)"
  else
    echo "Recompiling assets"
    bundle install --quiet --without=test
    rm -rf public/assets
    time bundle exec rake assets:precompile cloud_assets:sync
    echo $current_sha > public/assets/CURRENT_SHA
  fi
}

save_deploy_information() {
  cat > lib/deploy_info.rb <<HEREDOC
module DeployInfo
  BRANCH='$BRANCH'
  GIT_COMMIT='$GIT_COMMIT'
  BUILD_NUMBER='$BUILD_NUMBER'
  BUILD_ID='$BUILD_ID'
  DEPLOYER='$DEPLOYER'

  def message
    return @message if defined?(@message)
    text = File.read(__FILE__)
    text =~ /[_]_END__(.*)$/m
    @message = (\$1 || '').strip
  end
  module_function :message
end
__END__
HEREDOC
  git log -1 --pretty=medium >> lib/deploy_info.rb
}
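
# commit() records the compiled asset manifest and deploy info on a throwaway
# heroku_tmp branch based at $GIT_COMMIT; git_push force-pushes that branch to Heroku.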
commit() {
  git checkout -B heroku_tmp $GIT_COMMIT
  # For Rails 4 with sprockets 3.x:
  manifest=`ls public/assets/.sprockets-manifest-*.json`
  # For Rails 4 with sprockets 2.x:
  #manifest=`ls public/assets/manifest-*.json`
  # For Rails 3:
  #manifest=public/assets/manifest.yml
  git add -f $manifest lib/deploy_info.rb
  cp $manifest $manifest.copy
  git commit -m "Assets manifest (build $BUILD_ID)"
  git checkout -f -q $GIT_COMMIT
  # Restore the manifest so it is retained between deploys
  mv $manifest.copy $manifest
}

set_extra_flags
compile_assets
save_deploy_information
commit

if [ "$QUICK" = "true" ]; then
  if [[ "$SKIP_HEROKU" == '' ]]; then
    git_push
  else
    echo "Skipping heroku push as requested"
  fi
  #if [[ "$CLEAR_CACHE" == '1' ]]; then
  #  echo "Clearing cache per request"
  #  heroku run rake cache:clear --app $APP
  #fi
  echo "QUICK mode, not running migrations"
else
  if [[ "$SKIP_HEROKU" == '' ]]; then
    heroku maintenance:on --app $APP
    git_push
    heroku run rake db:migrate --app $APP #cache:clear db:migrate --app $APP
    heroku maintenance:off --app $APP
    heroku restart --app $APP
  else
    echo "Skipping heroku push as requested"
  fi
fi
require 'fog/aws'
require 'json'
require 'yaml'
require 'logger'
require 'cgi'

class FogCloudAssets
  # Available options:
  #
  # :storage (Hash). Fog::Storage params
  # :container (String). Container (bucket) name
  #
  # :remote_delete => true (default: false). Clean outdated and unused files from the cloud
  # :reupload => true (default: false). Reupload everything
  # :prefix (String) (default: 'assets')
  # :public_path (String) (default: Rails.public_path)
  # :logger (Logger) (default: a new STDERR logger)
  #
  # :always_upload (Array) (default: []). Files which will be uploaded even if
  #   they are not in the manifest
  # :headers (Hash/Proc) (default: {}). Headers to set on files. If a Proc is given,
  #   it is called with each file's full name to obtain the headers hash
  # :ignore (Array/Regexp) (default: none).
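  #
  # Example (a minimal sketch; the bucket name and header rule are illustrative):
  #
  #   assets = FogCloudAssets.new(
  #     storage: { provider: 'AWS',
  #                aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
  #                aws_secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'] },
  #     container: 'my-assets-bucket',
  #     headers: ->(file) { file =~ /fontawesome/i ? { 'access-control-allow-origin' => '*' } : {} },
  #     remote_delete: true)
  #   assets.sync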
  def initialize(options = {})
    @options = options
    @connection = Fog::Storage.new(@options[:storage])
    @container = @connection.directories.get(@options[:container])
    @logger = options[:logger]
  end

  def sync
    logger.info "CloudAssets. Uploading files"
    upload_files
    delete_extra_remote_files if @options[:remote_delete]
    logger.info "CloudAssets. Done"
  end

  private

  def upload_files
    get_remote_filelist unless @options[:reupload]
    get_local_files
    files_to_upload = @local_files.select { |f| need_to_upload?(f) }
    count = files_to_upload.size
    logger.info "CloudAssets. #{count} file(s) to upload"
    format = "%#{count.to_s.length}d"
    files_to_upload.each_with_index do |file, idx|
      idx = format % (idx + 1)
      logger.info "[#{idx}/#{count}] Uploading #{file}"
      upload_file(file, headers_for_file(file))
    end
  end

  def delete_extra_remote_files
    logger.info "CloudAssets. Deleting remote files"
    get_remote_filelist
    files_to_delete = @remote_objects.map(&:key) - @local_files
    count = files_to_delete.count
    logger.info "CloudAssets. #{count} file(s) to delete"
    format = "%#{count.to_s.length}d"
    files_to_delete.each_with_index do |file, idx|
      idx = format % (idx + 1)
      logger.info "[#{idx}/#{count}] Deleting #{file}"
      delete_file(file)
    end
  end

  def get_remote_filelist
    logger.debug "CloudAssets. Getting remote file list"
    @remote_objects = @container.files.to_a
  end

  def get_local_files
    @local_files = if manifest_path
      #manifest = JSON.parse(IO.read(manifest_path))
      logger.debug "Using manifest #{manifest_path}"
      #manifest['assets'].values.uniq.map { |f| File.join(assets_prefix, f) }
      read_manifest
    else
      logger.error "Manifest not found"
      []
    end
    if @options[:always_upload]
      @local_files += @options[:always_upload].to_a
    end
    case @options[:ignore]
    when Regexp
      @local_files.reject! { |f| f =~ @options[:ignore] }
    when Array
      @local_files -= @options[:ignore]
    end
  end

  ONE_YEAR = 31557600

  def upload_file(file, headers = {})
    local = local_path(file)
    attributes = {
      key: file,
      last_modified: File.mtime(local).utc.to_s,
      body: File.open(local),
      public: true
    }
    # Set content type related stuff
    ext = File.extname(file)[1..-1]
    if ext == 'gz'
      uncompressed_filename = file[0..-4]
      ext = File.extname(uncompressed_filename)[1..-1]
      attributes[:content_encoding] = 'gzip'
    end
    mime = mime_type(ext)
    attributes[:content_type] = mime ? mime.to_s : 'application/octet-stream'
    # Set far-future cache control for assets with an MD5 fingerprint suffix
    if /-[0-9a-fA-F]{32}$/.match(File.basename(file, File.extname(file)))
      attributes[:cache_control] = "public, max-age=#{ONE_YEAR}"
      attributes[:expires] = CGI.rfc1123_date(Time.now + ONE_YEAR)
    end
    # Allow headers to override anything
    attributes.merge!(headers)
    @container.files.create(attributes)
  end

  def delete_file(file)
    @container.files.new(key: file).destroy
  end

  def headers_for_file(file)
    case @options[:headers]
    when Hash
      @options[:headers]
    when Proc
      @options[:headers].(file)
    else
      {}
    end
  end

  # Upload if the file does not exist in the cloud or is newer locally
  def need_to_upload?(file)
    return true unless @remote_objects # Always upload
    obj = @remote_objects.find { |o| o.key == file }
    path = local_path(file)
    need = File.file?(path) && (obj.nil? || obj.last_modified < File.mtime(path))
    # if need
    #   puts "#{path} #{obj.inspect} #{obj.try(:last_modified).inspect} #{File.mtime(path).inspect}"
    # end
    need
  end

  def asset_path
    @asset_path ||= File.join(public_path, assets_prefix)
  end

  def assets_prefix
    @options[:prefix] || Rails.application.config.assets.prefix.sub(/^\//, '')
  end

  def public_path
    @options[:public_path] || Rails.public_path
  end

  def local_path(filename)
    File.join public_path, filename
  end

  def read_manifest
    m = manifest_path
    data = IO.read(m)
    filenames = if m.end_with?('json')
      JSON.parse(data)['assets'].values
    else
      YAML.load(data).values
    end
    if filenames
      filenames = filenames.uniq.map { |f| File.join(assets_prefix, f) }
      # Now add the .gz versions, if any
      gzipped = []
      filenames.each do |fn|
        gz = fn + '.gz'
        gzipped << gz if File.exist?(local_path(gz))
      end
      filenames + gzipped
    end
  end

  def manifest_path
    @manifest_path ||=
      begin
        dir = Rails.application.config.assets.manifest || asset_path
        yml = File.join(dir, "manifest.yml")
        # Prefer the Rails 4 manifests
        Dir[File.join(dir, ".sprockets-manifest-*.json")].first ||
          Dir[File.join(dir, "manifest-*.json")].first ||
          (File.exist?(yml) ? yml : nil)
      end
  end

  def logger
    @logger ||= Logger.new(STDERR)
  end

  def mime_type(ext)
    # Stolen from https://github.com/rumblelabs/asset_sync/blob/master/lib/asset_sync/multi_mime.rb
    if defined?(Mime::Type)
      Mime::Type.lookup_by_extension(ext)
    elsif defined?(Rack::Mime)
      ext_with_dot = ".#{ext}"
      Rack::Mime.mime_type(ext_with_dot)
    else
      MIME::Types.type_for(ext).first
    end
  end
end