@drcongo
Last active May 15, 2018 23:28
A Fabric task to deploy static assets to S3, gzipping the js and css files, and invalidating cache objects on CloudFront. Feel free to point out where it could be improved.
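Run it with an environment task first, e.g. fab prod deploy_static — the prod and stage tasks aren't included in the gist, but a sketch of what they might look like follows the CDN settings below.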
import gzip
import os
import shutil
from datetime import date, timedelta

import arrow
import boto
from boto.cloudfront import CloudFrontConnection
from fabric.api import *
from fabric.colors import blue, green, red, yellow
CDN = {
    'aws_cloudfront_id': 'YOUR_CLOUDFRONT_ID',
    'aws_access_key': 'YOUR_AWS_ACCESS_KEY',
    'aws_secret_key': 'YOUR_AWS_SECRET_KEY',
    'aws_bucket': 'YOUR_BUCKET_NAME',
    'aws_endpoint': 's3-eu-west-1.amazonaws.com',
    'aws_sitename': 'YOUR_WEBSITE_NAME',  # use the domain name
    'static_folders': [
        './img',
        './js',
        './css',
    ],
    'files': []
}
# files will live at...
# yourcdn/YOUR_WEBSITE_NAME/
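# Not part of the original gist: deploy_static() expects environment tasks
# (prod and stage, matching the require() calls below) to set env.tag and
# env.CDN before it runs. A minimal sketch, assuming env.CDN points at the
# CDN dict above so the module-level CDN and env.CDN stay in sync:
@task
def prod():
    env.tag = 'prod'  # names the per-environment logs under fabfile/logs/
    env.CDN = CDN     # in practice, a production-specific settings dict

@task
def stage():
    env.tag = 'stage'
    env.CDN = CDN     # in practice, a staging-specific settings dict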
@task
def deploy_static():
    """Deploys the static assets to S3"""
    require('CDN', provided_by=[prod, stage])
    if not os.path.isdir('fabfile'):
        abort(red("It looks like you're running the fab task from somewhere other than the project root."))
    if not os.path.exists('fabfile/logs'):
        os.makedirs('fabfile/logs')
    last_run = get_last_run()
    with open('fabfile/logs/static_files_%s.log' % env.tag, 'a+') as files_log:
        files_log.seek(0)
        logged_files = [line.rstrip("\n") for line in files_log.readlines()]
        print(blue("%s static files logged." % len(logged_files)))
        for folder in env.CDN['static_folders']:
            to_upload = len(env.CDN['files'])
            folder_to_s3(folder, logged_files, files_log, last_run)
            if len(env.CDN['files']) == to_upload:
                print(yellow('No new or modified files in %s' % folder))
    if os.path.exists('TEMP'):
        shutil.rmtree('TEMP')
    set_last_run()
    invalidate_cloudfront()
def get_last_run():
    """Returns the time of the last static deploy as an Arrow object."""
    pointer = os.path.join('fabfile', 'logs', 'static_times_%s.log' % env.tag)
    with open(pointer, 'a+') as f:
        f.seek(0)
        times = [line.rstrip("\n") for line in f.readlines()]
    try:
        # times[-1] raises IndexError on a first run, when the log is empty,
        # so the lookup lives inside the try alongside the parse
        last = arrow.get(times[-1]).floor('second')
    except Exception as e:
        print(red(e))
        last = arrow.get('1969-01-01').floor('second')
    print(yellow("Static last run: %s " % last))
    return last
def folder_to_s3(folder, logged_files, log, last_run):
    """Walks a local folder and uploads new or modified files to S3."""
    require('CDN', provided_by=[prod, stage])
    compressable = ['.css', '.js', '.svg']
    ignore_files = ['.DS_Store', '.gitkeep']
    ignore_dirs = ['projects']
    bucket = init_bucket()
    the_future = date.today() + timedelta(days=365 * 10)
    # HTTP-date format for the Expires header (the original passed a bare date object)
    expires = the_future.strftime('%a, %d %b %Y %H:%M:%S GMT')
    for path, dirs, files in os.walk(folder):
        curr_dir = os.path.split(path)[1]
        for f in files:
            do_upload = False
            filepath = os.path.join(path, f)
            m = arrow.get(os.path.getmtime(filepath))
            c = arrow.get(os.path.getctime(filepath))  # the original called getmtime twice
            if filepath not in logged_files:
                if f not in ignore_files and curr_dir not in ignore_dirs:
                    print(green('NEW: %s' % filepath))
                    log.write('%s\n' % filepath)
                    do_upload = True
            # .timestamp is a property in arrow < 1.0; newer arrow makes it a method
            elif m.timestamp > last_run.timestamp or c.timestamp > last_run.timestamp:
                print(green('MODIFIED: %s' % filepath))
                do_upload = True
            if do_upload and f not in ignore_files and curr_dir not in ignore_dirs:
                headers = {'Expires': expires}
                foldername = os.path.split(folder)[1]
                putpath = "%s/%s" % (foldername, os.path.relpath(filepath, folder))
                print(blue(" - Putting %s " % putpath))
                k = bucket.new_key(putpath)
                if os.path.splitext(f)[1] in compressable:
                    # upload a gzipped copy and tell browsers it's gzip-encoded
                    print(blue(" Compressing %s " % f))
                    headers['Content-Encoding'] = 'gzip'
                    k.set_contents_from_filename(gzipit(path, f), headers=headers)
                else:
                    k.set_contents_from_filename(filepath, headers=headers)
                k.make_public()
                env.CDN['files'].append(putpath)
def init_bucket():
    """Connects to S3 and returns the target bucket."""
    s3 = boto.connect_s3(CDN['aws_access_key'], CDN['aws_secret_key'])
    s3.host = CDN['aws_endpoint']
    return s3.get_bucket(CDN['aws_bucket'])
def invalidate_cloudfront():
    """Invalidates the freshly uploaded paths on CloudFront."""
    conn = CloudFrontConnection(CDN['aws_access_key'], CDN['aws_secret_key'])
    print(conn.create_invalidation_request(CDN['aws_cloudfront_id'], CDN['files']))
    print('Invalidated cloudfront cache for ...\n%s' % '\n\t '.join(CDN['files']))
    CDN['files'] = []
def gzipit(path, filename):
    """Gzips a file into TEMP/ and returns the path of the compressed copy."""
    fullpath = os.path.join(path, filename)
    if not os.path.exists('TEMP'):
        os.makedirs('TEMP')
    outpath = 'TEMP/%s' % filename
    with open(fullpath, 'rb') as f_in, gzip.open(outpath, 'wb') as f_out:
        f_out.write(f_in.read())
    return outpath
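# Not part of the original gist: set_last_run() is called by deploy_static()
# but never defined (as the comment below points out). A minimal sketch,
# assuming it appends the current UTC time to the same log that
# get_last_run() reads back:
def set_last_run():
    pointer = os.path.join('fabfile', 'logs', 'static_times_%s.log' % env.tag)
    with open(pointer, 'a') as f:
        f.write('%s\n' % arrow.utcnow().floor('second'))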
@skoczen commented May 27, 2015

Looks like some copy/paste errors - set_last_run() is missing entirely.
