Skip to content

Instantly share code, notes, and snippets.

@jsundram
Created June 24, 2015 05:30
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save jsundram/1ebd96f615550d71b984 to your computer and use it in GitHub Desktop.
Totally insane little script I use for deploying single-folder static apps to viz.runningwithdata.com. Everything gets gzipped up and minified.
import boto
import gzip
import os
import shutil
import subprocess
from subprocess import Popen, PIPE
import sys
import tempfile
import json
"""
process_in_place takes a directory and:
combines all js
requires you to update your html to refer to the new mondo js file
destructively minifies css/js/html
destructively crushes pngs
gzips html, pjs, js, css, xml
You should only ever do this on a copy of a source directory, not the original
gzip_to:
gzips your files with the specified extensions
puts them in the specified folder or a temp folder
returns a list of all the new files
"""
def run(args):
    """Run *args* as a subprocess and return its captured stdout."""
    process = Popen(args, stdout=PIPE)
    out, _ = process.communicate()
    return out
def do_gzip(f):
    """Gzip *f* in place, keeping the original filename."""
    subprocess.call(['gzip', f])
    # gzip replaces f with f.gz; restore the original name so web
    # servers can serve it under the same path.
    os.rename(f + '.gz', f)
def process_in_place(combine=False, minify=False):
    """Destructively prepare the directory named in sys.argv[1] for deployment.

    Optionally combines all .js files into a single code.js (the html must
    then be updated by hand to load it), minifies css/js/html and crushes
    pngs with external tools, then gzips every servable file in place.
    Only ever run this on a *copy* of a source directory, never the original.
    """
    def _tmpfile():
        # os.tempnam was insecure and was removed in Python 3; mkstemp is
        # the safe replacement (it creates the file; tools overwrite it).
        fd, path = tempfile.mkstemp()
        os.close(fd)
        return path

    directory = sys.argv[1]
    files = [os.path.join(directory, f) for f in os.listdir(directory)]

    if combine:
        # Concatenate every .js file into code.js, then delete the originals.
        js = [f for f in files if os.path.splitext(f)[-1] == '.js']
        if js:
            alljs = run(['cat'] + js)
            # run() returns bytes; write the combined source verbatim.
            with open(os.path.join(directory, 'code.js'), 'wb') as out:
                out.write(alljs)
            for f in js:
                os.unlink(f)
        # combining the js with processing.js seems to break the sketch ...
        try:
            input("update the html to only refer to code.js, then press ENTER\n")
        except (SyntaxError, EOFError):
            pass

    if minify:
        # crush and minify
        files = [os.path.join(directory, f) for f in os.listdir(directory)]
        for f in files:
            print(f)
            ext = os.path.splitext(f)[-1]
            if ext == '.swp':
                # editor swap files never ship
                os.unlink(f)
            elif ext == '.png':
                tmp = _tmpfile()
                subprocess.call(['pngcrush', '--brute', f, tmp])
                os.rename(tmp, f)
            elif ext in ('.css', '.js'):
                tmp = _tmpfile()
                subprocess.call(['yuicompressor', f, '-o', tmp])
                os.rename(tmp, f)
            elif ext == '.html':
                tmp = _tmpfile()
                # would like to add --compress-js but can't because of path reasons
                subprocess.call(['htmlcompressor', '--remove-intertag-spaces',
                                 '--remove-quotes', '--simple-doctype',
                                 '--remove-style-attr', '--remove-link-attr',
                                 '--remove-script-attr', '--remove-quotes',
                                 f, '-o', tmp])
                os.rename(tmp, f)

    # gzip everything a browser will accept compressed
    files = [os.path.join(directory, f) for f in os.listdir(directory)]
    to_gzip = {'.html', '.pjs', '.js', '.css', '.xml', '.json', '.csv'}
    for f in files:
        ext = os.path.splitext(f)[-1]
        if ext in to_gzip:
            print("compressing %s" % f)
            do_gzip(f)
def ext(f):
    """Return the extension of path *f*, dot included ('' if none)."""
    _, extension = os.path.splitext(f)
    return extension
def get_files(directory, avoid=('.swp',)):
    """Get list of files in directory and all subdirectories.

    Returns paths of files AND subdirectories, skipping anything whose
    path contains '.git' or '.DS_Store' and files whose extension is in
    *avoid*.  (Directory paths are included deliberately; gzip_to uses
    them to mirror the tree.)
    """
    avoid = set(avoid)

    def should_add(path):
        return not ('.git' in path or
                    os.path.splitext(path)[-1] in avoid or
                    '.DS_Store' in path)

    paths = []
    # os.path.walk was removed in Python 3; os.walk yields each directory
    # with its immediate subdirectory and file names, which together match
    # the old callback's `names` argument.
    for top, dirs, names in os.walk(directory):
        for name in dirs + names:
            p = os.path.join(top, name)
            if should_add(p):
                paths.append(p)
    return paths
def gzip_to(directory, destination=None, extensions=()):
    """Copy the tree under <directory> to <destination>, gzipping web assets.

    if destination is None, moves them to a tempdir
    extensions specifies custom extensions for files that need gzipping
    returns a list of the new filenames
    """
    paths = get_files(directory)
    directories = [p for p in paths if os.path.isdir(p)]
    dir_set = set(directories)
    files = [p for p in paths if p not in dir_set]
    destination = destination or tempfile.mkdtemp()
    # Need to make sure that both directory and destination end with pathsep
    # so the str.replace prefix swap maps source paths onto the destination.
    if not destination.endswith(os.path.sep):
        destination += os.path.sep
    if not directory.endswith(os.path.sep):
        directory += os.path.sep
    # Make sure destination directories exist (exist_ok replaces the old
    # errno==17 dance).
    for d in directories:
        os.makedirs(d.replace(directory, destination, 1), exist_ok=True)
    # Copy things to the destination and gzip them if necessary
    outfiles = []
    to_gzip = set(extensions) | {'.html', '.pjs', '.js', '.css', '.xml', '.json'}
    for path in files:
        outfile = path.replace(directory, destination, 1)
        if ext(path) in to_gzip:
            # Read in binary mode: gzip streams are byte-oriented.
            with open(path, 'rb') as src, gzip.open(outfile, 'wb') as dst:
                shutil.copyfileobj(src, dst)
        else:
            shutil.copyfile(path, outfile)
        outfiles.append(outfile)
    print("gzipped files to %s" % destination)
    return outfiles
def upload(file_list, folder, bucket='mlt.where.com'):
    """Upload *file_list* to s3://<bucket>/<folder>/.

    Files that are gzip-compressed are uploaded with a
    Content-Encoding: gzip header so browsers decompress transparently.
    Credentials are read from the .json sibling of $AWS_CREDENTIAL_FILE,
    keyed by bucket name.
    """
    folder = os.path.basename(folder)
    # NOTE(review): the original appended os.path.pathsep (':', the PATH
    # list separator) here — a typo for os.path.sep, and redundant anyway
    # because os.path.join below supplies the separator.
    prefix = os.path.commonprefix(file_list)

    def get_creds(bucket):
        """Return the credential dict for *bucket* from the JSON cred file."""
        cred_file = os.environ['AWS_CREDENTIAL_FILE']
        cred_file = cred_file.replace('.txt', '.json')
        with open(cred_file) as f:
            creds = json.load(f)
        return creds[bucket]

    def is_gzipped(filename):
        """potentially has false positives:
        http://stackoverflow.com/questions/6059302/how-to-check-if-a-file-is-gzip-compressed
        """
        # Binary mode so the magic-number comparison works on bytes.
        with open(filename, 'rb') as f:
            return f.read(2) == b'\x1f\x8b'

    creds = get_creds(bucket)
    aws_id = creds.get('aws_access_key_id')
    aws_key = creds.get('aws_secret_access_key')
    s3 = boto.connect_s3(aws_id, aws_key)
    b = s3.get_bucket(bucket)
    for f in file_list:
        keyname = os.path.join(folder, f.replace(prefix, '', 1))
        k = b.new_key(keyname)
        h = {'Content-Encoding': 'gzip'} if is_gzipped(f) else {}
        k.set_contents_from_filename(f, headers=h)
        k.close()
        print(keyname)
    print("Uploaded %d files to s3://%s/%s" % (len(file_list), bucket, folder))
if __name__ == '__main__':
    folder = sys.argv[1]
    files = gzip_to(folder)
    # want basename, and os.path.basename doesn't work if the path ends
    # with a slash; next() on a generator (the .next() method is Python 2 only)
    basename = next(p for p in reversed(folder.split('/')) if p != '')
    upload(files, basename, 'viz.runningwithdata.com')
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment