Skip to content

Instantly share code, notes, and snippets.

@kmcintyre
Last active July 10, 2016 01:31
Show Gist options
  • Save kmcintyre/6998159 to your computer and use it in GitHub Desktop.
Save kmcintyre/6998159 to your computer and use it in GitHub Desktop.
Everybody has an S3 boto copy script. Here is mine.
#!/usr/bin/python
#
# I'm pretty sure there's better scripts
#
import sys, getopt, boto, gzip, StringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from socket import gethostname
bucket = ''
file = ''
targetfile = ''
contenttype = ''
acl = 'public-read'
encode = None
def usage():
    """Print the command-line help text and terminate the process.

    Called for -h and for any malformed option list; never returns
    (sys.exit raises SystemExit). Uses single-argument print() calls,
    which produce identical output on Python 2 and 3.
    """
    print('usage: s3copy.py -b <bucket> -f <inputfile> -t <targetfile> -c <contenttype> -p <acl> -e <encode>')
    print(' --bucket, -b : bucket on S3 to copy file to ( will create if not there)')
    print(' --file , -f : input file to copy to S3')
    print(' --targetfile, -t : targetfile name')
    print(' --contenttype, -c : content type')
    print(' --acl, -p : acl publish setting')
    print(' --encode, -e : encode (gzip)')
    sys.exit()
def loadvalues(argv):
    """Populate the module-level option globals from the argument vector.

    -h (or a malformed option list) prints usage and exits; every other
    recognised flag stores its argument into the matching module global.
    """
    # Map each recognised flag to the module-level name it sets.
    targets = {
        '-b': 'bucket', '--bucket': 'bucket',
        '-f': 'file', '--file': 'file',
        '-t': 'targetfile', '--targetfile': 'targetfile',
        '-c': 'contenttype', '--contenttype': 'contenttype',
        '-p': 'acl', '--acl': 'acl',
        '-e': 'encode', '--encode': 'encode',
    }
    try:
        opts, _ = getopt.getopt(
            argv,
            "hk:b:f:t:c:p:e:",
            ["bucket=", "file=", "targetfile=", "contenttype=", "acl=", "encode="],
        )
    except getopt.GetoptError:
        usage()  # never returns
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in targets:
            globals()[targets[opt]] = arg
def copy():
    """Upload *file* to S3 as key *targetfile* in *bucket*, optionally gzipped.

    Reads the module-level settings populated by loadvalues(). Creates the
    bucket if it does not already exist (the documented behavior of this
    script), sets Content-Type (and Content-Encoding for gzip), uploads,
    then applies the ACL.
    """
    con = S3Connection()  # credentials come from boto's config/environment
    target_bucket = con.create_bucket(bucket)  # idempotent: returns existing bucket
    k = Key(target_bucket)
    print('bucket: %s file: %s target: %s content-type: %s acl: %s encode: %s'
          % (bucket, file, targetfile, contenttype, acl, encode))
    k.key = targetfile
    k.set_metadata('Content-Type', contenttype)
    if encode == 'gzip':  # redundant 'is not None' check dropped
        k.set_metadata('Content-Encoding', 'gzip')
        # gzip emits binary data, so the in-memory buffer must be a bytes
        # buffer: io.BytesIO is correct on both Python 2 and 3, unlike
        # the old StringIO.StringIO.
        import io
        buf = io.BytesIO()
        gz = gzip.GzipFile(fileobj=buf, mode='w')
        with open(file, 'rb') as infile:  # was misleadingly named 'outfile'
            gz.write(infile.read())
        # Close before getvalue() so the gzip trailer/CRC is flushed.
        gz.close()
        k.set_contents_from_string(buf.getvalue())
    else:
        k.set_contents_from_filename(file)
    # Only apply ACLs S3 recognises; anything else is silently skipped,
    # matching the original best-effort behavior.
    if acl in ('private', 'public-read', 'public-read-write', 'authenticated-read'):
        k.set_acl(acl)
if __name__ == "__main__":
    # Parse the CLI options into the module globals, then perform the upload.
    loadvalues(sys.argv[1:])
    copy()
#!/usr/bin/python
#
# I'm pretty sure there's better scripts
#
import sys, getopt, boto, gzip, StringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from socket import gethostname
bucket = ''
file = ''
targetfile = ''
acl = 'public-read'
encode = None
def usage():
    """Print the command-line help text and terminate the process.

    The original help text was copy-pasted from s3copy.py and described
    -f as an input to upload; get() actually WRITES the downloaded object
    to that path, so the text is corrected here. Never returns
    (sys.exit raises SystemExit).
    """
    print('usage: s3get.py -b <bucket> -f <outputfile> -t <targetfile>')
    print(' --bucket, -b : bucket on S3 to fetch the object from')
    print(' --file , -f : local file to write the downloaded object to')
    print(' --targetfile, -t : key name of the object to fetch')
    sys.exit()
def loadvalues(argv):
    """Populate the module-level option globals from the argument vector.

    -h (or a malformed option list) prints usage and exits; every other
    recognised flag stores its argument into the matching module global.
    """
    # Map each recognised flag to the module-level name it sets.
    targets = {
        '-b': 'bucket', '--bucket': 'bucket',
        '-f': 'file', '--file': 'file',
        '-t': 'targetfile', '--targetfile': 'targetfile',
    }
    try:
        opts, _ = getopt.getopt(argv, "hk:b:f:t:", ["bucket=", "file=", "targetfile="])
    except getopt.GetoptError:
        usage()  # never returns
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in targets:
            globals()[targets[opt]] = arg
def get():
    """Download key *targetfile* from *bucket* into the local path *file*.

    Uses the module-level settings populated by loadvalues().
    """
    con = S3Connection()  # credentials come from boto's config/environment
    # get_bucket, not create_bucket: a download must never create a bucket
    # as a side effect — with create_bucket a typo'd bucket name silently
    # made a new empty bucket and then failed on the key lookup. get_bucket
    # raises immediately for a missing bucket instead.
    source_bucket = con.get_bucket(bucket)
    k = Key(source_bucket)
    print('bucket: %s file: %s target: %s' % (bucket, file, targetfile))
    k.key = targetfile
    k.get_contents_to_filename(file)
if __name__ == "__main__":
    # Parse the CLI options into the module globals, then perform the download.
    loadvalues(sys.argv[1:])
    get()
from twisted.internet import reactor
import signal
def loop(s):
print 'delay', s
reactor.callLater(s,loop,s)
reactor.callLater(0,loop,5)
def shutdown_handler():
    """Run by the reactor's 'before shutdown' system-event trigger; just logs."""
    print('shutdown_handler')
reactor.addSystemEventTrigger('before', 'shutdown', shutdown_handler)
reactor.run()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment