Skip to content

Instantly share code, notes, and snippets.

@dpetzold
Created August 5, 2010 19:14
Show Gist options
  • Save dpetzold/510222 to your computer and use it in GitHub Desktop.
#!/usr/bin/python
# Upload local files to an Amazon S3 bucket, drawing a console progress
# bar per file.  Python 2 script built on boto (the pre-boto3 AWS SDK)
# and the `progressbar` package.
import os
import sys
import optparse
import progressbar
import time
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from boto.s3.key import Key
# AWS credentials -- fill these in before running.
# NOTE(review): hard-coding credentials in source is a security risk;
# prefer environment variables or an AWS credentials file.
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
# Module-level progress bar, created per-upload by upload_file() and
# advanced by progress_callback().
pbar = None
def sizeof_fmt(num):
    """Format a byte count as a human-readable string, e.g. 1536 -> '1.5KB'.

    Walks the unit ladder (bytes -> KB -> MB -> GB -> TB), dividing by
    1024 until the value drops below 1024.

    Bug fix: the original fell off the end of the loop and returned None
    for values >= 1024 TB; such values now fall back to petabytes.
    """
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f%s" % (num, unit)
        num /= 1024.0
    # Anything still >= 1024 TB at this point is reported in PB.
    return "%3.1f%s" % (num, 'PB')
def progress_callback(current, total):
    """boto transfer callback: advance the shared progress bar.

    `current` is bytes transferred so far, `total` the expected size.
    progressbar raises AssertionError if `current` overshoots maxval;
    print it and carry on rather than aborting the upload.
    """
    try:
        pbar.update(current)
    except AssertionError as e:
        print(e)
def upload_file(filename, bucket, prefix=None, reduced_redundancy=False):
    """Upload one local file to `bucket` and mark it public-readable.

    The S3 key is '<prefix>/<filename>' when a prefix is given, otherwise
    just the filename.  A module-level progress bar is drawn while the
    transfer runs.

    Returns the file size in bytes, or 0 when the file is empty or the
    transfer raised IOError.
    """
    global pbar

    key = Key(bucket)
    key.key = '%s/%s' % (prefix, filename) if prefix else '%s' % (filename)

    size = os.stat(filename).st_size
    if size == 0:
        print('Bad filesize for "%s"' % (filename))
        return 0

    # Bar layout: "<name> <speed> <<<bar>>> <pct> <eta>".  The filename
    # label is forced to UTF-8 with undecodable bytes dropped.
    label = unicode(filename, errors='ignore').encode('utf-8')
    widgets = [
        label, ' ',
        progressbar.FileTransferSpeed(),
        ' <<<', progressbar.Bar(), '>>> ',
        progressbar.Percentage(), ' ', progressbar.ETA(),
    ]
    pbar = progressbar.ProgressBar(widgets=widgets, maxval=size)
    pbar.start()

    try:
        key.set_contents_from_filename(
            filename,
            cb=progress_callback,
            num_cb=100,
            reduced_redundancy=reduced_redundancy,
        )
        key.set_acl('public-read')
    except IOError as e:
        print(e)
        return 0

    pbar.finish()
    return size
if __name__ == '__main__':
    # Usage: prog [options] <bucket> <file> [<file> ...]
    parser = optparse.OptionParser(usage='usage: %prog [options] ')
    parser.add_option('-p', '--prefix', dest='prefix')
    # NOTE(review): '--reduced_rendundancy' is misspelled, but it is the
    # user-visible flag name, so it is kept for backward compatibility.
    parser.add_option('-r', '--reduced_rendundancy', dest='reduced_redundancy',
                      action='store_true', default=False)
    (options, args) = parser.parse_args()

    # Need at least a bucket name and one file to upload.
    if len(args) < 2:
        parser.print_help()
        sys.exit(1)

    conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)

    # Create the bucket on demand; anything other than NoSuchBucket is
    # re-raised (bare `raise` preserves the original traceback).
    try:
        bucket = conn.get_bucket(args[0])
    except S3ResponseError as e:
        if e.error_code == 'NoSuchBucket':
            bucket = conn.create_bucket(args[0])
        else:
            raise

    stime = time.time()
    total_bytes = 0
    count = 0
    for arg in args[1:]:
        size = upload_file(arg, bucket, options.prefix, options.reduced_redundancy)
        total_bytes += size
        count += 1

    # Blank line after the last progress bar when several files were sent.
    if len(args) > 2:
        print('')
    # BUG FIX: the original summary line had unbalanced parentheses (a
    # syntax error) and divided by time.time() instead of the elapsed
    # time, yielding a nonsense throughput figure.
    elapsed = time.time() - stime
    print('%s files %s at %.2f kb/s' % (
        count, sizeof_fmt(total_bytes), (total_bytes / 1024) / elapsed))
@sajanp
Copy link

sajanp commented Feb 6, 2013

Solid gist. Just getting into Python and working on an S3 upload script of my own. Just needed to look at how you did the progress bar. Thanks!

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment