#!/usr/bin/env python
import logging
import os
from multiprocessing.pool import ThreadPool

from boto.s3.connection import S3Connection


def upload(tup, dryrun=False):
    """Upload a single file to S3. `tup` is a (key_name, filename, bucket) tuple."""
    (key_name, filename, bucket) = tup
    key = bucket.new_key(key_name)
    if not dryrun:
        key.set_contents_from_filename(filename)


def upload_dir(path, bucket_name, key_prefix, aws_key, aws_secret, threads=1):
    """Recursively upload every file under `path` to `bucket_name`, prefixing each key with `key_prefix`."""
    logging.debug("uploading %s to %s/%s", path, bucket_name, key_prefix)

    # Collect every file under the directory tree.
    to_upload = []
    for root, dirs, files in os.walk(path):
        to_upload.extend(os.path.join(root, f) for f in files)

    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.lookup(bucket_name)
    if bucket is None:
        logging.error("error connecting to bucket %s", bucket_name)
        return

    # Build (key_name, filename, bucket) work items, mapping each file's
    # path relative to `path` onto the key prefix.
    uploads = []
    for p in to_upload:
        key = key_prefix + p.replace(path, '').lstrip('/')
        uploads.append((key, p, bucket))

    # Upload in parallel on a thread pool, then wait for all uploads to finish.
    pool = ThreadPool(processes=threads)
    pool.map(upload, uploads)
    pool.close()
    pool.join()
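
A minimal usage sketch, assuming the script above is saved as s3_upload.py and that the credentials, bucket name, and paths below are placeholders you replace with real values:

#!/usr/bin/env python
import logging
from s3_upload import upload_dir  # hypothetical module name for the script above

# Enable debug logging so the "uploading ..." message is visible.
logging.basicConfig(level=logging.DEBUG)

# Placeholder values; the bucket must already exist, since upload_dir
# uses conn.lookup() and bails out if it is not found.
upload_dir(
    path="/tmp/site-build",
    bucket_name="my-example-bucket",
    key_prefix="releases/v1/",
    aws_key="AKIA...",
    aws_secret="...",
    threads=4,
)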