@freewayz
Created August 12, 2016 10:35
How to use Python and the Amazon boto library to upload a directory to Amazon S3
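The snippet below targets the boto 2 library (that is where the S3Connection API used here lives); if you do not already have it, it can typically be installed with pip. The credential values, bucket name, and size constants near the top of the script are placeholders you need to replace with your own.

pip install boto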
import os
import sys

from boto.s3.connection import S3Connection
from boto.s3.key import Key

# Placeholder credentials and tuning values -- replace with your own.
AWS_KEY_ID = 'your-aws-access-key-id'
AWS_ACCESS_KEY = 'your-aws-secret-access-key'
DIR_TO_UPLOAD = 'project-dist'
MAX_SIZE = 20 * 1024 * 1024   # files above this size are sent as multipart uploads
PART_SIZE = 6 * 1024 * 1024   # chunk size per part (S3 requires at least 5 MB)


def upload_percent_cb(complete, total):
    """Progress callback passed to boto: prints a dash for each progress tick."""
    sys.stdout.write('-')
    sys.stdout.flush()


def remove_root_dir_from_path(file_path):
    """Strip the local root directory so the S3 key is relative to the bucket root."""
    parts = file_path.split('/')
    parts.remove(DIR_TO_UPLOAD)
    return '/'.join(parts)


def start_s3_upload_process():
    connection = S3Connection(aws_access_key_id=AWS_KEY_ID,
                              aws_secret_access_key=AWS_ACCESS_KEY)
    my_bucket_name = "your-s3-bucket-name"

    # lookup() returns None when the bucket is missing instead of raising an error.
    my_bucket = connection.lookup(my_bucket_name)
    if not my_bucket:
        print("Bucket does not exist, creating bucket")
        my_bucket = connection.create_bucket(my_bucket_name)

    upload_file_names = []
    print("Walking through %s" % DIR_TO_UPLOAD)
    for path, subdirs, files in os.walk(DIR_TO_UPLOAD):
        print("Path is %s\nSubdirs are %s\nFiles are %s" % (path, subdirs, files))
        for filename in files:
            upload_file_names.append(os.path.join(path, filename))

    for src_path in upload_file_names:
        dest_path = remove_root_dir_from_path(src_path)
        print("Uploading %s to s3://%s/%s" % (src_path, my_bucket_name, dest_path))
        file_size = os.path.getsize(src_path)

        if file_size > MAX_SIZE:
            # Large file: stream it to S3 in PART_SIZE chunks via multipart upload.
            print("Multipart upload")
            mp = my_bucket.initiate_multipart_upload(dest_path)
            part_num = 0
            with open(src_path, 'rb') as fp:
                while fp.tell() < file_size:
                    part_num += 1
                    print("Uploading part %i" % part_num)
                    mp.upload_part_from_file(fp, part_num, cb=upload_percent_cb,
                                             num_cb=10, size=PART_SIZE)
            mp.complete_upload()
        else:
            # Small file: a single PUT through a regular Key object.
            print("Single part upload")
            k = Key(my_bucket)
            k.key = dest_path
            k.set_contents_from_filename(src_path, cb=upload_percent_cb, num_cb=10)
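The script above only defines start_s3_upload_process(); a minimal entry point for running it as a standalone script, assuming the placeholder credentials, bucket name, and DIR_TO_UPLOAD have been filled in, might look like this:

if __name__ == '__main__':
    # Walk DIR_TO_UPLOAD and upload everything it contains to the bucket.
    start_s3_upload_process()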