"""Running this uploads all Python modules in this workspace to an S3 bucket, to | |
access when training on Colab. | |
""" | |
import os
import posixpath

import boto3
# Name of the S3 bucket to upload to. Replace with your own bucket's name.
S3_BUCKET_NAME = 'BUCKET-NAME'

# File extensions in the local directory to upload as code.
# (.yaml or .json are often used for config files.)
use_extensions = ['.py', '.sh', '.yaml', '.json']

# Key prefix under which the files are stored in the bucket.
# You may also set this to '/' if you want to use the root path of your S3 bucket.
project_prefix = 'project_name/'

# Bucket handle created once at import time and shared by the helpers below.
s3_bucket = boto3.resource('s3').Bucket(S3_BUCKET_NAME)
def s3_upload_data(s3_path, body):
    """Upload data to the shared S3 bucket at a particular path.

    Args:
        s3_path: Object key (path within the bucket) to store the data under.
        body: Contents to upload (str or bytes).
    """
    s3_bucket.put_object(Key=s3_path, Body=body)
def recursive_code_upload(path):
    """Recursively upload every matching file under `path` to the S3 bucket.

    Files whose name ends with one of `use_extensions` are uploaded under
    `project_prefix`, preserving the local directory layout; subdirectories
    are descended into recursively.

    Args:
        path: Local directory to scan (e.g. '.').
    """
    for entry in os.listdir(path):
        file_path = os.path.join(path, entry)
        if any(entry.endswith(ext) for ext in use_extensions):
            # Read explicitly as UTF-8 so the upload does not depend on the
            # platform's default locale encoding.
            with open(file_path, 'r', encoding='utf-8') as f:
                py_code = f.read()
            # Drop a leading './' so it doesn't end up in the S3 key.
            if file_path.startswith('./'):
                file_path = file_path[2:]
            print(f"Uploading {file_path}")
            # S3 keys always use forward slashes; os.path.join would insert
            # backslashes on Windows, so build the key with posixpath instead.
            key = posixpath.join(project_prefix, file_path.replace(os.sep, '/'))
            s3_upload_data(key, py_code)
        elif os.path.isdir(file_path):
            recursive_code_upload(file_path)
if __name__ == "__main__": | |
recursive_code_upload('.') |