Fetch dummy data folders and files
curl https://s3.amazonaws.com/ds2002-resources/data/bundle.tar.gz > bundle.tar.gz && \
tar -xzvf bundle.tar.gz && \
rm bundle.tar.gz
- Install - https://aws.amazon.com/cli/
- Documentation - https://docs.aws.amazon.com/cli/latest/reference/
- Setup/Authentication
aws configure # This prompts you for access key, secret access key, region, etc.
List buckets
aws s3 ls
Make a bucket
aws s3 mb s3://BUCKET
List bucket contents
aws s3 ls s3://BUCKET/
Copy a file
aws s3 cp FILE s3://BUCKET/
Remove a file
aws s3 rm s3://BUCKET/FILE
Sync a directory
aws s3 sync folder1/ s3://BUCKET/
aws s3 sync folder2/ s3://BUCKET/PATH/MORE/
Remove files recursively
aws s3 rm s3://BUCKET/ --recursive
Delete a bucket
aws s3 rb s3://BUCKET/  # bucket must be empty (or add --force to delete its contents as well)
- Install
python3 -m pip install boto3
- Documentation - https://boto3.amazonaws.com/v1/documentation/api/latest/index.html
- Authentication - uses credentials set up for the AWS CLI above.
- Client - must be created in your code for each AWS service
import boto3
# name the client object something useful, e.g. the same name as the service
# this will become a reusable handle to call all methods against that service
s3 = boto3.client('s3')
ec2 = boto3.client('ec2')
sqs = boto3.client('sqs')
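If you keep more than one set of credentials, a Session lets you pick a named profile and region explicitly instead of relying on the defaults from aws configure. A minimal sketch; the profile name "ds2002" is just a placeholder:
import boto3

# select an explicit profile/region rather than the default credentials
session = boto3.Session(profile_name="ds2002", region_name="us-east-1")
s3 = session.client("s3")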
List buckets
response = s3.list_buckets()
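The call returns a plain dictionary; the bucket records sit under the "Buckets" key. Continuing from the response above:
# print the name and creation date of every bucket in the account
for bucket in response["Buckets"]:
    print(bucket["Name"], bucket["CreationDate"])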
Make a bucket
BUCKET_NAME = "mybucket"
response = s3.create_bucket(Bucket=BUCKET_NAME)
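Note that outside us-east-1 the bucket's region has to be passed explicitly; a sketch using us-east-2 as an example region:
response = s3.create_bucket(
    Bucket=BUCKET_NAME,
    CreateBucketConfiguration={"LocationConstraint": "us-east-2"}  # required outside us-east-1
)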
List bucket contents
BUCKET_NAME = "mybucket"
response = s3.list_objects(Bucket=BUCKET_NAME)
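As with buckets, the response is a dictionary; the object records (when any exist) are under "Contents". Continuing from the response above:
# "Contents" is omitted entirely when the bucket is empty
for obj in response.get("Contents", []):
    print(obj["Key"], obj["Size"])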
Copy a file
BUCKET_NAME = "mybucket"
FILE_NAME = "myfile.txt"
KEY_NAME = "folderX/myfile.txt"
with open(FILE_NAME, "rb") as f:  # send the file's contents, not the filename string
    response = s3.put_object(
        Bucket=BUCKET_NAME,
        Body=f,
        Key=KEY_NAME
    )
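The client also has a higher-level helper that reads the file from disk and handles multipart uploads for you; using the same names as above:
# upload_file(local_path, bucket, key) streams the file from disk
s3.upload_file(FILE_NAME, BUCKET_NAME, KEY_NAME)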
Remove a file
BUCKET_NAME = "mybucket" # bucket name
KEY_NAME = "folder1/myfile.txt" # path to file within bucket
response = s3.delete_object(
    Bucket=BUCKET_NAME,
    Key=KEY_NAME
)
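There is no single boto3 call matching aws s3 rm --recursive; one sketch is to list the keys and delete them in a batch. This assumes the bucket holds at most 1000 objects, the limit of a single listing call:
# rough equivalent of `aws s3 rm s3://BUCKET/ --recursive` for small buckets
listing = s3.list_objects(Bucket=BUCKET_NAME)
keys = [{"Key": obj["Key"]} for obj in listing.get("Contents", [])]
if keys:
    s3.delete_objects(Bucket=BUCKET_NAME, Delete={"Objects": keys})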
Delete a bucket
BUCKET_NAME = "mybucket" # bucket name / must be empty
response = s3.delete_bucket(
    Bucket=BUCKET_NAME
)
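delete_bucket raises an error if the bucket still contains objects; a sketch of catching that error instead of letting the script crash:
from botocore.exceptions import ClientError

try:
    s3.delete_bucket(Bucket=BUCKET_NAME)
except ClientError as e:
    # S3 reports "BucketNotEmpty" when objects are still present
    print("Could not delete bucket:", e.response["Error"]["Code"])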