Skip to content

Instantly share code, notes, and snippets.

@stantonk
Last active February 8, 2018 04:16
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save stantonk/304697af6ba73c7cfd2f9226cb1dc432 to your computer and use it in GitHub Desktop.
# from: http://www.slsmk.com/getting-the-size-of-an-s3-bucket-using-boto3-for-aws/
# modified to return rounded sizes in Gigabytes
# and sorted by descending total storage size
#
# Prints one line per S3 bucket: name left-justified, size in GiB
# right-justified with thousands separators, largest bucket first.
import boto3
import datetime

now = datetime.datetime.now()
cw = boto3.client('cloudwatch')
s3client = boto3.client('s3')

# Get a list of all buckets owned by the caller's account
allbuckets = s3client.list_buckets()

# Header line for the output going to standard out
print('Bucket'.ljust(45) + 'Size in GigaBytes'.rjust(25))

bucket_sizes = []
for bucket in allbuckets['Buckets']:
    # For each bucket, look up the corresponding BucketSizeBytes metric
    # from CloudWatch over the last day. CloudWatch publishes this metric
    # once per day, so at most one datapoint is expected in the window.
    response = cw.get_metric_statistics(
        Namespace='AWS/S3',
        MetricName='BucketSizeBytes',
        Dimensions=[
            {'Name': 'BucketName', 'Value': bucket['Name']},
            {'Name': 'StorageType', 'Value': 'StandardStorage'},
        ],
        Statistics=['Average'],
        Period=3600,
        StartTime=(now - datetime.timedelta(days=1)).isoformat(),
        EndTime=now.isoformat(),
    )

    # No datapoints — probably an empty bucket; report it as 0.0 GiB
    # instead of silently omitting it.
    if not response["Datapoints"]:
        bucket_sizes.append((bucket["Name"], 0.0))
        continue

    # The CloudWatch metrics will have the single datapoint, so we just
    # report on it (iterating defensively in case more are returned).
    for item in response["Datapoints"]:
        size_gb = float(item["Average"]) / 1024.0 / 1024.0 / 1024.0
        bucket_sizes.append((bucket["Name"], size_gb))

# Sort descending by size. NOTE: the original used Python 2-only
# tuple-parameter unpacking — `lambda (b, s): s` — which is a
# SyntaxError on Python 3 (removed by PEP 3113); indexing the pair
# is the portable equivalent.
for bucket_name, size_gb in sorted(bucket_sizes, key=lambda pair: pair[1], reverse=True):
    # "{:,}".format adds thousands separators to the rounded size.
    print(bucket_name.ljust(45) + "{:,}".format(round(size_gb, 2)).rjust(25))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment