Hack to fetch all bucket sizes in S3
from datetime import datetime, timedelta
import math

import boto3

cloudwatch = boto3.client('cloudwatch')
s3 = boto3.resource('s3')


def get_bucket_size(bucket):
    # S3 publishes BucketSizeBytes to CloudWatch once per day, so look back
    # two days and take whichever datapoint comes back first. Note this only
    # counts the StandardStorage class; a bucket holding only IA or Glacier
    # objects will report 0 here.
    try:
        return cloudwatch.get_metric_statistics(
            Namespace='AWS/S3', MetricName='BucketSizeBytes',
            StartTime=datetime.utcnow() - timedelta(days=2),
            EndTime=datetime.utcnow(), Period=86400,
            Statistics=['Average'], Unit='Bytes',
            Dimensions=[
                {'Name': 'BucketName', 'Value': bucket},
                {'Name': 'StorageType', 'Value': 'StandardStorage'},
            ])['Datapoints'][0]['Average']
    except IndexError:
        # No datapoints: the bucket is empty (or too new to have a metric).
        return 0


def convert_size(size):
    # Render a byte count as a human-readable string, e.g. 1536 -> '1.5 KB'.
    if size == 0:
        return '0 B'
    size_name = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
    i = int(math.floor(math.log(size, 1024)))
    return '%s %s' % (round(size / math.pow(1024, i), 2), size_name[i])


for bucket in s3.buckets.all():
    print(bucket.name, convert_size(get_bucket_size(bucket.name)))
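
One caveat: CloudWatch metrics are regional, and S3 storage metrics land in the region where each bucket lives, so the single cloudwatch client above only sees buckets in its default region. A minimal sketch of a per-region lookup, assuming the caller has s3:GetBucketLocation permission (the helper name cloudwatch_for is hypothetical, not part of the original script):

import boto3

s3_client = boto3.client('s3')

def cloudwatch_for(bucket_name):
    # get_bucket_location returns a null LocationConstraint for us-east-1,
    # so fall back to that region explicitly.
    region = s3_client.get_bucket_location(
        Bucket=bucket_name)['LocationConstraint'] or 'us-east-1'
    return boto3.client('cloudwatch', region_name=region)

Swapping cloudwatch.get_metric_statistics(...) inside get_bucket_size for cloudwatch_for(bucket).get_metric_statistics(...) would then pick up buckets in every region, at the cost of one extra API call per bucket.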