Lambda code for that function :-
import boto3
def lambda_handler(event, context):
# Create an S3 client
s3_client = boto3.client('s3')
# Get a list of all buckets in the account
response = s3_client.list_buckets()
# Initialize a variable to track the total storage size
total_storage_size = 0
# Iterate through each bucket
for bucket in response['Buckets']:
# Get the bucket name
bucket_name = bucket['Name']
# Get the size of the bucket
bucket_size = get_bucket_size_recursive(s3_client, bucket_name)
# Add the bucket size to the total storage size
total_storage_size += bucket_size
# Print or log the total storage size
print(f"Total storage size for all buckets: {total_storage_size} GB")
# Return a response
return {
'statusCode': 200,
'body': f"Total storage size for all buckets: {total_storage_size} GB"
}
def get_bucket_size_recursive(s3_client, bucket_name):
# Get the size of the bucket recursively
bucket_size = 0
# Paginate through the objects in the bucket
paginator = s3_client.get_paginator('list_objects_v2')
pages = paginator.paginate(Bucket=bucket_name)
for page in pages:
# Sum the size of objects in the bucket
bucket_size += sum([obj['Size'] for obj in page.get('Contents', [])])
# Paginate through the subfolders (common prefixes)
pages = paginator.paginate(Bucket=bucket_name, Delimiter='/')
for page in pages:
for common_prefix in page.get('CommonPrefixes', []):
subfolder_size = get_bucket_size_recursive(s3_client, bucket_name + '/' + common_prefix['Prefix'])
bucket_size += subfolder_size
return bucket_size / (1024 ** 3) # Convert to GB
This is the code im used for to get the storage size of the s3. but i didn't get the exact storage match for this.
Test Event Name
TestEvent
Response
{
"statusCode": 200,
"body": "Total storage size: 128.38 GB"
}
but in the storage its 280.80 GB. its not getting exact match, help me with this