"Python Atomation : AWS S3 Bucket Configuration | Create, Upload, Delete"
- Anu Solanki
- Dec 21, 2023
- 2 min read

Install and import the boto3 and botocore libraries to connect with AWS:
!pip install boto3 botocore
import boto3
import time
from botocore.exceptions import NoCredentialsError, PartialCredentialsError, EndpointConnectionError, ClientError
Declare the important variables
You must change these variable values to match your own setup.
# AWS Credentials
aws_access_key = 'your aws_access_key'
aws_secret_key = 'your aws_secret_key'
# S3 Bucket Name
bucket_name = 'your bucket name'
# AWS Region
region_name = 'ap-south-1' # Replace with your desired AWS region, e.g., 'us-east-1'
# S3 Bucket and Object Configuration
local_file_path = '/content/drive/MyDrive/Colab Notebooks/IMDB_Movie_Ratings.xlsx' # Replace with the path to your local file
s3_object_key = 'IMDB_Movie_Ratings.xlsx' # Replace with the desired S3 object key
AWS S3 Client Initialization with Access Keys and Region Configuration
# Create S3 client object with the specified region
s3 = boto3.client('s3', aws_access_key_id=aws_access_key, aws_secret_access_key=aws_secret_key, region_name=region_name)
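Hardcoding access keys is fine for a quick demo, but in practice it is safer to let boto3 resolve credentials on its own. A minimal alternative sketch, assuming credentials are already set in environment variables or in ~/.aws/credentials:
# Alternative sketch: create the client without passing keys explicitly.
# boto3 resolves credentials from the environment (AWS_ACCESS_KEY_ID,
# AWS_SECRET_ACCESS_KEY), the shared credentials file, or an IAM role.
import boto3

s3 = boto3.client('s3', region_name='ap-south-1')  # region shown here is an example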
Function to Create an AWS S3 Bucket with Error Handling and Retries
# Function to Create an AWS S3 Bucket
def create_s3_bucket(bucket_name, aws_access_key, aws_secret_key, region_name):
    max_retries = 3  # Adjust the number of retries as needed
    retry_count = 0
    while retry_count < max_retries:
        try:
            # Check if the bucket already exists
            existing_buckets = [bucket['Name'] for bucket in s3.list_buckets()['Buckets']]
            if bucket_name not in existing_buckets:
                # Create the S3 bucket with the correct location constraint.
                # Note: us-east-1 is the default region and must not be passed
                # as a LocationConstraint, so it is handled separately.
                if region_name == 'us-east-1':
                    s3.create_bucket(Bucket=bucket_name)
                else:
                    s3.create_bucket(Bucket=bucket_name,
                                     CreateBucketConfiguration={'LocationConstraint': region_name})
                print(f"S3 bucket '{bucket_name}' created successfully in region '{region_name}'.")
                break
            else:
                print(f"S3 bucket '{bucket_name}' already exists. Choose a different name or proceed with your logic.")
                break
        except (NoCredentialsError, PartialCredentialsError) as e:
            print(f"Credentials not available or incorrect. {e}")
            break
        except EndpointConnectionError as e:
            print(f"Error connecting to AWS endpoint. {e}")
            break
        except ClientError as e:
            if e.response['Error']['Code'] == 'OperationAborted':
                retry_count += 1
                print(f"Retrying ({retry_count}/{max_retries})...")
                time.sleep(5)  # Introduce a delay between retries
                continue
            else:
                print(f"Error creating S3 bucket. {e}")
                break
        except Exception as e:
            print(f"An unexpected error occurred. {e}")
            retry_count += 1
            print(f"Retrying ({retry_count}/{max_retries})...")
            time.sleep(5)  # Introduce a delay between retries
            continue
    if retry_count == max_retries:
        print(f"Failed to create S3 bucket '{bucket_name}' after {max_retries} attempts.")
Run the create_s3_bucket() Function
create_s3_bucket(bucket_name, aws_access_key, aws_secret_key, region_name)
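To confirm the bucket was actually created, you can call head_bucket, which raises a ClientError when the bucket does not exist or is not accessible. A small sketch using the variables defined above:
# Verify the bucket exists; head_bucket raises ClientError on failure
try:
    s3.head_bucket(Bucket=bucket_name)
    print(f"Bucket '{bucket_name}' is reachable.")
except ClientError as e:
    print(f"Bucket check failed: {e}")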
Function to Upload a Local File to an AWS S3 Bucket
def upload_to_s3(local_file, bucket, s3_key):
    try:
        # Upload the file
        s3.upload_file(local_file, bucket, s3_key)
        print(f"File '{local_file}' uploaded to '{bucket}' with key '{s3_key}' successfully.")
    except FileNotFoundError:
        print(f"The file '{local_file}' was not found.")
    except NoCredentialsError:
        print("Credentials not available or incorrect.")
    except ClientError as e:
        print(f"Error uploading file to S3. {e}")
Run the upload_to_s3() Function
# Upload the file to S3
upload_to_s3(local_file_path, bucket_name, s3_object_key)
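To quickly check the uploaded object, you can generate a presigned URL, which grants temporary read access without making the bucket public. A minimal sketch (the one-hour expiry is just an example):
# Generate a temporary (1 hour) download link for the uploaded object
url = s3.generate_presigned_url(
    'get_object',
    Params={'Bucket': bucket_name, 'Key': s3_object_key},
    ExpiresIn=3600  # seconds
)
print(f"Presigned URL: {url}")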
Function to Delete an AWS S3 Bucket and Its Objects
def delete_s3_bucket(bucket_name, aws_access_key, aws_secret_key, aws_region):
    try:
        # List objects in the bucket.
        # Note: list_objects_v2 returns at most 1,000 keys per call;
        # see the paginated sketch below for larger buckets.
        response = s3.list_objects_v2(Bucket=bucket_name)
        # Check if the bucket is not empty
        if 'Contents' in response:
            # Delete all objects in the bucket
            objects = [{'Key': obj['Key']} for obj in response['Contents']]
            s3.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
            print(f"All objects deleted from bucket '{bucket_name}'.")
        # Delete the bucket (it must be empty before it can be deleted)
        s3.delete_bucket(Bucket=bucket_name)
        print(f"Bucket '{bucket_name}' deleted successfully.")
    except Exception as e:
        print(f"Error deleting bucket: {e}")
Run the delete_s3_bucket() Function
delete_s3_bucket(bucket_name, aws_access_key, aws_secret_key, region_name)
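The delete function above handles up to 1,000 objects, the per-call limit of list_objects_v2. For larger buckets, a paginator can empty the bucket in batches before deleting it; a sketch along the same lines:
# Sketch: empty a large bucket page by page, then delete it
paginator = s3.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket=bucket_name):
    if 'Contents' in page:
        objects = [{'Key': obj['Key']} for obj in page['Contents']]
        s3.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
s3.delete_bucket(Bucket=bucket_name)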