
Connect to AWS S3 from a Local Python Script


You can connect to AWS S3 from your local machine with a Python script using the boto3 package.

# install the boto3 package (run from a terminal, not inside Python)
pip install boto3

import os
import boto3
import pandas as pd

# set AWS credentials and region as environment variables
os.environ["AWS_DEFAULT_REGION"] = 'us-east-2'
os.environ["AWS_ACCESS_KEY_ID"] = 'your_access_key'
os.environ["AWS_SECRET_ACCESS_KEY"] = 'your_secret'

# authenticate to S3 using boto3 with explicit credentials
s3 = boto3.resource(
    service_name='s3',
    region_name='us-east-2',
    aws_access_key_id='your_access_key',
    aws_secret_access_key='your_secret'
)
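
Because the region and credentials are already exported as environment variables above, passing them again to boto3.resource is optional; boto3 can resolve them from its default credential chain. A minimal sketch of that shorter form:

# region and credentials are picked up from the environment
# (or from ~/.aws/credentials if you have run `aws configure`)
s3 = boto3.resource('s3')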

# Make dataframes
tab1 = pd.DataFrame({'a': [1, 2, 3], 'b': ['a', 'b', 'c']})
tab2 = pd.DataFrame({'a': [10, 20, 30], 'b': ['aa', 'bb', 'cc']})

# Save to csv
tab1.to_csv('tab1.csv')
tab2.to_csv('tab2.csv')

# Upload the files to an S3 bucket; here the bucket is named 'george-clooney'
# (the bucket must already exist, and bucket names must be lowercase without underscores)
s3.Bucket('george-clooney').upload_file(Filename='tab1.csv', Key='tab1.csv')
s3.Bucket('george-clooney').upload_file(Filename='tab2.csv', Key='tab2.csv')
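
If you would rather not write intermediate CSV files to disk, a DataFrame can also be uploaded from an in-memory buffer. A small sketch, where the key name tab1_in_memory.csv is just an illustrative choice:

import io

# serialize the DataFrame to CSV in memory and upload the text directly
csv_buffer = io.StringIO()
tab1.to_csv(csv_buffer)
s3.Object('george-clooney', 'tab1_in_memory.csv').put(Body=csv_buffer.getvalue())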

# list all the objects in the bucket
for obj in s3.Bucket('george-clooney').objects.all():
    print(obj)
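
When a bucket holds many objects, the listing can be narrowed to a key prefix; a short sketch, using the 'tab' prefix that matches the files uploaded above:

# list only the objects whose key starts with 'tab'
for obj in s3.Bucket('george-clooney').objects.filter(Prefix='tab'):
    print(obj.key)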

# Load a csv file from S3 directly into pandas
obj = s3.Bucket('george-clooney').Object('tab1.csv').get()
tab1 = pd.read_csv(obj['Body'], index_col=0)
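
Alternatively, if the s3fs package is installed (a separate dependency, e.g. pip install s3fs), pandas can read the object straight from an s3:// URL using the same environment credentials:

# requires the s3fs package; credentials come from the environment variables set above
tab1 = pd.read_csv('s3://george-clooney/tab1.csv', index_col=0)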

# Download a file from S3 and save it locally under a new name
s3.Bucket('george-clooney').download_file(Key='tab1.csv', Filename='tab1_downloaded.csv')
pd.read_csv('tab1_downloaded.csv', index_col=0)
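
When you are finished experimenting, the uploaded objects can be deleted so the bucket is left clean; a short, optional sketch:

# remove the test objects from the bucket
s3.Object('george-clooney', 'tab1.csv').delete()
s3.Object('george-clooney', 'tab2.csv').delete()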
