Connect to Amazon Redshift and S3 from Python, with credentials loaded from a .env file
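# The classes below read all of their credentials from a .env file discovered
# with find_dotenv(). An illustrative .env might look like this (every value
# is a placeholder, not a real credential; 5439 is Redshift's default port):
#
#   DATABASE_ENDPOINT=mycluster.abc123.us-east-1.redshift.amazonaws.com
#   DATABASE_NAME=dev
#   DATABASE_USER=awsuser
#   DATABASE_PASSWORD=changeme
#   PORT=5439
#   AWS_ACCESS_KEY=AKIA...
#   AWS_SECRET_ACCESS_KEY=...
#   AWS_BUCKET=my-bucket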
import os
from dotenv import load_dotenv, find_dotenv
import psycopg2
import pandas as pd
import boto3
class Redshift:
    def __init__(self):
        '''Constructor for this class.'''
        # find .env automagically by walking up directories until it's found
        dotenv_path = find_dotenv()
        # load up the entries as environment variables
        load_dotenv(dotenv_path)
        # credentials
        self.database_endpoint = os.environ.get("DATABASE_ENDPOINT")
        self.database_name = os.environ.get("DATABASE_NAME")
        self.database_user = os.environ.get("DATABASE_USER")
        self.database_password = os.environ.get("DATABASE_PASSWORD")
        self.port = os.environ.get("PORT")

    def connect(self):
        # connect using psycopg2
        connection = psycopg2.connect(dbname=self.database_name,
                                      host=self.database_endpoint,
                                      port=self.port,
                                      user=self.database_user,
                                      password=self.database_password)
        return connection
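
# A hedged usage sketch, not part of the original gist: run a query through
# the connection above and load the rows into a pandas DataFrame. The default
# query is purely illustrative; cursor.description is standard DB-API column
# metadata, so the first element of each entry is the column name.
def demo_redshift_query(sql="SELECT 1;"):
    conn = Redshift().connect()
    try:
        with conn.cursor() as cur:
            cur.execute(sql)
            columns = [desc[0] for desc in cur.description]
            return pd.DataFrame(cur.fetchall(), columns=columns)
    finally:
        conn.close()
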
class S3:
    def __init__(self):
        '''Constructor for this class.'''
        # find .env automagically by walking up directories until it's found
        dotenv_path = find_dotenv()
        # load up the entries as environment variables
        load_dotenv(dotenv_path)
        # credentials
        self.AWS_KEY = os.environ.get("AWS_ACCESS_KEY")
        self.AWS_SECRET = os.environ.get("AWS_SECRET_ACCESS_KEY")
        self.AWS_BUCKET = os.environ.get("AWS_BUCKET")

    def connect(self):
        # note: __init__ stores the credentials as AWS_KEY and AWS_SECRET
        client = boto3.client('s3',
                              aws_access_key_id=self.AWS_KEY,
                              aws_secret_access_key=self.AWS_SECRET)
        return client
    def read_file(self, file_name):
        # stream the object body from S3 straight into pandas
        client = self.connect()
        obj = client.get_object(Bucket=self.AWS_BUCKET, Key=file_name)
        df = pd.read_csv(obj['Body'], header=0)
        return df
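
# Another hedged sketch, not in the original gist: read a CSV out of the
# configured bucket. The key "data.csv" is a placeholder and assumes the
# AWS_* entries in .env point at a reachable bucket containing that object.
if __name__ == "__main__":
    s3 = S3()
    df = s3.read_file("data.csv")
    print(df.head())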