Creating an S3 hook in Apache Airflow
import datetime
import logging

from airflow import DAG
from airflow.models import Variable
from airflow.operators.python_operator import PythonOperator
from airflow.hooks.S3_hook import S3Hook


def list_keys():
    # The connection id and Variable names must match entries created in the
    # Airflow UI (Admin -> Connections and Admin -> Variables) or via the CLI.
    hook = S3Hook(aws_conn_id='aws_credentials')
    bucket = Variable.get('s3_bucket')
    prefix = Variable.get('s3_prefix')
    logging.info(f"Listing Keys from {bucket}/{prefix}")
    keys = hook.list_keys(bucket, prefix=prefix)
    for key in keys:
        logging.info(f"- s3://{bucket}/{key}")


dag = DAG(
    'unique_name_of_this_dag',
    # a fixed start_date is generally preferable to datetime.now()
    start_date=datetime.datetime.now())

list_task = PythonOperator(
    task_id="list_keys",
    python_callable=list_keys,
    dag=dag
)
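
Beyond listing keys, the same hook exposes helpers for reading and writing objects. The sketch below is a minimal, hedged example assuming the same aws_credentials connection and Variables as above, and that the objects are small text files; read_key is a standard S3Hook method, but the read_first_key task itself is purely illustrative.

def read_first_key():
    hook = S3Hook(aws_conn_id='aws_credentials')
    bucket = Variable.get('s3_bucket')
    prefix = Variable.get('s3_prefix')
    keys = hook.list_keys(bucket, prefix=prefix)
    if not keys:
        logging.info(f"No keys found under s3://{bucket}/{prefix}")
        return
    # read_key returns the object body as a decoded string,
    # so this is only suitable for small text objects.
    body = hook.read_key(keys[0], bucket_name=bucket)
    logging.info(f"First 200 characters of s3://{bucket}/{keys[0]}: {body[:200]}")


read_task = PythonOperator(
    task_id="read_first_key",
    python_callable=read_first_key,
    dag=dag
)

The aws_credentials connection (type "Amazon Web Services", with the access key id and secret key) and the s3_bucket / s3_prefix Variables can be created in the Airflow UI; the exact CLI syntax for doing the same varies between Airflow 1.x and 2.x, so check the documentation for your version.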