@febridev
Created July 6, 2023 06:38
version: '3'
services:
  postgres:
    image: postgres:13
    networks:
      - default_net
    env_file:
      - .env
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always
  scheduler:
    build: .
    command: scheduler
    restart: on-failure
    networks:
      - default_net
    depends_on:
      - postgres
    env_file:
      - .env
    environment:
      # _PIP_ADDITIONAL_REQUIREMENTS installs extra packages at container start;
      # convenient for development, but bake them into the image for production.
      _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-apache-airflow-providers-google apache-airflow-providers-apache-spark kaggle}
      # NOTE: avoid hard-coding credentials here; move KAGGLE_USERNAME/KAGGLE_KEY
      # into the .env file loaded above, and rotate any key that has been
      # committed to a public gist.
      KAGGLE_USERNAME: febridev
      KAGGLE_KEY: 591bc2adbb0869b1c8cff89f18fe5ec0
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
      - ./scripts:/opt/airflow/scripts
      - ~/.google/credentials/:/.google/credentials
      - ./spark/app:/usr/local/spark/app # Spark scripts folder (must be the same path in Airflow and the Spark cluster)
      - ./spark/resources:/usr/local/spark/resources # Resources folder (must be the same path in Airflow and the Spark cluster)
  webserver:
    build: .
    entrypoint: ./scripts/entrypoint.sh
    restart: on-failure
    networks:
      - default_net
    depends_on:
      - postgres
      - scheduler
    env_file:
      - .env
    environment:
      _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-apache-airflow-providers-google apache-airflow-providers-apache-spark kaggle}
      KAGGLE_USERNAME: febridev
      KAGGLE_KEY: 591bc2adbb0869b1c8cff89f18fe5ec0
    volumes:
      - ./dags:/opt/airflow/dags
      - ./logs:/opt/airflow/logs
      - ./plugins:/opt/airflow/plugins
      - ~/.google/credentials/:/.google/credentials:ro
      - ./scripts:/opt/airflow/scripts
      - ./spark/app:/usr/local/spark/app # Spark scripts folder (must be the same path in Airflow and the Spark cluster)
      - ./spark/resources:/usr/local/spark/resources # Resources folder (must be the same path in Airflow and the Spark cluster)
    user: "${AIRFLOW_UID:-50000}:0"
    ports:
      - "8090:8080" # Airflow UI published on host port 8090
    healthcheck:
      test: ["CMD-SHELL", "[ -f /home/airflow/airflow-webserver.pid ]"]
      interval: 30s
      timeout: 30s
      retries: 3
networks:
  default_net:

volumes:
  postgres-db-volume:
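
All three services read their configuration from a .env file that is not included in the gist. A minimal sketch of what it plausibly contains, assuming the stock Airflow-on-Postgres setup: the variable names follow the official postgres image and Airflow 2.x conventions, and every value below is a placeholder, not something taken from the original.

# .env (sketch; all values are placeholders)
AIRFLOW_UID=50000
POSTGRES_USER=airflow
POSTGRES_PASSWORD=airflow
POSTGRES_DB=airflow
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres/airflow
AIRFLOW__CORE__EXECUTOR=LocalExecutor
AIRFLOW__CORE__LOAD_EXAMPLES=false
# Keep the Kaggle credentials here instead of hard-coding them in the compose file:
KAGGLE_USERNAME=...
KAGGLE_KEY=...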
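
The webserver boots through ./scripts/entrypoint.sh, which the gist also omits. A typical minimal version, assuming its job is to run migrations and create a first account before launching the server; this is a sketch, not the original script, and the admin credentials are placeholders.

#!/usr/bin/env bash
# scripts/entrypoint.sh (sketch): migrate the metadata DB, ensure an admin
# user exists, then exec the webserver so it becomes PID 1.
set -euo pipefail

airflow db upgrade

airflow users create \
  --username admin --password admin \
  --firstname Admin --lastname User \
  --role Admin --email admin@example.com || true

exec airflow webserver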
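
Both Airflow services build from a Dockerfile in the same directory (build: .), which is not shown either. Given the Spark provider in _PIP_ADDITIONAL_REQUIREMENTS and the shared spark-submit paths, a plausible sketch is an official Airflow base image plus a Java runtime; the base tag is an assumption.

# Dockerfile (sketch; the real one is not in the gist)
FROM apache/airflow:2.6.3

# spark-submit (pulled in via the Spark provider's pyspark dependency) needs a JVM.
USER root
RUN apt-get update \
 && apt-get install -y --no-install-recommends default-jre \
 && rm -rf /var/lib/apt/lists/*
USER airflow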
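
Once those supporting files exist, the stack starts with the usual compose workflow, and the UI is reachable on the published host port:

docker compose up -d --build
docker compose ps    # wait until the webserver healthcheck reports healthy
# then open http://localhost:8090 in a browser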