Skip to content

Instantly share code, notes, and snippets.

@asolera
Created February 16, 2021 20:33
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save asolera/7a978f3b7663f6812bcf935efc9467fd to your computer and use it in GitHub Desktop.
Airflow 2.0.1 with Docker Compose and performance fixes
version: '3'

# Shared configuration for every Airflow container (webserver, scheduler,
# worker, init, flower). Services pull this in with `<<: *airflow-common`;
# the environment mapping is separately anchored so airflow-init can merge
# it and add its own bootstrap variables (merge keys are shallow).
x-airflow-common:
  &airflow-common
  # Override the image via AIRFLOW_IMAGE_NAME in .env if needed.
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.0.1-python3.8}
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    # NOTE(review): a hard-coded Fernet key in VCS means anyone with the file
    # can decrypt stored connections — prefer injecting it via .env.
    AIRFLOW__CORE__FERNET_KEY: '46BKJoQYlPPOexq0OhDZnIlNepKFf87WFwLbfzqDDho='
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'True'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'False'
    AIRFLOW__WEBSERVER__RBAC: 'False'
    AIRFLOW__CORE__CHECK_SLAS: 'False'
    AIRFLOW__CORE__STORE_SERIALIZED_DAGS: 'False'
    # Performance tuning: slower heartbeat / file scanning, longer worker
    # lifetimes and request timeout. Quoted so Compose passes the values
    # through as strings, consistent with the other env values above.
    AIRFLOW__SCHEDULER__SCHEDULER_HEARTBEAT_SEC: '10'
    AIRFLOW__SCHEDULER__MIN_FILE_PROCESS_INTERVAL: '60'
    AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL: '1800'
    AIRFLOW__WEBSERVER__WEB_SERVER_WORKER_TIMEOUT: '300'
  volumes:
    - ./dags:/opt/airflow/dags
    - airflow-webserver-logs:/opt/airflow/logs
  # Run as the host user so files created in ./dags keep sane ownership.
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy
services:
  # Metadata database for Airflow and Celery result backend.
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - pgdata:/var/lib/postgresql/data
      - airflow-database-logs:/var/lib/postgresql/data/log
    # Raise max_connections: scheduler + workers + webserver each hold pools.
    command: >
      postgres
      -c listen_addresses=*
      -c max_connections=200
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  # Celery message broker.
  redis:
    image: redis:latest
    # Port mappings quoted — unquoted HOST:CONTAINER can hit YAML's
    # sexagesimal-integer trap for small port numbers.
    ports:
      - "6379:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    ports:
      - "8080:8080"
    # Entrypoint overridden to install an extra dependency before start.
    # NOTE(review): installing at boot slows restarts and needs network;
    # consider baking azure-storage-blob into a custom image instead.
    entrypoint: /bin/bash
    command: -c "pip install azure-storage-blob && airflow webserver"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    restart: always

  # One-shot bootstrap: migrates the DB and creates the admin user, then
  # exits (the airflow 2.0 entrypoint reacts to the _AIRFLOW_* variables).
  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      # Explicit keys here override the merged-in anchor values.
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}

  # Celery monitoring UI.
  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - "5555:5555"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always
# Named volumes. pgdata holds the Postgres data dir; the two *-logs volumes
# persist Airflow and Postgres logs across container restarts.
# NOTE(review): postgres-db-volume is declared but not mounted by any service
# visible here (postgres uses pgdata) — confirm it is unused before removing.
volumes:
  postgres-db-volume:
  airflow-database-logs:
  airflow-webserver-logs:
  pgdata:
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment