Created August 21, 2023 02:36 — save ptran32/ee3014216007e0baa82da1bd3deb6d4a to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---
# Airflow scheduler Deployment for Helm release "airflow-batchingestion-dev".
# NOTE(review): this is a live-cluster dump (`kubectl get deploy -o yaml`);
# server-managed fields (status, resourceVersion, uid, generation,
# creationTimestamp, deployment.kubernetes.io/revision) should be stripped
# before committing this as a source manifest — TODO confirm intent.
apiVersion: apps/v1
kind: Deployment
metadata:
  annotations:
    deployment.kubernetes.io/revision: "67"
    meta.helm.sh/release-name: airflow-batchingestion-dev
    meta.helm.sh/release-namespace: airflow-batchingestion
  creationTimestamp: "2023-06-28T12:15:59Z"
  generation: 67
  labels:
    app: batchingestion
    app.kubernetes.io/instance: airflow-batchingestion-dev
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: airflow
    component: scheduler
    dept: data-platforms
    env: just-data-sandbox
    envtype: dev
    feature: batchingestion
    helm.sh/chart: airflow-1.0.0
    owner: data-platforms-batchingestion
    system: airflow
  name: scheduler
  namespace: airflow-batchingestion
  resourceVersion: "70289497"
  uid: d8512a51-2695-48cc-801e-36e24fade557
spec:
  minReadySeconds: 10
  progressDeadlineSeconds: 600
  replicas: 2
  revisionHistoryLimit: 10
  selector:
    matchLabels:
      app.kubernetes.io/instance: airflow-batchingestion-dev
      app.kubernetes.io/name: airflow
      component: scheduler
  strategy:
    rollingUpdate:
      maxSurge: 25%
      maxUnavailable: 25%
    type: RollingUpdate
  template:
    metadata:
      annotations:
        checksum/config: fd9132be2784dd150c111ac47d8a41cd7c7b14180f6a08d759b79d86f563a203
        kubectl.kubernetes.io/restartedAt: "2023-08-17T21:23:50Z"
      creationTimestamp: null
      labels:
        app: batchingestion
        app.kubernetes.io/instance: airflow-batchingestion-dev
        app.kubernetes.io/managed-by: Helm
        app.kubernetes.io/name: airflow
        component: scheduler
        dept: data-platforms
        env: just-data-sandbox
        envtype: dev
        feature: batchingestion
        helm.sh/chart: airflow-1.0.0
        owner: data-platforms-batchingestion
        system: airflow
    spec:
      affinity:
        podAntiAffinity:
          # Soft preference: spread the two scheduler replicas across zones.
          preferredDuringSchedulingIgnoredDuringExecution:
          - podAffinityTerm:
              labelSelector:
                matchExpressions:
                - key: component
                  operator: In
                  values:
                  - scheduler
                - key: app.kubernetes.io/instance
                  operator: In
                  values:
                  - airflow-batchingestion-dev
              topologyKey: topology.kubernetes.io/zone
            weight: 1
      containers:
      - args:
        - scheduler
        env:
        - name: AIRFLOW__CORE__FERNET_KEY
          valueFrom:
            secretKeyRef:
              key: fernet-key
              name: fernet-key
        # SQL_ALCHEMY_CONN is set under both the legacy (core) and the
        # current (database) config sections; both read the same secret key.
        - name: AIRFLOW__CORE__SQL_ALCHEMY_CONN
          valueFrom:
            secretKeyRef:
              key: connection
              name: airflow-postgres
        - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
          valueFrom:
            secretKeyRef:
              key: connection
              name: airflow-postgres
        - name: POSTGRES_STATS
          valueFrom:
            secretKeyRef:
              key: stats
              name: airflow-postgres
        - name: AIRFLOW__SMTP__SMTP_PASSWORD
          valueFrom:
            secretKeyRef:
              key: smtp-password
              name: airflow-smtp
        envFrom:
        - configMapRef:
            name: airflow-config
        image: eu.gcr.io/just-data-infrastructure/airflow:2.6.0-python3.8-1
        imagePullPolicy: IfNotPresent
        # Probe chooses the health-check strategy by the installed Airflow
        # version; with the 2.6.0 image above only the first branch runs.
        livenessProbe:
          exec:
            command:
            - bash
            - -c
            - |
              version=`airflow version`
              echo "Current airflow version: ${version}"
              if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
                echo "Invalid version format. Expecting semver format: X.Y.Z"
                exit 1
              fi
              # Split the version into major, minor, and patch components
              IFS='.' read -r major minor patch <<< "$version"
              if (( major > 2 || (major == 2 && minor >= 5) || (major == 2 && minor == 5 && patch >= 0) )); then
                echo "$version is greater than or equal to 2.5.0."
                CONNECTION_CHECK_MAX_COUNT=0 AIRFLOW__LOGGING__LOGGING_LEVEL=ERROR exec /entrypoint \
                airflow jobs check --job-type SchedulerJob --local
              elif (( major > 2 || (major == 2 && minor >= 1) || (major == 2 && minor == 1 && patch >= 0) )); then
                echo "$version is greater than or equal to 2.1.0."
                CONNECTION_CHECK_MAX_COUNT=0 AIRFLOW__LOGGING__LOGGING_LEVEL=ERROR exec /entrypoint \
                airflow jobs check --job-type SchedulerJob --hostname $(hostname)
              else
                echo "$version is less than 2.1.0."
                # Pre-2.1 fallback. The inline Python must start at column 0
                # of the -c string, and the unhealthy paths must *raise*
                # SystemExit (a bare SystemExit(...) call never exits, so the
                # probe would always report healthy).
                CONNECTION_CHECK_MAX_COUNT=0 exec /entrypoint python -Wignore -c "
              from airflow.jobs.scheduler_job import SchedulerJob
              from airflow.utils.db import create_session
              from airflow.utils.net import get_hostname
              from airflow.utils.state import State
              from typing import List
              with create_session() as session:
                hostname = get_hostname()
                query = session \
                  .query(SchedulerJob) \
                  .filter_by(state=State.RUNNING, hostname=hostname) \
                  .order_by(SchedulerJob.latest_heartbeat.desc())
                jobs: List[SchedulerJob] = query.all()
                alive_jobs = [job for job in jobs if job.is_alive()]
                count_alive_jobs = len(alive_jobs)
                if count_alive_jobs == 1:
                  print(f'HEALTHY - {count_alive_jobs} alive SchedulerJob for: {hostname}')
                elif count_alive_jobs == 0:
                  raise SystemExit(f'UNHEALTHY - 0 alive SchedulerJob for: {hostname}')
                else:
                  raise SystemExit(f'UNHEALTHY - {count_alive_jobs} (more than 1) alive SchedulerJob for: {hostname}')
              "
              fi
          failureThreshold: 5
          initialDelaySeconds: 10
          periodSeconds: 30
          successThreshold: 1
          timeoutSeconds: 10
        name: airflow-scheduler
        resources:
          limits:
            cpu: "2"
            memory: 4Gi
          requests:
            cpu: "1"
            memory: 1Gi
        terminationMessagePath: /dev/termination-log
        terminationMessagePolicy: File
        # NOTE(review): mounting subPath "dags" at /opt/airflow (the
        # AIRFLOW_HOME root) looks unintended — /opt/airflow/dags is the
        # conventional target. TODO confirm before changing.
        volumeMounts:
        - mountPath: /opt/airflow
          name: airflow-home
          subPath: dags
        - mountPath: /opt/airflow/logs
          name: airflow-home
          subPath: logs
        - mountPath: /etc/airflow/k8s_pod_template.yaml
          name: k8s-pod-template-default
          subPath: k8s_pod_template.yaml
      dnsPolicy: ClusterFirst
      # Blocks scheduler start-up until the metadata DB schema is migrated.
      initContainers:
      - args:
        - db
        - check-migrations
        env:
        - name: AIRFLOW__CORE__FERNET_KEY
          valueFrom:
            secretKeyRef:
              key: fernet-key
              name: fernet-key
        - name: AIRFLOW__CORE__SQL_ALCHEMY_CONN
          valueFrom:
            secretKeyRef:
              key: connection
              name: airflow-postgres
        - name: AIRFLOW__DATABASE__SQL_ALCHEMY_CONN
          valueFrom:
            secretKeyRef:
              key: connection
              name: airflow-postgres
        - name: POSTGRES_STATS
          valueFrom:
            secretKeyRef:
              key: stats
              name: airflow-postgres
        - name: AIRFLOW__SMTP__SMTP_PASSWORD
          valueFrom:
            secretKeyRef:
              key: smtp-password
              name: airflow-smtp
        envFrom:
        - configMapRef:
            name: airflow-config
        # NOTE(review): registry differs from the main container
        # (eu.gcr.io/xxx vs eu.gcr.io/just-data-infrastructure) — looks like
        # a partial redaction; verify which registry is correct.
        image: eu.gcr.io/xxx/airflow:2.6.0-python3.8-1
        imagePullPolicy: IfNotPresent
        name: wait-for-airflow-migrations
        resources: {}
        terminationMessagePath: /dev/termination-log
        terminationMessagePolicy: File
      restartPolicy: Always
      schedulerName: default-scheduler
      securityContext:
        runAsGroup: 0
        runAsUser: 50000
      serviceAccount: scheduler
      serviceAccountName: scheduler
      terminationGracePeriodSeconds: 30
      volumes:
      - name: airflow-home
        persistentVolumeClaim:
          claimName: airflow-batchingestion-dev-nfs-pvc
      - configMap:
          defaultMode: 420
          name: k8s-pod-template
        name: k8s-pod-template-default
# Server-populated observed state; not part of the desired spec.
status:
  availableReplicas: 2
  conditions:
  - lastTransitionTime: "2023-06-28T12:15:59Z"
    lastUpdateTime: "2023-08-17T21:24:40Z"
    message: ReplicaSet "scheduler-666d75bd49" has successfully progressed.
    reason: NewReplicaSetAvailable
    status: "True"
    type: Progressing
  - lastTransitionTime: "2023-08-20T13:07:44Z"
    lastUpdateTime: "2023-08-20T13:07:44Z"
    message: Deployment has minimum availability.
    reason: MinimumReplicasAvailable
    status: "True"
    type: Available
  observedGeneration: 67
  readyReplicas: 2
  replicas: 2
  updatedReplicas: 2
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.