diff --git a/Airflow/docker-compose-traefik.yml b/Airflow/docker-compose-traefik.yml
new file mode 100644
index 00000000..ba2d5eb3
--- /dev/null
+++ b/Airflow/docker-compose-traefik.yml
@@ -0,0 +1,340 @@
+# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
+#                      Default: apache/airflow:2.8.4
+# AIRFLOW_UID - User ID in Airflow containers
+#               Default: 50000
+# AIRFLOW_PROJ_DIR - Base path to which all the files will be volumed.
+#                    Default: .
+# These configurations are mostly useful for standalone testing/running Airflow in test/try-out mode.
+#
+# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested).
+#                              Default: airflow
+# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
+#                              Default: airflow
+# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
+#                                Use this option ONLY for quick checks. Installing requirements at container
+#                                startup is done EVERY TIME the service is started.
+#                                A better way is to build a custom image or extend the official image
+#                                as described in https://airflow.apache.org/docs/docker-stack/build.html.
+#                                Default: ''
+#
+# Feel free to modify this file to suit your needs.
+---
+#### NETWORKS
+networks:
+  docker-traefik_front_network:
+    external: true
+  back_network:
+    driver: bridge
+    attachable: true
+
+x-airflow-common:
+  &airflow-common
+  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
+  # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
+  # and uncomment the "build" line below, then run `docker-compose build` to build the images.
+  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.8.4}
+  # build: .
+  environment:
+    &airflow-common-env
+    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
+    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
+    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
+    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
+    AIRFLOW__CORE__FERNET_KEY: ''
+    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
+    AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
+    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
+    AIRFLOW__SCHEDULER__STATSD_ON: 'true'
+    AIRFLOW__SCHEDULER__STATSD_HOST: statsd-exporter
+    AIRFLOW__SCHEDULER__STATSD_PORT: 9125
+    AIRFLOW__SCHEDULER__STATSD_PREFIX: airflow
+    # yamllint disable rule:line-length
+    # Use simple http server on scheduler for health checks
+    # See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
+    # yamllint enable rule:line-length
+    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
+    # WARNING: Use the _PIP_ADDITIONAL_REQUIREMENTS option ONLY for quick checks.
+    # For any other purpose (development, test and especially production usage) build/extend the Airflow image.
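+    # As a sketch (the requirements.txt file name is hypothetical), such an extension is typically a small Dockerfile, e.g.
+    #   FROM apache/airflow:2.8.4
+    #   COPY requirements.txt /requirements.txt
+    #   RUN pip install --no-cache-dir -r /requirements.txt
+    # built via the commented "build: ." line above instead of the stock image: line.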
+    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
+  volumes:
+    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
+    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
+    - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
+    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
+  user: "${AIRFLOW_UID:-50000}:0"
+  depends_on:
+    &airflow-common-depends-on
+    redis:
+      condition: service_healthy
+    postgres:
+      condition: service_healthy
+
+#### SERVICES
+services:
+
+### postgres
+  postgres:
+    container_name: airflow-postgres
+    hostname: airflow-postgres
+    image: postgres:15.6-alpine
+    environment:
+      PGDATA: ${POSTGRES_PATH}
+      POSTGRES_USER: ${POSTGRES_USER}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+      POSTGRES_DB: ${POSTGRES_DB}
+      POSTGRES_PORT: ${POSTGRES_PORT}
+      TZ: Europe/Paris
+    volumes:
+      - ./data:/var/lib/postgresql/data
+    healthcheck:
+      test: ["CMD", "pg_isready", "-U", "$${POSTGRES_USER}"]
+      interval: 10s
+      retries: 5
+      start_period: 5s
+    restart: always
+    networks:
+      - back_network
+
+### redis
+  redis:
+    container_name: airflow-redis
+    hostname: airflow-redis
+    image: redis:latest
+    expose:
+      - 6379
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 10s
+      timeout: 30s
+      retries: 50
+      start_period: 30s
+    restart: always
+    networks:
+      - back_network
+
+### airflow-webserver
+  airflow-webserver:
+    <<: *airflow-common
+    command: webserver
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://airflow.fr.dgs.group/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+    networks:
+      - docker-traefik_front_network
+      - back_network
+    labels:
+      - "traefik.enable=true"
+      - "traefik.docker.network=docker-traefik_front_network"
+# HTTP
+      - "traefik.http.routers.airflow-http.rule=Host(`airflow.fr.dgs.group`)"
+      - "traefik.http.routers.airflow-http.entrypoints=http"
+# HTTPS
+      - "traefik.http.routers.airflow-https.rule=Host(`airflow.fr.dgs.group`)"
+      - "traefik.http.routers.airflow-https.entrypoints=https"
+      - "traefik.http.routers.airflow-https.service=airflow-service"
+      - "traefik.http.routers.airflow-https.tls=true"
+# Middleware
+# Service
+      - "traefik.http.services.airflow-service.loadbalancer.server.port=8080"
+
+### airflow-scheduler
+  airflow-scheduler:
+    <<: *airflow-common
+    command: scheduler
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+    networks:
+      - docker-traefik_front_network
+      - back_network
+
+### airflow-worker
+  airflow-worker:
+    <<: *airflow-common
+    command: celery worker
+    healthcheck:
+      # yamllint disable rule:line-length
+      test:
+        - "CMD-SHELL"
+        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    environment:
+      <<: *airflow-common-env
+      # Required to handle warm shutdown of the celery workers properly
+      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
+      DUMB_INIT_SETSID: "0"
+    restart: always
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+    networks:
+      - docker-traefik_front_network
+      - back_network
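+  # No container_name is set for the worker, so (as an untested sketch) it can be scaled out with
+  # `docker compose up -d --scale airflow-worker=3`; Celery then spreads tasks across all workers.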
+
+### airflow-triggerer
+  airflow-triggerer:
+    <<: *airflow-common
+    command: triggerer
+    healthcheck:
+      test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+    networks:
+      - docker-traefik_front_network
+      - back_network
+
+### airflow-init
+  airflow-init:
+    <<: *airflow-common
+    entrypoint: /bin/bash
+    # yamllint disable rule:line-length
+    command:
+      - -c
+      - |
+        if [[ -z "${AIRFLOW_UID}" ]]; then
+          echo
+          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
+          echo "If you are on Linux, you SHOULD follow the instructions below to set "
+          echo "the AIRFLOW_UID environment variable, otherwise files will be owned by root."
+          echo "For other operating systems you can get rid of the warning with a manually created .env file:"
+          echo "    See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user"
+          echo
+        fi
+        one_meg=1048576
+        mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
+        cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
+        disk_available=$$(df / | tail -1 | awk '{print $$4}')
+        warning_resources="false"
+        if (( mem_available < 4000 )) ; then
+          echo
+          echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
+          echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
+          echo
+          warning_resources="true"
+        fi
+        if (( cpus_available < 2 )); then
+          echo
+          echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
+          echo "At least 2 CPUs recommended. You have $${cpus_available}"
+          echo
+          warning_resources="true"
+        fi
+        if (( disk_available < one_meg * 10 )); then
+          echo
+          echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
+          echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
+          echo
+          warning_resources="true"
+        fi
+        if [[ $${warning_resources} == "true" ]]; then
+          echo
+          echo -e "\033[1;33mWARNING!!!: You do not have enough resources to run Airflow (see above)!\e[0m"
+          echo "Please follow the instructions to increase the amount of resources available:"
+          echo "    https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin"
+          echo
+        fi
+        mkdir -p /sources/logs /sources/dags /sources/plugins
+        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
+        exec /entrypoint airflow version
+    # yamllint enable rule:line-length
+    environment:
+      <<: *airflow-common-env
+      _AIRFLOW_DB_MIGRATE: 'true'
+      _AIRFLOW_WWW_USER_CREATE: 'true'
+      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
+      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
+      _PIP_ADDITIONAL_REQUIREMENTS: ''
+    user: "0:0"
+    volumes:
+      - ${AIRFLOW_PROJ_DIR:-.}:/sources
+    networks:
+      - docker-traefik_front_network
+      - back_network
+
+### airflow-cli
+  airflow-cli:
+    <<: *airflow-common
+    profiles:
+      - debug
+    environment:
+      <<: *airflow-common-env
+      CONNECTION_CHECK_MAX_COUNT: "0"
+    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
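+    # Explicitly targeting this service activates its "debug" profile, so one-off commands can be run
+    # with e.g. `docker compose run --rm airflow-cli airflow info` (the command here is only an example).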
+    command:
+      - bash
+      - -c
+      - airflow
+    networks:
+      - docker-traefik_front_network
+      - back_network
+
+  statsd-exporter:
+    container_name: airflow-statsd-exporter
+    hostname: airflow-statsd-exporter
+    image: prom/statsd-exporter:v0.24.0
+    volumes:
+      - ./statsd/statsd.yaml:/home/statsd-mapping-configs.yaml
+    entrypoint: ["/bin/sh", "-c", "--"]
+    command: ["statsd_exporter --statsd.listen-udp=:9125 --web.listen-address=:9102 --log.level debug --statsd.mapping-config=/home/statsd-mapping-configs.yaml"]
+    ports:
+      - 9102:9102
+      - 9125:9125
+    expose:
+      - 9125/udp
+    restart: always
+    networks:
+      - docker-traefik_front_network
+      - back_network
+
+  # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up
+  # or by explicitly targeting it on the command line e.g. docker-compose up flower.
+  # See: https://docs.docker.com/compose/profiles/
+### flower
+  flower:
+    <<: *airflow-common
+    command: celery flower
+    profiles:
+      - flower
+    ports:
+      - "5555:5555"
+    healthcheck:
+      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+      start_period: 30s
+    restart: always
+    depends_on:
+      <<: *airflow-common-depends-on
+      airflow-init:
+        condition: service_completed_successfully
+    networks:
+      - docker-traefik_front_network
+      - back_network
diff --git a/Airflow/docker-compose.yml b/Airflow/docker-compose.yml
index 7aeae1dd..e43f6560 100644
--- a/Airflow/docker-compose.yml
+++ b/Airflow/docker-compose.yml
@@ -116,10 +116,10 @@ services:
   airflow-webserver:
     <<: *airflow-common
     command: webserver
-#    ports:
-#      - "8080:8080"
+    ports:
+      - "8080:8080"
     healthcheck:
-      test: ["CMD", "curl", "--fail", "http://airflow.fr.dgs.group/health"]
+      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -132,20 +132,6 @@ services:
     networks:
       - docker-traefik_front_network
       - back_network
-    labels:
-      - "traefik.enable=true"
-      - "traefik.docker.network=docker-traefik_front_network"
-# HTTP
-      - "traefik.http.routers.airflow-http.rule=Host(`airflow.fr.dgs.group`)"
-      - "traefik.http.routers.airflow-http.entrypoints=http"
-# HTTPS
-      - "traefik.http.routers.airflow-https.rule=Host(`airflow.fr.dgs.group`)"
-      - "traefik.http.routers.airflow-https.entrypoints=https"
-      - "traefik.http.routers.airflow-https.service=airflow-service"
-      - "traefik.http.routers.airflow-https.tls=true"
-# Middleware
-# Service
-      - "traefik.http.services.airflow-service.loadbalancer.server.port=8080"
 
 ### airflow-scheduler
   airflow-scheduler:
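Usage sketch (paths and ports taken from the diff above, everything else assumed): the Traefik-routed
stack would be started with `docker compose -f Airflow/docker-compose-traefik.yml up -d` and expects the
external docker-traefik_front_network to exist beforehand, while the standalone variant is started with
`docker compose -f Airflow/docker-compose.yml up -d` and publishes the webserver directly on port 8080.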