diff --git a/terraform/modules/services/airflow/docker-compose.yml b/terraform/modules/services/airflow/docker-compose.yml
index ad6816a6..159e43dc 100644
--- a/terraform/modules/services/airflow/docker-compose.yml
+++ b/terraform/modules/services/airflow/docker-compose.yml
@@ -1,5 +1,3 @@
-
-
 volumes:
   data:
 
@@ -21,8 +19,6 @@ services:
       - data:/airflow
 
   scheduler:
-    # depends_on:
-    #   - "airflowinit"
     image: apache/airflow:2.6.2
     container_name: airflow-scheduler
     command: scheduler
@@ -48,14 +44,15 @@ services:
     user: "${AIRFLOW_UID:-50000}:0"
     volumes:
       - data:/airflow
+    depends_on:
+      airflowinit:
+        condition: service_completed_successfully
 
   webserver:
     image: apache/airflow:2.6.2
     container_name: airflow-webserver
    command: webserver -w 4
-    # depends_on:
-    #   - "airflowinit"
     ports:
       - 80:8080
     restart: always
 
@@ -81,32 +78,38 @@ services:
     user: "${AIRFLOW_UID:-50000}:0"
     volumes:
       - data:/airflow
+    depends_on:
+      airflowinit:
+        condition: service_completed_successfully
 
-  # only need to run this once. Ref 3d1a6b8 has this uncommented.
-  # airflowinit:
-  #   image: apache/airflow:2.6.2
-  #   container_name: airflow-init
-  #   environment:
-  #     AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
-  #     SECRET_KEY: ${SECRET_KEY}
-  #     AIRFLOW__WEBSERVER__SECRET_KEY: ${SECRET_KEY}
-  #     AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${DB_URL}
-  #     _AIRFLOW_DB_UPGRADE: 'True'
-  #     _AIRFLOW_WWW_USER_CREATE: 'True'
-  #     _AIRFLOW_WWW_USER_USERNAME: 'airflow'
-  #     _AIRFLOW_WWW_USER_PASSWORD: ${PASSWORD}
-  #     AIRFLOW__CORE__DAGS_FOLDER: "/airflow/dags"
-  #     AIRFLOW__LOGGING__BASE_LOG_FOLDER: "/airflow/logs"
-  #     AIRFLOW_UID: "50000"
-  #   user: "${AIRFLOW_UID:-50000}:0"
-  #   command: >
-  #     bash -c "pip install apache-airflow[amazon]
-  #     && mkdir -p /airflow/logs /airflow/dags /airflow/plugins
-  #     && chmod -v 777 /airflow/{logs,dags}
-  #     && airflow db init -v"
-  #   volumes:
-  #     - data:/airflow
+  # One-time init job; the other services gate on its successful completion
+  # via depends_on (condition: service_completed_successfully).
+  airflowinit:
+    image: apache/airflow:2.6.2
+    container_name: airflow-init
+    environment:
+      AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
+      SECRET_KEY: ${SECRET_KEY}
+      AIRFLOW__WEBSERVER__SECRET_KEY: ${SECRET_KEY}
+      AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${DB_URL}
+      _AIRFLOW_DB_UPGRADE: 'True'
+      _AIRFLOW_WWW_USER_CREATE: 'True'
+      _AIRFLOW_WWW_USER_USERNAME: 'airflow'
+      _AIRFLOW_WWW_USER_PASSWORD: ${PASSWORD}
+      AIRFLOW__CORE__DAGS_FOLDER: "/airflow/dags"
+      AIRFLOW__LOGGING__BASE_LOG_FOLDER: "/airflow/logs"
+      AIRFLOW_UID: "50000"
+    user: "${AIRFLOW_UID:-50000}:0"
+    command: >
+      bash -c "echo 'Installing dependencies'
+      && pip install --quiet apache-airflow[amazon]
+      && echo 'Making directories'
+      && mkdir -p /airflow/logs /airflow/dags /airflow/plugins
+      && chmod -v 777 /airflow/{logs,dags}
+      && echo 'Initializing database'
+      && airflow db init -v"
+    volumes:
+      - data:/airflow
 
 
   sync-s3:
     image: amazon/aws-cli
@@ -122,5 +125,6 @@ services:
       ENVIRONMENT: $ENVIRONMENT
       BUCKET: $BUCKET
     restart: always
-    # depends_on:
-    #   - "airflowinit"
+    depends_on:
+      airflowinit:
+        condition: service_completed_successfully