Skip to content

Commit

Permalink
Add AIRFLOW_ENV_FILE parameter to docker-compose file
Browse files Browse the repository at this point in the history
For passing any additional parameters to Airflow running inside
the docker container
  • Loading branch information
michael-kotliar committed Mar 16, 2021
1 parent db14982 commit 9a9978d
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 1 deletion.
10 changes: 9 additions & 1 deletion packaging/docker_compose/local_executor/.env
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,13 @@
# and CWL_PICKLE_FOLDER folders are mounted with the same principle only for
# consistency reason. If PROCESS_REPORT_URL is not "" the correspondent
# connection will be created and CWL-Airflow will try to report workflow
# execution progress, statuses and reports
# execution progress, statuses and reports.
# You can use the AIRFLOW_ENV_FILE parameter to set the path to the environment
# file that will include parameters used by Airflow. If no additional parameters
# are needed, provide a path to an empty file. Docker-compose will fail to start
# if the file doesn't exist. This file may look as follows:
# AIRFLOW__CORE__PARALLELISM=1
# AIRFLOW__CORE__DAG_CONCURRENCY=1

# CWL-Airflow
AIRFLOW_HOME=/Users/tester/compose_airflow
Expand All @@ -57,6 +63,8 @@ CWL_PICKLE_FOLDER=/Users/tester/compose_airflow/cwl_pickle_folder
AIRFLOW_WEBSERVER_PORT=8080
CWL_AIRFLOW_API_PORT=8081
PROCESS_REPORT_URL=""
AIRFLOW_ENV_FILE=airflow_settings.env


# Database
MYSQL_ROOT_PASSWORD=admin
Expand Down
Empty file.
6 changes: 6 additions & 0 deletions packaging/docker_compose/local_executor/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ x-aliases: # if it starts with x- it's ignored by docker-compose
- AIRFLOW__CORE__DAG_PROCESSOR_MANAGER_LOG_LOCATION=${AIRFLOW_HOME}/logs/dag_processor_manager/dag_processor_manager.log
- AIRFLOW__CORE__PLUGINS_FOLDER=${AIRFLOW_HOME}/plugins
- AIRFLOW__SCHEDULER__CHILD_PROCESS_LOG_DIRECTORY=${AIRFLOW_HOME}/logs/scheduler
- &airflow_env_file
env_file:
- ${AIRFLOW_ENV_FILE}
- &airflow_volumes
volumes:
- ${AIRFLOW_HOME}:${AIRFLOW_HOME}
Expand All @@ -39,6 +42,7 @@ services:
privileged: true # maybe I don't actually need it here
restart: always
<<: *airflow_env_var
<<: *airflow_env_file
command: start_scheduler.sh
depends_on:
- mysql
Expand All @@ -52,6 +56,7 @@ services:
- ${AIRFLOW_WEBSERVER_PORT}:8080
<<: *airflow_volumes
<<: *airflow_env_var
<<: *airflow_env_file
restart: always
command: start_webserver.sh
depends_on:
Expand All @@ -67,6 +72,7 @@ services:
- ${CWL_AIRFLOW_API_PORT}:8081
<<: *airflow_volumes
<<: *airflow_env_var
<<: *airflow_env_file
restart: always
command: start_apiserver.sh --replay 60 --host 0.0.0.0
depends_on:
Expand Down
6 changes: 6 additions & 0 deletions tests/run_conformance_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,12 @@ echo "Cleaning temporary directory ${TEMP}"
rm -rf ${TEMP} && mkdir ${TEMP}

echo "Setting environment variables for docker-compose"
export AIRFLOW_ENV_FILE="${TEMP}/airflow_settings.env"
echo "AIRFLOW__CORE__PARALLELISM=1" >> ${AIRFLOW_ENV_FILE}
echo "AIRFLOW__CORE__DAG_CONCURRENCY=1" >> ${AIRFLOW_ENV_FILE}
echo "AIRFLOW__SCHEDULER__DAG_DIR_LIST_INTERVAL=60" >> ${AIRFLOW_ENV_FILE}
echo "AIRFLOW__CORE__HOSTNAME_CALLABLE=socket.gethostname" >> ${AIRFLOW_ENV_FILE}

export AIRFLOW_HOME="${TEMP}/airflow"
export CWL_TMP_FOLDER="${TEMP}/airflow/cwl_tmp_folder"
export CWL_INPUTS_FOLDER="${TEMP}/airflow/cwl_inputs_folder"
Expand Down

0 comments on commit 9a9978d

Please sign in to comment.