/
docker-compose.yml
74 lines (69 loc) · 2.11 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
---
# Compose stack: Airflow webserver (LocalExecutor) backed by a Postgres
# metadata DB, a separate application Postgres DB, and a standalone Spark
# pair (master + one worker) sharing a ./spark volume with Airflow.
version: '2.1'

services:
  # Airflow metadata database.
  postgres:
    image: postgres:9.6
    environment:
      - POSTGRES_USER=airflow
      - POSTGRES_PASSWORD=airflow
      - POSTGRES_DB=airflow

  # Airflow webserver built from the local Dockerfile.airflow_spark image.
  webserver:
    image: custom-docker-airflow-spark
    build:
      context: .
      dockerfile: Dockerfile.airflow_spark
    restart: always
    depends_on:
      - postgres
    environment:
      - LOAD_EX=n  # do not load the bundled example DAGs
      - EXECUTOR=Local
    volumes:
      - ../dags:/usr/local/airflow/dags
      - ../plugins:/usr/local/airflow/plugins
      - ./logs:/usr/local/airflow/logs
      - ../../requirements.txt:/requirements.txt
      - ./spark:/usr/local/airflow/spark
      - ../../config.cfg:/usr/local/airflow/config.cfg
      - ../../data:/usr/local/airflow/dags/data
    ports:
      - "8080:8080"
    command: webserver
    healthcheck:
      # Considered healthy once the webserver PID file exists.
      test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
      interval: 30s
      timeout: 30s
      retries: 3

  # Application database (distinct from the Airflow metadata DB);
  # published on host port 5433 so it does not clash with the default 5432.
  db:
    image: postgres:10.10-alpine
    restart: always
    environment:
      POSTGRES_DB: 'postgres'
      POSTGRES_USER: 'postgres'
      POSTGRES_PASSWORD: 'postgres'
    ports:
      - "5433:5432"
    volumes:
      # - /var/lib/postgresql/10/main:/var/lib/postgresql/data
      - ./script/init.sql:/docker-entrypoint-initdb.d/init.sql
      - ../../data:/data

  # Spark master; its 8080 UI is published on host 8088 because host 8080
  # is already taken by the Airflow webserver above.
  spark-master:
    image: bde2020/spark-master:2.4.4-hadoop2.7
    # container_name: spark-master
    ports:
      - "8088:8080"
      - "7077:7077"
    environment:
      - INIT_DAEMON_STEP=setup_spark
    volumes:
      - ./spark:/root/spark

  # Single Spark worker registering with spark-master on port 7077.
  spark-worker-1:
    image: bde2020/spark-worker:2.4.4-hadoop2.7
    # container_name: spark-worker-1
    depends_on:
      - spark-master
    ports:
      - "8081:8081"
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
    volumes:
      - ./spark:/root/spark