-
Notifications
You must be signed in to change notification settings - Fork 1.1k
/
docker-compose.yml
164 lines (151 loc) · 6.51 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
# docker-compose supports environment variable substitution with the ${VARIABLE_NAME} syntax.
# Environment variables can be sourced in a variety of ways. One of those ways is through
# a well known '.env' file located in the same folder as the docker-compose.yml file. See the Docker
# documentation for details: https://docs.docker.com/compose/environment-variables/#the-env-file
#
# This feature is being used to parameterize some values within this file. In this directory is also
# a .env file, which is actually a symbolic link to <examples-root>/utils/config.env. That file
# contains values which get substituted here when docker-compose parses this file.
#
# If you'd like to view the docker-compose.yml file rendered with its environment variable substitutions
# you can execute the `docker-compose config` command. Take note that some demos provide additional
# environment variable values by exporting them in a script prior to running `docker-compose up`.
---
# Compose file format version. Ignored by modern Compose (v2+) but harmless;
# kept for compatibility with older docker-compose releases.
version: '3.0'
volumes:
  # Named volume shared by the 'sqlite' and 'connect' services below.
  database:
services:
  # Kafka Streams microservices demo application. Depends on the Connect
  # worker so connector infrastructure is up before the services start.
  microservices:
    image: ${REPOSITORY}/kafka-streams-examples:${CONFLUENT_DOCKER_TAG}
    container_name: microservices
    ports:
      - "18894:18894"
    depends_on:
      - connect
    volumes:
      # Host-side scripts, logs, and configuration mounted into the container.
      - $PWD/scripts:/opt/docker/scripts
      - $PWD/logs:/opt/docker/logs
      # .env is a symlink to the shared config.env (see the header comments).
      - $PWD/.env:/opt/docker/config/config.env
      - $PWD/stack-configs:/opt/docker/stack-configs
      - $PWD/delta_configs:/opt/docker/delta_configs
    environment:
      LOG_DIR: /opt/docker/logs
      # With Docker, we don't need to worry about cleaning up the subprocesses of the container
      PIDS_FILE: /dev/null
      CONFLUENT: ${CONFLUENT}
      CONFIG_FILE: "/opt/docker/${CONFIG_FILE}"
    command:
      - bash
      - -c
      - |
        set -a
        source /opt/docker/delta_configs/env.delta
        /opt/docker/scripts/run-services.sh
connect:
image: ${REPOSITORY}/cp-server-connect:${CONFLUENT_DOCKER_TAG}
container_name: connect
depends_on:
- sqlite
ports:
- "8083:8083"
environment:
CONNECT_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS
CONNECT_REST_ADVERTISED_HOST_NAME: connect
CONNECT_GROUP_ID: "examples-microservices-orders"
CONNECT_REST_ADVERTISED_HOST_NAME: connect
CONNECT_SECURITY_PROTOCOL: SASL_SSL
CONNECT_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG
CONNECT_SASL_MECHANISM: PLAIN
CONNECT_CONFIG_STORAGE_TOPIC: connect-demo-configs
CONNECT_OFFSET_STORAGE_TOPIC: connect-demo-offsets
CONNECT_STATUS_STORAGE_TOPIC: connect-demo-statuses
CONNECT_REPLICATION_FACTOR: 3
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 3
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 3
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 3
CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: $SCHEMA_REGISTRY_URL
CONNECT_KEY_CONVERTER_BASIC_AUTH_CREDENTIALS_SOURCE: $BASIC_AUTH_CREDENTIALS_SOURCE
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO: $SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: $SCHEMA_REGISTRY_URL
CONNECT_VALUE_CONVERTER_BASIC_AUTH_CREDENTIALS_SOURCE: $BASIC_AUTH_CREDENTIALS_SOURCE
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO: $SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO
# CLASSPATH required due to CC-2422
CLASSPATH: "/usr/share/java/monitoring-interceptors/monitoring-interceptors-${CONFLUENT}.jar"
CONNECT_PLUGIN_PATH: '/usr/share/java,/usr/share/confluent-hub-components/,/connectors/'
CONNECT_PRODUCER_SECURITY_PROTOCOL: SASL_SSL
CONNECT_PRODUCER_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG
CONNECT_PRODUCER_SASL_MECHANISM: PLAIN
CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS
CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SECURITY_PROTOCOL: SASL_SSL
CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG
CONNECT_PRODUCER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: PLAIN
CONNECT_CONSUMER_SECURITY_PROTOCOL: SASL_SSL
CONNECT_CONSUMER_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG
CONNECT_CONSUMER_SASL_MECHANISM: PLAIN
CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_BOOTSTRAP_SERVERS: $BOOTSTRAP_SERVERS
CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SECURITY_PROTOCOL: SASL_SSL
CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_JAAS_CONFIG: $SASL_JAAS_CONFIG
CONNECT_CONSUMER_CONFLUENT_MONITORING_INTERCEPTOR_SASL_MECHANISM: PLAIN
volumes:
- database:/opt/docker/db/data
- $PWD/stack-configs:/opt/docker/stack-configs
command:
- bash
- -c
- |
echo "Installing connector plugins"
confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:$KAFKA_CONNECT_JDBC_VERSION
confluent-hub install --no-prompt confluentinc/kafka-connect-elasticsearch:$KAFKA_CONNECT_ES_VERSION
echo "Launching Kafka Connect worker"
/etc/confluent/docker/run
  # SQLite database container, built from the local ./db directory.
  # NOTE(review): with both 'build' and 'image' present, Compose builds from
  # ./db and tags the result 'sqlite:latest' rather than pulling an image —
  # confirm that tag name does not collide with anything else on the host.
  sqlite:
    image: sqlite:latest
    container_name: sqlite
    volumes:
      # Same named volume the 'connect' service mounts, so both see the db files.
      - database:/db/data
    build:
      context: ./db
    # Keep stdin/tty open so the container stays up for interactive use.
    stdin_open: true
    tty: true
  # Elasticsearch 5.6 sink target for the demo's Elasticsearch connector.
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:5.6.16
    container_name: elasticsearch
    depends_on:
      - connect
    ports:
      - "9200:9200"
    environment:
      # Security and monitoring disabled for the demo. Both the dotted and
      # UPPER_SNAKE env var forms are set — presumably to cover how different
      # image versions read settings; TODO(review): confirm both are needed.
      xpack.security.enabled: "false"
      XPACK_SECURITY_ENABLED: "false"
      xpack.monitoring.enabled: "false"
kibana:
image: docker.elastic.co/kibana/kibana:5.5.2
container_name: kibana
depends_on:
- elasticsearch
ports:
- "5601:5601"
environment:
xpack.security.enabled: "false"
XPACK_SECURITY_ENABLED: "false"
xpack.monitoring.enabled: "false"
discovery.type: "single-node"
elasticsearch.url: http://elasticsearch:9200
server.host: "0.0.0.0"
SERVER_HOST: "0.0.0.0"
server.name: "kibana"
SERVER_NAME: "kibana"
XPACK_GRAPH_ENABLED: "false"
XPACK_MONITORING_ENABLED: "false"
XPACK_REPORTING_ENABLED: "false"
XPACK_SECURITY_ENABLED: "false"
command:
- bash
- -c
- |
/usr/share/kibana/bin/kibana-plugin remove x-pack
/usr/local/bin/kibana-docker