docker-compose.yml
---
version: "2"
networks:
kafka_dse_network:
driver: bridge
ipam:
config:
- subnet: 172.20.10.0/24
gateway: 172.20.10.1
services:
###
### PYTHON IMPORTER
### TODO: set this up; not currently working with the current Python code
# python:
# build: ./python
# image: python-kafka-importer:latest
# container_name: python-kafka-importer
###
### KAFKA ZOOKEEPER
###
zookeeper:
image: confluentinc/cp-zookeeper:5.5.1
container_name: kafka_zk
ports:
- "22181:2181"
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
networks:
kafka_dse_network:
ipv4_address: 172.20.10.11
###
### KAFKA BROKER
###
kafka:
# latest as of Aug 2020. Provides Kafka 2.5.0
image: confluentinc/cp-kafka:5.5.1
container_name: cp_kafka_007
depends_on:
- zookeeper
ports:
- 29092:29092
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: 172.20.10.11:2181
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://172.20.10.12:9092,PLAINTEXT_HOST://localhost:29092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
networks:
kafka_dse_network:
ipv4_address: 172.20.10.12
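# The broker advertises two listeners: 172.20.10.12:9092 for containers on the compose
# network and localhost:29092 for clients on the host. A rough smoke test from the host
# (a sketch only, not part of this repo; assumes the Kafka CLI tools are installed locally
# and uses a hypothetical "test" topic):
#   kafka-topics --bootstrap-server localhost:29092 --create --topic test --partitions 1 --replication-factor 1
#   kafka-console-producer --bootstrap-server localhost:29092 --topic test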
###
### SCHEMA REGISTRY
###
schema-registry:
image: confluentinc/cp-schema-registry:5.5.1
container_name: schema-registry
depends_on:
- zookeeper
- kafka
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 172.20.10.11:2181
SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
ports:
- "8081:8081"
networks:
kafka_dse_network:
ipv4_address: 172.20.10.14
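# Once running, the Schema Registry REST API is reachable from the host on port 8081;
# for example (illustrative only), listing the registered Avro subjects:
#   curl http://localhost:8081/subjects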
###
### KAFKA-HQ
###
# (Helpful but not required: an admin GUI for debugging and monitoring)
kafka-hq:
image: tchiotludo/akhq
container_name: kafka-hq
environment:
AKHQ_CONFIGURATION: |
akhq:
server:
access-log:
enabled: true
name: org.akhq.log.access # Logger name
format: "[Date: {}] [Duration: {} ms] [Url: {} {} {}] [Status: {}] [Ip: {}] [Length: {}] [Port: {}]" # Logger format
connections:
docker-kafka-server:
properties:
bootstrap.servers: "172.20.10.12:9092"
schema-registry:
url: "http://172.20.10.14:8081"
connect:
- name: kafka-connect-1
url: "http://172.20.10.18:8084"
ports:
- 8085:8080
networks:
kafka_dse_network:
ipv4_address: 172.20.10.19
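# With the 8085:8080 port mapping above, the AKHQ UI is available from the host at
# http://localhost:8085 once the stack is up.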
###
### KAFKA REST PROXY
###
# (only needed in this project if using the Kafka REST Proxy to send messages to the broker)
rest-proxy:
image: confluentinc/cp-kafka-rest:5.5.1
depends_on:
- zookeeper
- kafka # broker
- schema-registry
ports:
- 8082:8082
hostname: rest-proxy
container_name: rest-proxy
environment:
KAFKA_REST_HOST_NAME: rest-proxy
KAFKA_REST_BOOTSTRAP_SERVERS: 'PLAINTEXT://172.20.10.12:9092'
KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
networks:
kafka_dse_network:
ipv4_address: 172.20.10.20
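# Example of producing through the REST Proxy from the host (a sketch only; the "test"
# topic name is an assumption, nothing in this compose file creates it):
#   curl -X POST http://localhost:8082/topics/test \
#     -H "Content-Type: application/vnd.kafka.json.v2+json" \
#     --data '{"records":[{"value":{"hello":"world"}}]}'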
###
### KAFKA CONNECT
###
# (only needed in this project if using Kafka Connect to write to C*)
kafka-connect:
# using cp-kafka-connect here, unlike the cp-all-in-one example we've generally been following: https://github.com/confluentinc/cp-all-in-one/blob/5.5.1-post/cp-all-in-one-community/docker-compose.yml
build:
context: ./kafka/connect
args:
ASTRA_SECURE_BUNDLE_NAME: $ASTRA_SECURE_BUNDLE_NAME
image: demo-kafka-connect-astra:latest
hostname: kafka-connect
container_name: kafka-connect
depends_on:
- zookeeper
- kafka # broker
- schema-registry
ports:
- "8083:8083"
environment:
# these are copied into the worker properties file, as shown here: https://github.com/confluentinc/cp-docker-images/blob/2d1072207790a06b88b79fb129f72bb41b67532c/debian/kafka-connect-base/include/etc/confluent/docker/configure#L54
CONNECT_BOOTSTRAP_SERVERS: 'PLAINTEXT://172.20.10.12:9092'
CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect
CONNECT_REST_PORT: 8083
CONNECT_GROUP_ID: compose-connect-group
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
# This isn't mentioned in the Confluent examples, but it appears to be needed to avoid the error:
#   org.apache.kafka.common.config.ConfigException: Missing required configuration "offset.storage.file.filename" which has no default value.
# The Confluent startup script strips the CONNECT_ prefix and converts underscores to periods
# (so CONNECT_OFFSET_STORAGE_FILE_FILENAME becomes offset.storage.file.filename in the worker properties).
# The value below follows their example at /etc/kafka/connect-standalone.properties (a path inside their Docker image).
CONNECT_OFFSET_STORAGE_FILE_FILENAME: /tmp/connect.offsets
networks:
kafka_dse_network:
ipv4_address: 172.20.10.21
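# Once the worker is up, connectors are managed through its REST API on port 8083.
# A hedged sketch of checking the plugin and registering a Cassandra/Astra sink: the
# class name and mapping syntax come from the DataStax Kafka Connector docs (double-check
# against the installed version), and the topic/keyspace/table/column names below are
# placeholders, not values defined in this repo:
#   curl http://localhost:8083/connector-plugins
#   curl -X POST http://localhost:8083/connectors \
#     -H "Content-Type: application/json" \
#     --data '{"name": "astra-sink",
#              "config": {"connector.class": "com.datastax.oss.kafka.sink.CassandraSinkConnector",
#                         "topics": "my_topic",
#                         "topic.my_topic.my_keyspace.my_table.mapping": "id=key, payload=value"}}'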