version: '3.7'
services:
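  # Single-node ZooKeeper (3.4.9) used by the Kafka broker below for
  # coordination; data and transaction logs are persisted under ./volumes/.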
  zoo1:
    image: zookeeper:3.4.9
    hostname: zoo1
    ports:
      - "2181:2181"
    networks:
      - project
    environment:
      ZOO_MY_ID: 1
      ZOO_PORT: 2181
      ZOO_SERVERS: server.1=zoo1:2888:3888
    volumes:
      - ./volumes/zk-single-kafka-single/zoo1/data:/data
      - ./volumes/zk-single-kafka-single/zoo1/datalog:/datalog
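  # Single Kafka broker with two listeners: LISTENER_DOCKER_INTERNAL
  # (kafka1:19092) for clients on the Compose network and
  # LISTENER_DOCKER_EXTERNAL (${DOCKER_HOST_IP:-127.0.0.1}:9092) for clients on
  # the host. The offsets-topic replication factor is 1 because there is only
  # one broker.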
  kafka1:
    image: confluentinc/cp-kafka:5.2.2
    hostname: kafka1
    ports:
      - "9092:9092"
      - "19092:19092"
    networks:
      - project
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    volumes:
      - ./volumes/zk-single-kafka-single/kafka1/data:/var/lib/kafka/data
    depends_on:
      - zoo1
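  # Flume agent built from the project's custom image. Its configuration is
  # bind-mounted from ./images/flume/flume.conf, and the ./volumes/flume-output
  # mount is shared with the Spark services, which mount it at /logs/flume.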
  flume1:
    image: docker.pkg.github.com/trendyol-data-eng-summer-intern-2019/recom-engine-docker/flume:0.0.1
    hostname: flume1
    ports:
      - "41414:41414"
      - "1234:1234"
    networks:
      - project
    volumes:
      - ./images/flume/flume.conf:/opt/flume/conf/flume.conf
      - ./volumes/flume-output:/logs
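  # Spark master in standalone mode. Port 7077 is the master URL the worker
  # connects to; the master web UI (8080) is published on host port 8383,
  # presumably to avoid clashing with spring-boot1 on 8080.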
  spark-master:
    image: docker.pkg.github.com/trendyol-data-eng-summer-intern-2019/recom-engine-docker/spark-master:0.0.1
    command: bin/spark-class org.apache.spark.deploy.master.Master -h spark-master
    hostname: spark-master
    environment:
      MASTER: spark://spark-master:7077
      SPARK_CONF_DIR: /conf
      SPARK_PUBLIC_DNS: localhost
    expose:
      - 7001
      - 7002
      - 7003
      - 7004
      - 7005
      - 7077
      - 6066
      - 8080
    ports:
      - 4040:4040
      - 6066:6066
      - 7077:7077
      - 8383:8080
    networks:
      - project
    volumes:
      - ./images/spark/master/target/:/app/
      - ./images/spark/master/cronjobs:/cronjobs
      - ./volumes/spark/models/:/models
      - ./volumes/flume-output:/logs/flume
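  # Spark worker registering with spark://spark-master:7077, allotted 4 cores
  # and 4 GB of memory, with its web UI published on 8082. It mounts the same
  # /app, /models, and /logs/flume paths as the master.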
  spark-worker:
    image: docker.pkg.github.com/trendyol-data-eng-summer-intern-2019/recom-engine-docker/spark-worker:0.0.1
    command: bin/spark-class org.apache.spark.deploy.worker.Worker spark://spark-master:7077
    hostname: spark-worker
    environment:
      SPARK_CONF_DIR: /conf
      SPARK_WORKER_CORES: 4
      SPARK_WORKER_MEMORY: 4g
      SPARK_WORKER_PORT: 8881
      SPARK_WORKER_WEBUI_PORT: 8082
      SPARK_PUBLIC_DNS: localhost
    links:
      - spark-master
    expose:
      - 7012
      - 7013
      - 7014
      - 7015
      - 8881
    ports:
      - 8082:8082
    networks:
      - project
    volumes:
      - ./images/spark/master/target/:/app/
      - ./volumes/spark/models/:/models
      - ./volumes/flume-output:/logs/flume
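  # MongoDB instance published on the default port 27017. The "latest" tag is
  # unpinned, so the Mongo version may change between pulls.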
  mongo1:
    image: mongo:latest
    hostname: mongo1
    ports:
      - "27017:27017"
    networks:
      - project
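  # Spring Boot web service from the recom-engine-web-service package, exposed
  # to the host on 8080.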
  spring-boot1:
    image: docker.pkg.github.com/trendyol-data-eng-summer-intern-2019/recom-engine-web-service/spring-boot:0.0.1
    hostname: spring-boot1
    ports:
      - "8080:8080"
    networks:
      - project
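# Shared project network that every service above joins.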
networks:
  project: