# compose.yaml
version: '3.8'

services:
  # Redpanda broker: Kafka API on 9092 (in-network) / 19092 (host), HTTP proxy on
  # 8082 / 18082, schema registry on 8081 / 18081, admin API on 9644 (host 19644).
  kafka-broker:
    image: docker.redpanda.com/redpandadata/redpanda:v24.1.1
    command: |
      redpanda start
      --smp 1
      --overprovisioned
      --node-id 0
      --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092
      --advertise-kafka-addr internal://kafka-broker:9092,external://localhost:19092
      --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082
      --advertise-pandaproxy-addr internal://kafka-broker:8082,external://localhost:18082
      --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081
      --rpc-addr kafka-broker:33145
      --advertise-rpc-addr kafka-broker:33145
      --mode dev-container
      --set auto_create_topics_enabled=true
    ports:
      - 18081:18081  # schema registry
      - 18082:18082  # pandaproxy (HTTP)
      - 19092:19092  # Kafka API
      - 19644:9644   # admin API
  # Redpanda Console UI at http://localhost:8080; the config file is written from
  # the CONSOLE_CONFIG_FILE environment variable before the console starts.
  console:
    image: docker.redpanda.com/redpandadata/console:v2.5.2
    entrypoint: /bin/sh
    command: |-
      -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console'
    ports:
      - 8080:8080
    environment:
      CONFIG_FILEPATH: /tmp/config.yml
      # NOTE: the `connect` section expects a Kafka Connect cluster at connect:8083,
      # which is not defined in this compose file.
      CONSOLE_CONFIG_FILE: >
        kafka:
          brokers: ["kafka-broker:9092"]
        schemaRegistry:
          enabled: true
          urls: ["http://kafka-broker:8081"]
        redpanda:
          adminApi:
            enabled: true
            urls: ["http://kafka-broker:9644"]
        connect:
          enabled: true
          clusters:
            - name: local-connect-cluster
              url: http://connect:8083
  # PostgreSQL backing store; SQL files in ./sql run on first start via
  # the image's /docker-entrypoint-initdb.d mechanism.
  postgres:
    image: postgres:14
    environment:
      POSTGRES_DB: github_events
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    ports:
      - 5432:5432
    volumes:
      - ./sql:/docker-entrypoint-initdb.d
  # Optional one-shot connectivity check (disabled): runs test-connection.py
  # against the broker and Postgres.
  # test-db:
  #   build: .
  #   command: ["python3", "test-connection.py"]
  #   depends_on:
  #     - kafka-broker
  #     - postgres
  #   environment:
  #     - POSTGRES_DB=github_events
  #     - POSTGRES_USER=postgres
  #     - POSTGRES_PASSWORD=postgres
  #     - RUN_MODE=test-db
  # Producer: builds the local Dockerfile and runs main.py with RUN_MODE=producer.
  producer:
    build: .
    command: ["python3", "main.py"]
    depends_on:
      - kafka-broker
      - postgres
    environment:
      - POSTGRES_DB=github_events
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - RUN_MODE=producer

  # Consumer: same image, runs consumer.py with RUN_MODE=consumer.
  consumer:
    build: .
    command: ["python3", "consumer.py"]
    depends_on:
      - kafka-broker
      - postgres
    environment:
      - POSTGRES_DB=github_events
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - RUN_MODE=consumer
  # Spark: spark-submit runs /app/consumer.py with the Kafka SQL connector
  # and the Postgres JDBC driver; the project root is mounted at /app.
  spark:
    image: bitnami/spark:latest
    environment:
      - SPARK_MODE=master
    ports:
      - "8089:8089"
    command: bash -c "spark-submit --jars /app/jars/postgresql-42.7.3.jar --packages org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.2 /app/consumer.py"
    volumes:
      - .:/app
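
# A possible workflow for this stack (a sketch, assuming the Docker Compose v2 CLI
# and a Dockerfile with main.py/consumer.py in this directory, as the services above imply):
#
#   docker compose up -d kafka-broker console postgres   # bring up infrastructure
#   docker compose up -d --build producer consumer       # build and start the apps
#   docker compose logs -f consumer                      # follow the consumer
#   docker compose down -v                               # tear everything down
#
# Redpanda Console is then available at http://localhost:8080.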