# github.com/pingcap/ticdc@v0.0.0-20220526033649-485a10ef2652/docker-compose-avro.yml
---
version: '2.1'

services:
  controller:
    image: ticdc:latest
    build:
      context: .
      dockerfile: ./Dockerfile.development
    volumes:
      - /data
      - ./docker/logs:/logs
      - ./docker/config:/config
    command:
      - /usr/bin/socat
      - -v
      - tcp-l:1234,fork
      - exec:'/bin/cat'
    ports:
      - "1234:1234"
    depends_on:
      - "upstream-pd"
      - "schema-registry"
      - "kafka-connect-01"
      - "kafka"
      - "capturer0"
      - "capturer1"
      - "capturer2"
    restart: on-failure

  capturer0:
    image: ticdc:latest
    build:
      context: .
      dockerfile: ./Dockerfile.development
    volumes:
      - /data
      - ./docker/logs:/logs
    entrypoint: "/cdc server"
    command:
      - --addr=0.0.0.0:8300
      - --pd=http://upstream-pd:2379
      - --log-file=/logs/capturer0.log
      - --log-level=debug
      - --advertise-addr=capturer0:8300
      - --tz=${CDC_TIME_ZONE:-SYSTEM}
      - --sort-dir=/data/cdc_sort
    depends_on:
      - "upstream-tidb"
      - "downstream-tidb"
      - "kafka"
    restart: on-failure

  capturer1:
    image: ticdc:latest
    build:
      context: .
      dockerfile: ./Dockerfile.development
    volumes:
      - /data
      - ./docker/logs:/logs
    entrypoint: "/cdc server"
    command:
      - --addr=0.0.0.0:8300
      - --pd=http://upstream-pd:2379
      - --log-file=/logs/capturer1.log
      - --log-level=debug
      - --advertise-addr=capturer1:8300
      - --tz=${CDC_TIME_ZONE:-SYSTEM}
      - --sort-dir=/data/cdc_sort
    depends_on:
      - "upstream-tidb"
      - "downstream-tidb"
      - "kafka"
    restart: on-failure

  capturer2:
    image: ticdc:latest
    build:
      context: .
      dockerfile: ./Dockerfile.development
    volumes:
      - /data
      - ./docker/logs:/logs
    entrypoint: "/cdc server"
    command:
      - --addr=0.0.0.0:8300
      - --pd=http://upstream-pd:2379
      - --log-file=/logs/capturer2.log
      - --log-level=debug
      - --advertise-addr=capturer2:8300
      - --tz=${CDC_TIME_ZONE:-SYSTEM}
      - --sort-dir=/data/cdc_sort
    depends_on:
      - "upstream-tidb"
      - "downstream-tidb"
      - "kafka"
    restart: on-failure

  upstream-pd:
    image: pingcap/pd:release-4.0-nightly
    ports:
      - "2379:2379"
    volumes:
      - ./docker/config/pd.toml:/pd.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --name=upstream-pd
      - --client-urls=http://0.0.0.0:2379
      - --peer-urls=http://0.0.0.0:2380
      - --advertise-client-urls=http://upstream-pd:2379
      - --advertise-peer-urls=http://upstream-pd:2380
      - --initial-cluster=upstream-pd=http://upstream-pd:2380
      - --data-dir=/data/upstream-pd
      - --config=/pd.toml
      - --log-file=/logs/upstream-pd.log
      - -L=debug
    restart: on-failure

  upstream-tikv0:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=upstream-tikv0:20160
      - --data-dir=/data/upstream-tikv0
      - --pd=upstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/upstream-tikv0.log
      - --log-level=debug
    depends_on:
      - "upstream-pd"
    restart: on-failure

  upstream-tikv1:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=upstream-tikv1:20160
      - --data-dir=/data/upstream-tikv1
      - --pd=upstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/upstream-tikv1.log
      - --log-level=debug
    depends_on:
      - "upstream-pd"
    restart: on-failure

  upstream-tikv2:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=upstream-tikv2:20160
      - --data-dir=/data/upstream-tikv2
      - --pd=upstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/upstream-tikv2.log
      - --log-level=debug
    depends_on:
      - "upstream-pd"
    restart: on-failure

  upstream-tidb:
    image: pingcap/tidb:release-4.0-nightly
    ports:
      - "4000:4000"
      - "10080:10080"
    volumes:
      - ./docker/config/tidb.toml:/tidb.toml:ro
      - ./docker/logs:/logs
    command:
      - --store=tikv
      - --path=upstream-pd:2379
      - --config=/tidb.toml
      - --log-file=/logs/upstream-tidb.log
      - --advertise-address=upstream-tidb
      - -L=debug
    depends_on:
      - "upstream-tikv0"
      - "upstream-tikv1"
      - "upstream-tikv2"
    restart: on-failure

  downstream-pd:
    image: pingcap/pd:release-4.0-nightly
    ports:
      - "3379:2379"
    volumes:
      - ./docker/config/pd.toml:/pd.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --name=downstream-pd
      - --client-urls=http://0.0.0.0:2379
      - --peer-urls=http://0.0.0.0:2380
      - --advertise-client-urls=http://downstream-pd:2379
      - --advertise-peer-urls=http://downstream-pd:2380
      - --initial-cluster=downstream-pd=http://downstream-pd:2380
      - --data-dir=/data/downstream-pd
      - --config=/pd.toml
      - --log-file=/logs/downstream-pd.log
      - -L=debug
    restart: on-failure

  downstream-tikv0:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=downstream-tikv0:20160
      - --data-dir=/data/downstream-tikv0
      - --pd=downstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/downstream-tikv0.log
      - --log-level=debug
    depends_on:
      - "downstream-pd"
    restart: on-failure

  downstream-tikv1:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=downstream-tikv1:20160
      - --data-dir=/data/downstream-tikv1
      - --pd=downstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/downstream-tikv1.log
      - --log-level=debug
    depends_on:
      - "downstream-pd"
    restart: on-failure

  downstream-tikv2:
    image: pingcap/tikv:release-4.0-nightly
    volumes:
      - ./docker/config/tikv.toml:/tikv.toml:ro
      - /data
      - ./docker/logs:/logs
    command:
      - --addr=0.0.0.0:20160
      - --advertise-addr=downstream-tikv2:20160
      - --data-dir=/data/downstream-tikv2
      - --pd=downstream-pd:2379
      - --config=/tikv.toml
      - --log-file=/logs/downstream-tikv2.log
      - --log-level=debug
    depends_on:
      - "downstream-pd"
    restart: on-failure

  downstream-tidb:
    image: pingcap/tidb:release-4.0-nightly
    ports:
      - "5000:4000"
      - "20080:10080"
    volumes:
      - ./docker/config/tidb.toml:/tidb.toml:ro
      - ./docker/logs:/logs
    command:
      - --store=tikv
      - --path=downstream-pd:2379
      - --config=/tidb.toml
      - --log-file=/logs/downstream-tidb.log
      - --advertise-address=downstream-tidb
      - -L=debug
    depends_on:
"downstream-tikv0" 288 - "downstream-tikv1" 289 - "downstream-tikv2" 290 restart: on-failure 291 292 # The rest of the file is adapted from https://github.com/confluentinc/demo-scene/blob/master/connect-jdbc/docker-compose.yml 293 294 zookeeper: 295 image: confluentinc/cp-zookeeper:5.5.1 296 container_name: zookeeper 297 environment: 298 ZOOKEEPER_CLIENT_PORT: 2181 299 ZOOKEEPER_TICK_TIME: 2000 300 301 kafka: 302 image: confluentinc/cp-enterprise-kafka:5.5.1 303 container_name: kafka 304 depends_on: 305 - zookeeper 306 ports: 307 # Exposes 9092 for external connections to the broker 308 # Use kafka:29092 for connections internal on the docker network 309 # See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details 310 - 9092:9092 311 environment: 312 KAFKA_BROKER_ID: 1 313 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 314 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 315 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT 316 KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://kafka:9092 317 KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" 318 KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter 319 KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 320 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 100 321 CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka:29092 322 CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181 323 CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 324 CONFLUENT_METRICS_ENABLE: 'true' 325 CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous' 326 327 schema-registry: 328 image: confluentinc/cp-schema-registry:5.5.1 329 container_name: schema-registry 330 ports: 331 - 8081:8081 332 depends_on: 333 - zookeeper 334 - kafka 335 environment: 336 SCHEMA_REGISTRY_HOST_NAME: schema-registry 337 SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181 338 339 kafka-connect-01: 340 image: confluentinc/cp-kafka-connect:5.5.1 341 container_name: kafka-connect-01 342 depends_on: 343 - zookeeper 344 - kafka 345 - schema-registry 346 - downstream-tidb 347 ports: 348 - 8083:8083 349 environment: 350 CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n" 351 CONNECT_BOOTSTRAP_SERVERS: "kafka:29092" 352 CONNECT_REST_PORT: 8083 353 CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-01" 354 CONNECT_GROUP_ID: compose-connect-group 355 CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs 356 CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets 357 CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status 358 CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter 359 CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' 360 CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter 361 CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' 362 CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" 363 CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" 364 CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" 365 CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR" 366 CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" 367 CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" 368 CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" 369 CONNECT_PLUGIN_PATH: '/usr/share/java' 370 command: 371 - /bin/bash 372 - -c 373 - | 374 # JDBC Drivers 375 # ------------ 376 # MySQL 377 cd /usr/share/java/kafka-connect-jdbc/ 378 wget https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-8.0.21.tar.gz 
        tar -xf mysql-connector-java-8.0.21.tar.gz
        mv mysql-connector-java-8.0.21/mysql-connector-java-8.0.21.jar ./
        # Now launch Kafka Connect
        sleep infinity &
        /etc/confluent/docker/run

  kafka-connect-healthcheck:
    image: devshawn/kafka-connect-healthcheck:0.1.0
    container_name: kafka-connect-healthcheck
    depends_on:
      - kafka-connect-01
    ports:
      - 18083:18083
    environment:
      HEALTHCHECK_CONNECT_URL: 'http://kafka-connect-01:8083'
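
# ---------------------------------------------------------------------------
# Usage sketch (comments only, not part of the compose definition). The steps
# below are illustrative: the changefeed, connector, topic, and database names
# are placeholders, and exact TiCDC CLI flags differ between versions, so
# check `/cdc cli changefeed create --help` inside the container before
# copying anything verbatim.
#
# 1. Bring up the whole stack:
#      docker-compose -f docker-compose-avro.yml up -d
#
# 2. Create an Avro changefeed from the controller container (assumes the
#    kafka sink URI accepts `protocol=avro`; pointing the changefeed at the
#    schema registry may need an extra, version-specific option):
#      docker-compose -f docker-compose-avro.yml exec controller \
#        /cdc cli changefeed create --pd=http://upstream-pd:2379 \
#        --sink-uri="kafka://kafka:9092/ticdc-test?protocol=avro"
#
# 3. Register a Confluent JDBC sink connector that replays the Avro topic
#    into downstream TiDB through the MySQL driver downloaded above
#    (connector name, topic, and database are hypothetical):
#      curl -X POST http://localhost:8083/connectors \
#        -H "Content-Type: application/json" \
#        -d '{
#              "name": "jdbc-sink-tidb",
#              "config": {
#                "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
#                "connection.url": "jdbc:mysql://downstream-tidb:4000/test?user=root",
#                "topics": "ticdc-test",
#                "insert.mode": "upsert",
#                "pk.mode": "record_key",
#                "auto.create": "true"
#              }
#            }'
#
# 4. Check connector health through the healthcheck sidecar:
#      curl http://localhost:18083/
# ---------------------------------------------------------------------------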