github.com/pingcap/ticdc@v0.0.0-20220526033649-485a10ef2652/scripts/avro-local-test.sh (about)

     1  #!/bin/sh
     2  
     3  # Copyright 2020 PingCAP, Inc.
     4  #
     5  # Licensed under the Apache License, Version 2.0 (the "License");
     6  # you may not use this file except in compliance with the License.
     7  # You may obtain a copy of the License at
     8  #
     9  #     http://www.apache.org/licenses/LICENSE-2.0
    10  #
    11  # Unless required by applicable law or agreed to in writing, software
    12  # distributed under the License is distributed on an "AS IS" BASIS,
    13  # See the License for the specific language governing permissions and
    14  # limitations under the License.
    15  
    16  # This is a script designed for testing Avro support for Kafka sink.
    17  # The data flow is (upstream TiDB) ==> (TiCDC) =avro=> (Kafka/JDBC connector) =sql=> (downstream TiDB)
    18  # Developers should run "avro-local-test.sh up" first, wait for a while, run "avro-local-test.sh init", and only
    19  # then will the data flow be established.
    20  #
    21  # Upstream mysql://127.0.0.1:4000
    22  # Downstream mysql://127.0.0.1:5000
    23  #
    24  # This script only supports syncing (dbname = testdb, tableName = test).
    25  # Developers should adapt this file to their own needs.
    26  #
    27  
    28  ProgName=$(basename $0)
    29  cd "$(dirname "$0")"
    30  sub_help() {
    31      echo "Usage: $ProgName <subcommand> [options]\n"
    32      echo "Subcommands:"
    33      echo "    init    Create changefeed and Kafka Connector"
    34      echo "    up      Bring up the containers"
    35      echo "    down    Pause the containers"
    36      echo ""
    37  }
    38  
# Establish the data flow after "up" has started the containers:
#   1. create a TiCDC changefeed that writes Avro-encoded rows for
#      testdb.test to the kafka topic, registering schemas with the
#      Schema Registry;
#   2. POST the JDBC sink connector config to Kafka Connect so rows
#      are applied to the downstream TiDB.
# Both commands run inside the ticdc_controller_1 container so the
# compose-network hostnames (upstream-pd, kafka, schema-registry,
# kafka-connect-01) resolve. The services must already be healthy;
# see the header comment about waiting after "up".
sub_init() {
  sudo docker exec -it ticdc_controller_1 sh -c "
  /cdc cli changefeed create --pd=\"http://upstream-pd:2379\" --sink-uri=\"kafka://kafka:9092/testdb_test?protocol=avro\" --opts \"registry=http://schema-registry:8081\"
  curl -X POST -H \"Content-Type: application/json\" -d @/config/jdbc-sink-connector.json http://kafka-connect-01:8083/connectors
  "
}
    45  
    46  sub_up() {
    47    sudo docker-compose -f ../docker-compose-avro.yml up --detach
    48  }
    49  
    50  sub_down() {
    51    sudo docker-compose -f ../docker-compose-avro.yml down
    52    sudo rm -r ../docker/logs ../docker/data
    53  }
    54  
    55  subcommand=$1
    56  case $subcommand in
    57      "" | "-h" | "--help")
    58          sub_help
    59          ;;
    60      *)
    61          shift
    62          sub_${subcommand} $@
    63          if [ $? = 127 ]; then
    64              echo "Error: '$subcommand' is not a known subcommand." >&2
    65              echo "       Run '$ProgName --help' for a list of known subcommands." >&2
    66              exit 1
    67          fi
    68          ;;
    69  esac