github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/kafka_column_selector/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function run() {
	# test kafka sink only in this case
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# record tso before we create tables to skip the system table DDLs
	start_ts=$(cdc cli tso query --pd=http://$UP_PD_HOST_1:$UP_PD_PORT_1)

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	# create a changefeed that writes canal-json messages to the kafka topic,
	# using the column selector rules defined in conf/changefeed.toml
	changefeed_id="test"
	TOPIC_NAME="column-selector-test"
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=canal-json&partition-num=1&enable-tidb-extension=true"
	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" -c ${changefeed_id} --config="$CUR/conf/changefeed.toml"

	# consume the topic in the background and apply the messages to the downstream TiDB
	cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 &

	run_sql_file $CUR/data/data.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# build the checksum checker binary if it has not been built yet
	echo "Start building the checksum checker..."
	cd $CUR/../../utils/checksum_checker
	if [ ! -f ./checksum_checker ]; then
		GO111MODULE=on go build
	fi

	# wait until the last table created by data.sql reaches the downstream
	check_table_exists "test1.finishmark" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}

	# compare checksums of the selected columns between upstream and downstream
	./checksum_checker --upstream-uri "root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/" --downstream-uri "root@tcp(${DOWN_TIDB_HOST}:${DOWN_TIDB_PORT})/" --databases "test,test1" --config="$CUR/conf/changefeed.toml"

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"