github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/kafka_simple_claim_check_avro/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

# use kafka-consumer with the simple protocol decoder (avro encoding) to sync data from kafka to mysql
function run() {
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# enable row-level checksum on the upstream tidb cluster
	run_sql "set global tidb_enable_row_level_checksum=true" ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	TOPIC_NAME="kafka-simple-claim-check-avro-$RANDOM"

	# record tso before we create tables so the changefeed skips the system table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})

	changefeed_id="kafka-simple-claim-check-avro"
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=simple&encoding-format=avro"
	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" -c ${changefeed_id} --config="$CUR/conf/changefeed.toml"
	run_sql_file $CUR/data/ddl.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	sleep 5

	run_cdc_cli changefeed pause -c ${changefeed_id}

	# lower max-message-bytes so that large messages trigger the claim-check path
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=simple&encoding-format=avro&max-message-bytes=2048"
	run_cdc_cli changefeed update -c ${changefeed_id} --sink-uri="$SINK_URI" --config="$CUR/conf/changefeed.toml" --no-confirm
	run_cdc_cli changefeed resume -c ${changefeed_id}

	cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 &

	run_sql_file $CUR/data/data.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# sync_diff can't check a non-existent table, so first verify the expected tables are created downstream
	check_table_exists test.finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 200
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
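
# For context, the claim-check behavior this test exercises is driven by the
# changefeed config referenced above ($CUR/conf/changefeed.toml). The heredoc
# below is a minimal sketch of such a config, written here only for
# illustration; the file name and the storage URI are assumptions, not copied
# from this repo. TiCDC's claim-check mode writes an oversized Kafka message
# to external storage and emits a small message carrying its location instead.
cat >/tmp/changefeed-claim-check-sketch.toml <<'EOF'
[sink.kafka-config.large-message-handle]
# Offload messages larger than max-message-bytes instead of failing the send.
large-message-handle-option = "claim-check"
# Destination for the offloaded payloads (hypothetical local path).
claim-check-storage-uri = "file:///tmp/kafka-simple-claim-check-avro"
EOF
# With max-message-bytes=2048 in the updated sink URI, rows encoded by the
# avro-based simple protocol easily exceed the limit, forcing this path.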