#!/bin/bash
# Test: a changefeed with a Kafka sink must recover (stop -> resume -> normal)
# after transient async-send errors injected through a Go failpoint.

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source "$CUR/../_utils/test_prepare"
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

CDC_COUNT=3
DB_COUNT=4
MAX_RETRIES=20

# Assert that the changefeed is in the expected state; exit 1 otherwise.
# Arguments:
#   $1 - PD address
#   $2 - changefeed id
#   $3 - expected state (e.g. "stopped", "normal")
function check_changefeed_state() {
	pd_addr=$1
	changefeed_id=$2
	expected=$3
	state=$(cdc cli --pd="$pd_addr" changefeed query -s -c "$changefeed_id" | jq -r ".state")
	if [[ "$state" != "$expected" ]]; then
		echo "unexpected state $state, expected $expected"
		exit 1
	fi
}

# Exported so 'ensure' can invoke it from a subshell when retrying.
export -f check_changefeed_state

function run() {
	# test kafka sink only in this case
	if [ "$SINK_TYPE" == "mysql" ]; then
		return
	fi

	rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
	start_tidb_cluster --workdir "$WORK_DIR"
	cd "$WORK_DIR"

	pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
	TOPIC_NAME="ticdc-kafka-sink-error-resume-test-$RANDOM"
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&kafka-version=${KAFKA_VERSION}"
	run_kafka_consumer "$WORK_DIR" "kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&version=${KAFKA_VERSION}"

	# Inject the async-send error 4 times; each occurrence should stop the
	# changefeed, which the loop below resumes one by one.
	export GO_FAILPOINTS='github.com/pingcap/ticdc/cdc/sink/producer/kafka/KafkaSinkAsyncSendError=4*return(true)'
	run_cdc_server --workdir "$WORK_DIR" --binary "$CDC_BINARY" --addr "127.0.0.1:8300" --pd "$pd_addr"
	# 'changefeed create' prints the id on the second-to-last line, 2nd field.
	changefeed_id=$(cdc cli changefeed create --pd="$pd_addr" --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')

	run_sql "CREATE DATABASE kafka_sink_error_resume;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE table kafka_sink_error_resume.t1(id int primary key auto_increment, val int);" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE table kafka_sink_error_resume.t2(id int primary key auto_increment, val int);" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	# NOTE(review): host/port were missing here in the original, unlike every
	# other run_sql call — made explicit to guarantee the row that triggers
	# the failpoint lands on the upstream cluster.
	run_sql "INSERT INTO kafka_sink_error_resume.t1 VALUES ();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# One stop/resume round per injected error.
	for i in {1..4}; do
		ensure "$MAX_RETRIES" check_changefeed_state "$pd_addr" "$changefeed_id" "stopped"
		cdc cli changefeed resume --changefeed-id="$changefeed_id" --pd="$pd_addr"
		sleep 5
	done
	# All failpoint hits consumed: the changefeed must settle back to normal.
	ensure "$MAX_RETRIES" check_changefeed_state "$pd_addr" "$changefeed_id" "normal"

	check_table_exists "kafka_sink_error_resume.t1" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_table_exists "kafka_sink_error_resume.t2" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_sync_diff "$WORK_DIR" "$CUR/conf/diff_config.toml"

	# More traffic after recovery to confirm replication still works.
	run_sql "INSERT INTO kafka_sink_error_resume.t1 VALUES (),();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "INSERT INTO kafka_sink_error_resume.t2 VALUES (),();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "UPDATE kafka_sink_error_resume.t2 SET val = 100;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	check_sync_diff "$WORK_DIR" "$CUR/conf/diff_config.toml"

	export GO_FAILPOINTS=''
	cleanup_process "$CDC_BINARY"
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs "$WORK_DIR"
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"