github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/changefeed_auto_stop/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1
MAX_RETRIES=10

function run() {
	DB_COUNT=4

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR
	cd $WORK_DIR
	start_ts=$(cdc cli tso query --pd=http://$UP_PD_HOST_1:$UP_PD_PORT_1)

	# Create the upstream databases and load an initial workload into each.
	for i in $(seq $DB_COUNT); do
		db="changefeed_auto_stop_$i"
		run_sql "CREATE DATABASE $db;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
		go-ycsb load mysql -P $CUR/conf/workload -p mysql.host=${UP_TIDB_HOST} -p mysql.port=${UP_TIDB_PORT} -p mysql.user=root -p mysql.db=$db
	done

	# Start one healthy capture, then a second capture with failpoints enabled:
	# its processor hits an injected sync-resolved error once and a 1000 ms
	# position-update delay. GO_FAILPOINTS is cleared immediately afterwards so
	# no later process inherits the injections.
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --logsuffix "1" --addr "127.0.0.1:8301" --pd "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}"
	export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true);github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)'
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --logsuffix "2" --addr "127.0.0.1:8302" --pd "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}"
	export GO_FAILPOINTS=''

	TOPIC_NAME="ticdc-changefeed-auto-stop-test-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
	pulsar)
		run_pulsar_cluster $WORK_DIR normal
		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
		;;
	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306/" ;;
	esac
	# The changefeed ID is the second field of the second-to-last line of the
	# CLI output.
	changefeedid=$(cdc cli changefeed create --pd="http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" --start-ts=$start_ts --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')
	case $SINK_TYPE in
	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
	esac

	# Despite the injected processor failure, the changefeed is expected to
	# recover and settle back into the "normal" state.
	ensure $MAX_RETRIES check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" ${changefeedid} "normal" "null" ""

	# Verify that every table reached the downstream and the data is consistent.
	for i in $(seq $DB_COUNT); do
		check_table_exists "changefeed_auto_stop_$i.usertable" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	done
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	export GO_FAILPOINTS=''
	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
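
A note on the GO_FAILPOINTS value set above: with the pingcap/failpoint package, the variable holds ";"-separated "path=term" pairs, where a term such as "1*return(true)" fires once and then deactivates, and "sleep(1000)" pauses for 1000 ms on every hit. A minimal sketch of the same pattern, with hypothetical failpoint paths and binary name chosen only to show the composition:

	# Hypothetical failpoint paths; only the term syntax mirrors the test above.
	export GO_FAILPOINTS='example.com/pkg/FailOnce=1*return(true);example.com/pkg/SlowPath=sleep(500)'
	./binary-built-with-failpoints-enabled   # hypothetical binary; picks up GO_FAILPOINTS at startup
	export GO_FAILPOINTS=''                  # clear so later processes are unaffected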