github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/changefeed_finish/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1
MAX_RETRIES=10

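# This test creates a changefeed whose target-ts is only slightly ahead of the current TSO,
# replicates some data, and then verifies that the changefeed reports the "finished" state
# once replication reaches the target-ts.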
function run() {
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR
	cd $WORK_DIR

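	# Determine the upstream PD address and build the sink URI for the sink type under test.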
	pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
	TOPIC_NAME="ticdc-changefeed-finish-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
	pulsar)
		run_pulsar_cluster $WORK_DIR normal
		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
		;;
	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1" ;;
	esac

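	# Start the CDC server and create a changefeed whose target-ts lies roughly 90 seconds
	# in the future, so the changefeed is expected to finish on its own.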
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --pd $pd_addr
	now=$(cdc cli tso query --pd=http://$UP_PD_HOST_1:$UP_PD_PORT_1)
	# A TSO packs the physical timestamp in milliseconds above an 18-bit logical counter,
	# so adding 90 * 1000 ms in the physical part yields a target-ts 90s after now.
	target_ts=$(($now + 90 * 10 ** 3 * 2 ** 18))
	changefeed_id=$(cdc cli changefeed create --sink-uri="$SINK_URI" --target-ts=$target_ts 2>&1 | tail -n2 | head -n1 | awk '{print $2}')

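	# For non-MySQL sinks, start a consumer that applies the sink output to the downstream
	# so the data can be compared below.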
	case $SINK_TYPE in
	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
	esac

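	# Write some rows upstream and wait until they are replicated to the downstream.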
	run_sql "CREATE DATABASE changefeed_finish;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE TABLE changefeed_finish.t (id int primary key auto_increment, t datetime DEFAULT CURRENT_TIMESTAMP)" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	for i in $(seq 1 10); do
		run_sql "insert into changefeed_finish.t values (),(),(),()" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	done
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

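	# Wait for the wall clock to pass the target-ts set above.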
	sleep 90

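	# By now the changefeed's checkpoint should have reached target-ts, so it should report
	# the "finished" state with no error.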
	ensure $MAX_RETRIES check_changefeed_state $pd_addr $changefeed_id "finished" "null" ""

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"