github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/capture_session_done_during_task/run.sh

#!/bin/bash
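# This test revokes the etcd lease backing the capture's session while a newly
# dispatched task is still being handled (a failpoint below delays new
# changefeed handling), then verifies the changefeed recovers and data is
# still replicated downstream.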

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1
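# the sink type (kafka / storage / pulsar / mysql) is passed in by the test runner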

function run() {
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR
	cd $WORK_DIR

	pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
	TOPIC_NAME="ticdc-capture-session-done-during-task-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
	pulsar)
		run_pulsar_cluster $WORK_DIR normal
		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
		;;
	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1" ;;
	esac

	# create the database and table in both upstream and downstream to ensure
	# a task will be dispatched after the changefeed starts.
	run_sql "CREATE DATABASE capture_session_done_during_task;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE table capture_session_done_during_task.t (id int primary key auto_increment, a int)" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE DATABASE capture_session_done_during_task;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	run_sql "CREATE table capture_session_done_during_task.t (id int primary key auto_increment, a int)" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
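	# rows inserted from here on commit after start_ts, so the changefeed must replicate them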
	run_sql "INSERT INTO capture_session_done_during_task.t values (),(),(),(),(),(),()" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
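	# failpoint: delay the processor's handling of the new changefeed by 2s so the
	# capture session can be ended while the task is still being handled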
	export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)'
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --pd $pd_addr
	changefeed_id=$(cdc cli changefeed create --pd=$pd_addr --start-ts=$start_ts --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')
	# wait for the task to be dispatched
	sleep 1

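	# for non-mysql sinks, start a consumer that applies data from the sink to the downstream TiDB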
	case $SINK_TYPE in
	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
	esac

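	# look up the capture's etcd key and the lease attached to it; the JSON output
	# reports the lease ID in decimal, while `etcdctl lease revoke` expects hex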
	capture_key=$(ETCDCTL_API=3 etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix | head -n 1)
	lease=$(ETCDCTL_API=3 etcdctl get $capture_key -w json | grep -o 'lease":[0-9]*' | awk -F: '{print $2}')
	lease_hex=$(printf '%x\n' $lease)
	# revoke the lease on the capture key to simulate the capture's etcd session ending
	ETCDCTL_API=3 etcdctl lease revoke $lease_hex

	# the failpoint delays new changefeed handling by 2s; 1s of that has already
	# elapsed while waiting for dispatch, so wait out the remaining second
	sleep 1
	check_table_exists "capture_session_done_during_task.t" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
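	# write more rows after the session is rebuilt to verify the changefeed is still advancing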
	run_sql "INSERT INTO capture_session_done_during_task.t values (),(),(),(),(),(),()" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	export GO_FAILPOINTS=''
	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"