#!/bin/bash
# Integration test: verify that TiCDC still dispatches and processes a task
# correctly when the capture's etcd session is revoked while task handling is
# artificially delayed (via a GO_FAILPOINTS injection).
#
# Usage: run.sh <sink-type>   # "kafka" selects the Kafka sink, anything else MySQL
# Requires the helpers sourced from ../_utils/test_prepare (run_sql,
# start_tidb_cluster, check_sync_diff, ...) and OUT_DIR/TEST_NAME in the env.

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source "$CUR/../_utils/test_prepare"
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function run() {
	rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
	start_tidb_cluster --workdir "$WORK_DIR"
	cd "$WORK_DIR"

	pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
	TOPIC_NAME="ticdc-capture-session-done-during-task-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&kafka-version=${KAFKA_VERSION}" ;;
	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1" ;;
	esac
	if [ "$SINK_TYPE" == "kafka" ]; then
		run_kafka_consumer "$WORK_DIR" "kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&version=${KAFKA_VERSION}"
	fi

	# Create the database and table in both upstream and downstream so that a
	# task will be dispatched right after the changefeed starts.
	run_sql "CREATE DATABASE capture_session_done_during_task;" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"
	run_sql "CREATE table capture_session_done_during_task.t (id int primary key auto_increment, a int)" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"
	run_sql "CREATE DATABASE capture_session_done_during_task;" "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	run_sql "CREATE table capture_session_done_during_task.t (id int primary key auto_increment, a int)" "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	start_ts=$(run_cdc_cli tso query --pd="$pd_addr")
	run_sql "INSERT INTO capture_session_done_during_task.t values (),(),(),(),(),(),()" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"
	# Inject a 2s delay into task handling so the session revoke below lands
	# while the task is still being dispatched.
	export GO_FAILPOINTS='github.com/pingcap/ticdc/cdc/captureHandleTaskDelay=sleep(2000)' # old processor
	# export GO_FAILPOINTS='github.com/pingcap/ticdc/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)' # new processor
	run_cdc_server --workdir "$WORK_DIR" --binary "$CDC_BINARY" --addr "127.0.0.1:8300" --pd "$pd_addr"
	# NOTE(review): changefeed_id is captured but never used below; kept for
	# debugging parity with the original script.
	changefeed_id=$(cdc cli changefeed create --pd="$pd_addr" --start-ts="$start_ts" --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')
	# Wait until the task has been dispatched.
	sleep 1

	# Extract the (decimal) lease id from the capture's etcd key, convert it to
	# hex as `etcdctl lease revoke` expects, and revoke it to simulate the
	# capture's etcd session ending mid-task.
	capture_key=$(ETCDCTL_API=3 etcdctl get /tidb/cdc/capture --prefix | head -n 1)
	lease=$(ETCDCTL_API=3 etcdctl get "$capture_key" -w json | grep -o 'lease":[0-9]*' | awk -F: '{print $2}')
	lease_hex=$(printf '%x\n' "$lease")
	ETCDCTL_API=3 etcdctl lease revoke "$lease_hex"

	# The failpoint delays task handling by 2s; 1s was already spent waiting for
	# dispatch above, so wait out the remainder before checking replication.
	sleep 1
	check_table_exists "capture_session_done_during_task.t" "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	check_sync_diff "$WORK_DIR" "$CUR/conf/diff_config.toml"
	run_sql "INSERT INTO capture_session_done_during_task.t values (),(),(),(),(),(),()" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"
	check_sync_diff "$WORK_DIR" "$CUR/conf/diff_config.toml"

	export GO_FAILPOINTS=''
	cleanup_process "$CDC_BINARY"
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs "$WORK_DIR"
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"