github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/kafka_sink_error_resume/run.sh

#!/bin/bash

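# This case injects a one-shot error into the Kafka sink through a failpoint, expects the
# changefeed to enter the "warning" state, resumes it, and verifies data consistency.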
set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

CDC_COUNT=3
DB_COUNT=4
MAX_RETRIES=20

function run() {
	# This case tests the Kafka sink only.
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

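	# Prepare a clean working directory and start the upstream and downstream TiDB clusters.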
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR
	cd $WORK_DIR

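	# The changefeed targets the upstream PD; the random suffix keeps the Kafka topic unique per run.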
	pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
	TOPIC_NAME="ticdc-kafka-sink-error-resume-test-$RANDOM"
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760"

	# Return a failpoint error to fail the kafka changefeed.
	# Note that "1*return(true)" makes the failpoint fire exactly once; if the owner retries the changefeed frequently, more errors could break the test.
	export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)'
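	# Start one TiCDC server; the failpoint is activated through the GO_FAILPOINTS environment variable the process inherits.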
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --pd $pd_addr
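	# Create the changefeed and record its ID, parsed as the second field of the second-to-last line of the cli output.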
	changefeed_id=$(cdc cli changefeed create --pd=$pd_addr --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')
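	# Start the consumer that reads the topic and replays the messages into the downstream TiDB.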
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760"

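	# Create two tables upstream; the first INSERT produces the DML message that trips the injected send error.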
	run_sql "CREATE DATABASE kafka_sink_error_resume;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE table kafka_sink_error_resume.t1(id int primary key auto_increment, val int);" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "CREATE table kafka_sink_error_resume.t2(id int primary key auto_increment, val int);" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "INSERT INTO kafka_sink_error_resume.t1 VALUES ();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}

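	# The injected error should move the changefeed to "warning"; after a manual resume it should return to "normal".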
	ensure $MAX_RETRIES check_changefeed_status 127.0.0.1:8300 $changefeed_id "warning" "last_warning" "kafka sink injected error"
	cdc cli changefeed resume --changefeed-id=$changefeed_id --pd=$pd_addr
	ensure $MAX_RETRIES check_changefeed_status 127.0.0.1:8300 $changefeed_id "normal"

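	# Verify that the data written before the failure reaches the downstream after recovery.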
	check_table_exists "kafka_sink_error_resume.t1" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_table_exists "kafka_sink_error_resume.t2" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

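	# Write more data after recovery and check upstream/downstream consistency again.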
	run_sql "INSERT INTO kafka_sink_error_resume.t1 VALUES (),();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "INSERT INTO kafka_sink_error_resume.t2 VALUES (),();" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "UPDATE kafka_sink_error_resume.t2 SET val = 100;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

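	# Clear the failpoint and stop the CDC processes.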
	export GO_FAILPOINTS=''
	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run "$@"
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"