github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/multi_topics/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
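# The sink type under test is passed as the first argument by the test harness.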
SINK_TYPE=$1

function run() {
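	# This test exercises multi-topic kafka dispatching, so skip all other sinks.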
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	DEFAULT_TOPIC_NAME="multi_topics"
	# Record the tso before creating tables so the changefeed skips the system-table DDLs.
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

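	# The topic in the sink URI is the default topic; tables matched by the
	# dispatcher rules in conf/changefeed.toml go to their own topics instead.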
	SINK_URI="kafka://127.0.0.1:9092/$DEFAULT_TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true&kafka-version=${KAFKA_VERSION}"
	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" --config $CUR/conf/changefeed.toml

	run_sql_file $CUR/data/step1.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	# NOTICE: wait for the kafka topics to be created before starting consumers.
	sleep 2m

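	# Consume each per-table topic; the changefeed routes test.table1..test.table3
	# to topics test_table1..test_table3 (see conf/changefeed.toml).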
	for i in $(seq 1 3); do
		run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/test_table${i}?protocol=canal-json&version=${KAFKA_VERSION}&enable-tidb-extension=true" "" ${i}
	done

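	# Consume the workload topic as well.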
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/workload?protocol=canal-json&enable-tidb-extension=true" ""

	# sync_diff can't check a table that doesn't exist, so first verify that the expected tables have been created downstream.
	for i in $(seq 1 3); do
		check_table_exists test.table${i} ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 300
	done
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 300

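	# Renaming tables upstream should redirect their subsequent events to new
	# topics named after the new table names.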
	run_sql "rename table test.table1 to test.table10, test.table2 to test.table20" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	check_table_exists test.table10 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 300
	check_table_exists test.table20 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 300
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 300

	run_sql_file $CUR/data/step2.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	sleep 30
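	# Consume the topics created for the renamed tables plus the finish marker table.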
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/test_table10?protocol=canal-json&version=${KAFKA_VERSION}&enable-tidb-extension=true" "" 10
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/test_table20?protocol=canal-json&version=${KAFKA_VERSION}&enable-tidb-extension=true" "" 20
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/test_finish?protocol=canal-json&version=${KAFKA_VERSION}&enable-tidb-extension=true" "" "finish"

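	# test.finish acts as a sentinel: once it shows up downstream, the step2
	# changes should have been fully replicated, so sync_diff can run.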
	check_table_exists test.finish ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 300
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 300

	cleanup_process $CDC_BINARY
}

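# Make sure the cluster is stopped even if the test fails midway.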
trap stop_tidb_cluster EXIT
run "$@"
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"