github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/csv_storage_partition_table/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function run() {
	# This case only applies to the storage sink; skip for other sink types.
	if [ "$SINK_TYPE" != "storage" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	start_tidb_cluster --workdir $WORK_DIR
	cd $WORK_DIR

	# Enable exchange-partition DDL on the downstream TiDB.
	run_sql "set @@global.tidb_enable_exchange_partition=on" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}

	# Start three TiCDC servers on different ports.
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --logsuffix 0
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8301" --logsuffix 1
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8302" --logsuffix 2

	SINK_URI="file://$WORK_DIR/storage_test?flush-interval=5s&enable-tidb-extension=true"
	run_cdc_cli changefeed create --sink-uri="$SINK_URI" --config=$CUR/conf/changefeed.toml
	# bypass safe mode
	sleep 20

	run_sql_file $CUR/data/prepare.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# Start the storage consumer to apply the sink output to the downstream TiDB.
	run_storage_consumer $WORK_DIR $SINK_URI $CUR/conf/changefeed.toml ""
	sleep 8

	# sync_diff can't check non-existent tables, so first verify the expected tables are created downstream.
	check_table_exists partition_table.t ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_table_exists partition_table.t1 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	# check_table_exists partition_table.t2 ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_table_exists partition_table.finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"