github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/canal_json_adapter_compatibility/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

# Use the canal-adapter to sync data from Kafka to MySQL,
# making sure that the `canal-json` output can be consumed by the canal-adapter.
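# For reference, a canal-json row event on the Kafka topic looks roughly like the
# following (field set abbreviated, values purely illustrative):
#   {"id":0,"database":"test","table":"binary_columns","isDdl":false,"type":"INSERT",
#    "data":[{"id":"1","c_binary":"..."}],"old":null,"es":1715000000000,"ts":1715000000001}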
function run() {
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR
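	# start_tidb_cluster is expected to bring up both the upstream and the
	# downstream TiDB clusters used by this test under $WORK_DIR.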

	cd $WORK_DIR

	# record the TSO before creating tables so the changefeed skips the system-table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
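	# A TSO is a 64-bit timestamp: the physical time in milliseconds in the high
	# bits and an 18-bit logical counter in the low bits, so (purely as an
	# illustration, not used by this test) an approximate value could also be
	# derived from the wall clock: start_ts=$(( $(date +%s%3N) << 18 ))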

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	SINK_URI="kafka://127.0.0.1:9092/test?protocol=canal-json&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760"
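	# The sink URI selects the canal-json protocol on the `test` topic; the
	# canal-json protocol also supports an `enable-tidb-extension` parameter that
	# adds TiDB-specific fields such as the commit ts, left at its default here.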

	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI"
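	# No changefeed config file is passed, so default filter and sink settings
	# apply; a TOML file could be supplied via --config (for example a
	# hypothetical $CUR/conf/changefeed.toml) if table filtering were needed.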

	run_sql_file $CUR/data/data.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# sync_diff can't check a non-existent table, so first make sure the expected
	# tables have been created in the downstream
	check_table_exists test.binary_columns ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 600
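	# check_table_exists polls the downstream until the table is visible or the
	# 600-second timeout elapses, roughly equivalent to retrying a probe such as:
	#   run_sql "SELECT 1 FROM test.binary_columns LIMIT 1;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}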
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 600

	run_sql_file $CUR/data/data_gbk.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	check_table_exists test.binary_columns ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 600
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml 600

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"