github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/avro_basic/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1
# use kafka-consumer with the Avro decoder to sync data from Kafka to MySQL
function run() {
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	echo 'Starting schema registry...'
	./bin/bin/schema-registry-start -daemon ./bin/etc/schema-registry/schema-registry.properties
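	# Poll the registry's REST endpoint until it responds. Note the port: 8088 here
	# presumably comes from the bundled schema-registry.properties, overriding the
	# Confluent default of 8081. Give up after 30 attempts (roughly 60 seconds).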
	i=0
	while ! curl -o /dev/null -v -s "http://127.0.0.1:8088"; do
		i=$(($i + 1))
		if [ $i -gt 30 ]; then
			echo 'Failed to start schema registry'
			exit 1
		fi
		sleep 2
	done

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# set the schema registry compatibility level to NONE, to allow the schema
	# evolution caused by TiDB DDL execution.
	curl -X PUT -H "Content-Type: application/vnd.schemaregistry.v1+json" --data '{"compatibility": "NONE"}' http://127.0.0.1:8088/config
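	# Optional sanity check (not in the original flow), assuming the standard
	# Confluent /config endpoint:
	#   curl -s http://127.0.0.1:8088/config
	# should now return {"compatibilityLevel":"NONE"}.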

	# enable row-level checksum on the upstream TiDB cluster
	run_sql "set global tidb_enable_row_level_checksum=true" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
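	# With this variable on, TiDB attaches a checksum to each written row; TiCDC can
	# carry it through the Avro payload so that the consumer started below (with
	# enable-row-checksum=true) can verify data integrity end to end.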

	TOPIC_NAME="ticdc-avro-test"
	# record the tso before we create tables, so the changefeed skips the system table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
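	# The TSO is a 64-bit timestamp allocated by PD; passing it as --start-ts below
	# makes the changefeed begin replication from this exact point, after the
	# cluster's bootstrap DDLs but before our test tables are created.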

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=avro&enable-tidb-extension=true&avro-enable-watermark=true&avro-decimal-handling-mode=string&avro-bigint-unsigned-handling-mode=string"
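	# Sink URI parameters, briefly (per TiCDC's Avro sink options):
	#   protocol=avro                              encode rows with the Avro codec
	#   enable-tidb-extension=true                 include TiDB-specific fields (e.g. commit ts) in the payload
	#   avro-enable-watermark=true                 emit watermark (resolved-ts) events to the topic
	#   avro-decimal-handling-mode=string          encode DECIMAL columns as strings rather than Avro decimals
	#   avro-bigint-unsigned-handling-mode=string  encode BIGINT UNSIGNED as strings to avoid signed-long overflow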

	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" --config="$CUR/conf/changefeed.toml" --schema-registry=http://127.0.0.1:8088
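	# The --schema-registry flag above points the Avro encoder at the
	# Confluent-compatible registry started earlier; key and value schemas are
	# registered there per topic.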

	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=avro&enable-tidb-extension=true&enable-row-checksum=true" "" "http://127.0.0.1:8088"
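	# The consumer URI mirrors the sink settings and adds enable-row-checksum=true,
	# so the decoder recomputes and verifies the per-row checksum enabled earlier;
	# the final argument points the consumer at the same schema registry.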

	run_sql_file $CUR/data/data.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# sync_diff can't check a non-existent table, so first verify that the expected tables are created downstream
	check_table_exists test.finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 200
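	# finish_mark is presumably the last table touched by data.sql; once it exists
	# downstream, all preceding DDL/DML should have been replicated, making it safe
	# to run sync_diff.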
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	cleanup_process $CDC_BINARY
}

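# Tear down the TiDB cluster on every exit path, including early failures under `set -e`.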
trap stop_tidb_cluster EXIT
run "$@"

check_logs $WORK_DIR

echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"