github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/kafka_big_messages_v2/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function run() {
	# This case tests the kafka sink only.
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# Record the current tso before creating tables so that the changefeed
	# skips the system table DDLs.
	start_ts=$(cdc cli tso query --pd=http://$UP_PD_HOST_1:$UP_PD_PORT_1)

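	# The cdc server writes its logs under $WORK_DIR; check_logs at the end of
	# this script scans them for unexpected errors.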
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	# Use a max-message-bytes value that is larger than the max.message.bytes
	# configured on the kafka topic, to verify that TiCDC automatically falls
	# back to the topic's own limit.
	# See: https://github.com/PingCAP-QE/ci/blob/ddde195ebf4364a0028d53405d1194aa37a4d853/jenkins/pipelines/ci/ticdc/cdc_ghpr_kafka_integration_test.groovy#L178
	# Use a topic that has already been created.
	# See: https://github.com/PingCAP-QE/ci/blob/ddde195ebf4364a0028d53405d1194aa37a4d853/jenkins/pipelines/ci/ticdc/cdc_ghpr_kafka_integration_test.groovy#L180
	SINK_URI="kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=${KAFKA_VERSION}&max-message-bytes=12582912"
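	# Not part of the original test: to compare the 12582912-byte sink limit
	# against the topic's real max.message.bytes, a sketch assuming a
	# Kafka >= 2.x distribution with kafka-configs.sh on PATH would be:
	#   kafka-configs.sh --bootstrap-server 127.0.0.1:9092 \
	#       --entity-type topics --entity-name big-message-test --describe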
	cdc cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" --config="$CUR/conf/changefeed.toml"
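	# Note: run_kafka_consumer below passes the kafka version as `version=`,
	# while the changefeed sink URI above uses `kafka-version=`; both carry
	# the same $KAFKA_VERSION.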
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&version=${KAFKA_VERSION}"
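	# Not part of the original test: the changefeed is now running; when
	# debugging locally, its state can be inspected with, e.g.:
	#   cdc cli changefeed list --pd=http://$UP_PD_HOST_1:$UP_PD_PORT_1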

	echo "Starting to generate kafka big messages..."
	cd $CUR/../../utils/gen_kafka_big_messages
	if [ ! -f ./gen_kafka_big_messages ]; then
		GO111MODULE=on go build
	fi
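	# GO111MODULE=on forces a module-aware build even on older Go toolchains;
	# the guard above keeps the built binary in place so later runs skip the
	# build step.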
	# Generate rows larger than the kafka broker's max.message.bytes; TiCDC
	# must still deliver them correctly.
	./gen_kafka_big_messages --row-count=15 --sql-file-path=$CUR/test.sql
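	# Replay the generated SQL upstream; it creates kafka_big_messages.test,
	# which is then replicated through Kafka and checked downstream.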
	run_sql_file $CUR/test.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	table="kafka_big_messages.test"
	check_table_exists $table ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
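	# check_sync_diff runs sync-diff-inspector with conf/diff_config.toml to
	# verify that the upstream and downstream tables hold identical data.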
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	cleanup_process $CDC_BINARY
}

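# Always tear down the TiDB cluster on exit, even when the test fails midway.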
trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"