github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/kafka_compression/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function test_compression() {
	# record tso before we create tables to skip the system table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})

	TOPIC_NAME="ticdc-kafka-compression-$1-test"
	SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true&kafka-version=${KAFKA_VERSION}&compression=$1"
	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" -c $1
	run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=canal-json&version=${KAFKA_VERSION}&enable-tidb-extension=true"
	run_sql_file $CUR/data/$1_data.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# the producer should log the chosen compression algorithm exactly once;
	# count matches via `wc -l` so a zero-hit grep does not trip `set -e`
	compression_algorithm=$(grep "Kafka producer uses $1 compression algorithm" "$WORK_DIR/cdc.log" | wc -l)
	if [[ "$compression_algorithm" -ne 1 ]]; then
		echo "can't find producer compression algorithm"
		exit 1
	fi
	check_table_exists test.$1_finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 200
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
	run_cdc_cli changefeed pause -c $1
	run_cdc_cli changefeed remove -c $1
}

function run() {
	# this case only applies to the kafka sink
	if [ "$SINK_TYPE" != "kafka" ]; then
		return
	fi

	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR
	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	test_compression gzip
	test_compression snappy
	test_compression lz4
	test_compression zstd

	cleanup_process $CDC_BINARY
}

trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
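
For reference, a minimal standalone sketch of the two mechanics this case relies on: building the compression-specific changefeed sink URI, and asserting that the producer logged the chosen algorithm exactly once. The `gzip` argument, the fallback `KAFKA_VERSION` value, and the `./cdc.log` path are assumptions made for illustration, not values taken from the test harness.

#!/bin/bash
set -e

algo="gzip"                               # assumed compression algorithm
KAFKA_VERSION="${KAFKA_VERSION:-2.4.1}"   # assumed fallback if the env var is unset
topic="ticdc-kafka-compression-${algo}-test"
sink_uri="kafka://127.0.0.1:9092/${topic}?protocol=canal-json&enable-tidb-extension=true&kafka-version=${KAFKA_VERSION}&compression=${algo}"
echo "changefeed sink URI would be: ${sink_uri}"

# Pipe grep through `wc -l` so a zero-match result still exits 0 and the
# script can report the failure itself instead of dying under `set -e`.
log_file="${1:-./cdc.log}"                # hypothetical log location
matches=$(grep "Kafka producer uses ${algo} compression algorithm" "$log_file" | wc -l)
if [[ "$matches" -ne 1 ]]; then
	echo "can't find producer compression algorithm"
	exit 1
fi
echo "producer compression algorithm confirmed: ${algo}"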