github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/tidb_mysql_test/run.sh

#!/bin/bash

set -e

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function prepare() {
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
	stop_tidb_cluster

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# record tso before we create tables to skip the system table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	TOPIC_NAME="ticdc-default-value-test-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
	pulsar)
		run_pulsar_cluster $WORK_DIR normal
		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
		;;
	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306/" ;;
	esac

	# create the changefeed through the open API instead of the cli so that
	# force_replicate can be enabled
	#run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI"
	SINK_PARA="{\"force_replicate\":true, \"changefeed_id\":\"tidb-mysql-test\", \"sink_uri\":\"$SINK_URI\", \"start_ts\":$start_ts}"
	curl -X POST -H "Content-Type: application/json" http://127.0.0.1:8300/api/v1/changefeeds -d "$SINK_PARA"

	case $SINK_TYPE in
	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
	esac
}

# The kafka and storage sinks are not supported yet, because:
#   (1) most cases have no pk/uk, so the consumer may receive the same DML more than once
#   (2) the kafka consumer does not yet support force_replicate
if [ "$SINK_TYPE" != "mysql" ]; then
	echo "[$(date)] <<<<<< skip test case $TEST_NAME for $SINK_TYPE sink! >>>>>>"
	exit 0
fi

# The mysql test may still suffer from duplicate DMLs because some tables have no pk/uk.
# [TODO] add a pk to those tables or move this case out of the integration tests.
trap stop_tidb_cluster EXIT
prepare $*

cd "$(dirname "$0")"
# run the mysql-test cases
./test.sh

mysql -h${UP_TIDB_HOST} -P${UP_TIDB_PORT} -uroot -e "create table test.finish_mark(id int primary key)"
check_table_exists test.finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 300
check_sync_diff $WORK_DIR $CUR/diff_config.toml

cleanup_process $CDC_BINARY
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
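
# Usage sketch (an assumption for manual runs, not part of the script itself):
# this script is normally launched by the tiflow integration-test harness,
# which builds cdc.test, provides the helpers sourced from
# ../_utils/test_prepare (OUT_DIR, UP_PD_HOST_1, UP_TIDB_HOST, ...), and
# passes the sink type as the first argument. Invoking the mysql-sink variant
# directly would look like:
#   ./run.sh mysql
# Any other sink type is skipped early by the check above.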