#!/bin/bash
# Integration test: multi-table DDL handling across three changefeeds that use
# different table filters (normal / error-1 / error-2). Verifies that a
# cross-filter "rename table" is skipped by one feed and fails another with
# ErrSyncRenameTableFailed, while replication of the remaining tables stays
# consistent.
#
# Required env (supplied by the harness via _utils/test_prepare):
#   OUT_DIR, TEST_NAME, UP_PD_HOST_1, UP_PD_PORT_1,
#   UP_TIDB_HOST, UP_TIDB_PORT, DOWN_TIDB_HOST, DOWN_TIDB_PORT, KAFKA_VERSION
# Arguments:
#   $1 - sink type ("kafka", "mysql", "storage", "pulsar", ...)

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source "$CUR/../_utils/test_prepare"
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

# Start a local MinIO server so redo/consistent logs can target an
# S3-compatible endpoint. Credentials are test-only fixtures, not secrets.
export MINIO_ACCESS_KEY=cdcs3accesskey
export MINIO_SECRET_KEY=cdcs3secretkey
export MINIO_BROWSER=off
export AWS_ACCESS_KEY_ID=$MINIO_ACCESS_KEY
export AWS_SECRET_ACCESS_KEY=$MINIO_SECRET_KEY
export S3_ENDPOINT=127.0.0.1:24927

rm -rf "$WORK_DIR"
mkdir -p "$WORK_DIR"
# Kill any minio left over from a previous (possibly aborted) run.
pkill -9 minio || true
bin/minio server --address "$S3_ENDPOINT" "$WORK_DIR/s3" &
MINIO_PID=$!

# Wait up to ~60s (30 probes, 2s apart) for minio to accept connections.
i=0
while ! curl -o /dev/null -v -s "http://$S3_ENDPOINT/"; do
	i=$((i + 1))
	if [ "$i" -gt 30 ]; then
		echo 'Failed to start minio'
		exit 1
	fi
	sleep 2
done

# Send SIGINT so minio shuts down gracefully.
stop_minio() {
	kill -2 "$MINIO_PID"
}

# EXIT-trap cleanup: stop minio, then tear down the whole TiDB cluster.
stop() {
	stop_minio
	stop_tidb_cluster
}

# Create the bucket the redo log writer expects.
s3cmd --access_key="$MINIO_ACCESS_KEY" --secret_key="$MINIO_SECRET_KEY" --host="$S3_ENDPOINT" --host-bucket="$S3_ENDPOINT" --no-ssl mb s3://logbucket

function run() {
	# This case does not cover the storage sink.
	if [ "$SINK_TYPE" == "storage" ]; then
		return
	fi
	# TODO(dongmen): enable pulsar in the future.
	if [ "$SINK_TYPE" == "pulsar" ]; then
		exit 0
	fi

	start_tidb_cluster --workdir "$WORK_DIR"
	cd "$WORK_DIR"

	# record tso before we create tables to skip the system table DDLs
	start_ts=$(cdc cli tso query --pd="http://$UP_PD_HOST_1:$UP_PD_PORT_1")

	run_cdc_server --workdir "$WORK_DIR" --binary "$CDC_BINARY"

	TOPIC_NAME_1="ticdc-multi-tables-ddl-test-normal-$RANDOM"
	TOPIC_NAME_2="ticdc-multi-tables-ddl-test-error-1-$RANDOM"
	TOPIC_NAME_3="ticdc-multi-tables-ddl-test-error-2-$RANDOM"

	cf_normal="test-normal"
	cf_err1="test-error-1"
	cf_err2="test-error-2"

	case $SINK_TYPE in
	"kafka")
		SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME_1?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760"
		cdc cli changefeed create -c="$cf_normal" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/normal.toml"

		SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME_2?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760"
		cdc cli changefeed create -c="$cf_err1" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/error-1.toml"

		SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME_3?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760"
		cdc cli changefeed create -c="$cf_err2" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/error-2.toml"

		run_kafka_consumer "$WORK_DIR" "kafka://127.0.0.1:9092/$TOPIC_NAME_1?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760"
		run_kafka_consumer "$WORK_DIR" "kafka://127.0.0.1:9092/$TOPIC_NAME_2?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760"
		run_kafka_consumer "$WORK_DIR" "kafka://127.0.0.1:9092/$TOPIC_NAME_3?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760"
		;;
	*)
		# Default: sink straight to the downstream MySQL-compatible endpoint.
		SINK_URI="mysql://normal:123456@127.0.0.1:3306/"
		cdc cli changefeed create -c="$cf_normal" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/normal.toml"
		cdc cli changefeed create -c="$cf_err1" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/error-1.toml"
		cdc cli changefeed create -c="$cf_err2" --start-ts="$start_ts" --sink-uri="$SINK_URI" --config="$CUR/conf/error-2.toml"
		;;
	esac

	run_sql_file "$CUR/data/test.sql" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"
	check_table_exists multi_tables_ddl_test.t55 "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	check_table_exists multi_tables_ddl_test.t66 "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	check_table_exists multi_tables_ddl_test.t7 "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	check_table_exists multi_tables_ddl_test.t88 "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	# sync_diff can't check non-exist table, so we check expected tables are created in downstream first
	check_table_exists multi_tables_ddl_test.finish_mark "${DOWN_TIDB_HOST}" "${DOWN_TIDB_PORT}"
	echo "check table exists success"

	# changefeed test-error will not report an error, "multi_tables_ddl_test.t555 to multi_tables_ddl_test.t55" part will be skipped.
	run_sql "rename table multi_tables_ddl_test.t7 to multi_tables_ddl_test.t77, multi_tables_ddl_test.t555 to multi_tables_ddl_test.t55;" "${UP_TIDB_HOST}" "${UP_TIDB_PORT}"

	check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "$cf_normal" "normal" "null" ""
	check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "$cf_err1" "normal" "null" ""
	check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "$cf_err2" "failed" "ErrSyncRenameTableFailed" ""

	check_sync_diff "$WORK_DIR" "$CUR/conf/diff_config.toml" 60

	cleanup_process "$CDC_BINARY"
}

trap stop EXIT
# "$@" (not $*) preserves argument word boundaries.
run "$@"
check_logs "$WORK_DIR"
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"