github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/ddl_attributes/run.sh

#!/bin/bash

set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

function run() {
	rm -rf $WORK_DIR && mkdir -p $WORK_DIR

	start_tidb_cluster --workdir $WORK_DIR

	cd $WORK_DIR

	# record the tso before creating tables so the changefeed skips the system table DDLs
	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})

	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY

	TOPIC_NAME="ticdc-ddl-attributes-test-$RANDOM"
	case $SINK_TYPE in
	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}" ;;
	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
	pulsar)
		run_pulsar_cluster $WORK_DIR normal
		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
		;;
	*) SINK_URI="mysql://root@127.0.0.1:3306/" ;;
	esac

	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI"

	case $SINK_TYPE in
	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}" ;;
	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
	esac
	run_sql_file $CUR/data/prepare.sql ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	# sync_diff can't check a table that doesn't exist, so first make sure the expected tables have been created downstream
	sleep 3
	check_table_exists ddl_attributes.attributes_t1_new ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_table_exists ddl_attributes.finish_mark ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

	cleanup_process $CDC_BINARY

	run_sql "show create table ddl_attributes.placement_t1;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
	run_sql "show create table ddl_attributes.placement_t2;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}

	# placement_t1 and placement_t2 should exist in the downstream
	run_sql "show create table ddl_attributes.placement_t1;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "CREATE TABLE \`placement_t1\` "
	run_sql "show create table ddl_attributes.placement_t2;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "CREATE TABLE \`placement_t2\` "

	# ttl_t1/t2/t3/t5 should keep their TTL attributes downstream; ttl_t4 should carry no TTL options
	TTL_MARK='![ttl]'
	CREATE_TTL_SQL_CONTAINS1="/*T${TTL_MARK} TTL=\`t\` + INTERVAL 1 DAY */ /*T${TTL_MARK} TTL_ENABLE='OFF' */ /*T${TTL_MARK} TTL_JOB_INTERVAL='1h' */"
	CREATE_TTL_SQL_CONTAINS2="/*T${TTL_MARK} TTL=\`t\` + INTERVAL 1 DAY */ /*T${TTL_MARK} TTL_ENABLE='OFF' */ /*T${TTL_MARK} TTL_JOB_INTERVAL='7h' */"

	run_sql "show create table ddl_attributes.ttl_t1;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "$CREATE_TTL_SQL_CONTAINS1"
	run_sql "show create table ddl_attributes.ttl_t2;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "$CREATE_TTL_SQL_CONTAINS1"
	run_sql "show create table ddl_attributes.ttl_t3;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "$CREATE_TTL_SQL_CONTAINS1"
	run_sql "show create table ddl_attributes.ttl_t4;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_not_contains "TTL_ENABLE" &&
		check_not_contains "TTL="
	run_sql "show create table ddl_attributes.ttl_t5;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} &&
		check_contains "$CREATE_TTL_SQL_CONTAINS2"
}
trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
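
# Usage sketch (hypothetical, with assumed values): the script sources the helper
# functions from ../_utils/test_prepare itself, but it relies on the integration-test
# harness to provide OUT_DIR and TEST_NAME and to pass the sink type as its single
# positional argument. A direct invocation under those assumptions might look like:
#
#   OUT_DIR=/tmp/tidb_cdc_test TEST_NAME=ddl_attributes ./run.sh mysql
#
# The directory and the way the harness actually wires these variables are assumptions,
# not something defined in this file.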