github.com/pingcap/ticdc@v0.0.0-20220526033649-485a10ef2652/tests/changefeed_pause_resume/run.sh

#!/bin/bash

set -e

CUR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1
TABLE_COUNT=3

function run() {
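    # Prepare a clean working directory and start the upstream/downstream TiDB cluster.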
    rm -rf $WORK_DIR && mkdir -p $WORK_DIR
    start_tidb_cluster --workdir $WORK_DIR
    cd $WORK_DIR

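    # Build the sink URI for the requested sink type (Kafka, or the default MySQL sink).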
    pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
    TOPIC_NAME="ticdc-changefeed-pause-resume-$RANDOM"
    case $SINK_TYPE in
        kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&kafka-version=${KAFKA_VERSION}";;
        *) SINK_URI="mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1";;
    esac
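    # For the Kafka sink, start a Kafka consumer to replicate messages on to the downstream database.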
    if [ "$SINK_TYPE" == "kafka" ]; then
        run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&version=${KAFKA_VERSION}"
    fi
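    # Start the CDC server and create the changefeed, capturing the changefeed ID printed by the CLI.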
    run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --pd $pd_addr
    changefeed_id=$(cdc cli changefeed create --pd=$pd_addr --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')

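    # Create the test database and tables upstream, then wait for them to appear downstream.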
    run_sql "CREATE DATABASE changefeed_pause_resume;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
    for i in $(seq 1 $TABLE_COUNT); do
        stmt="CREATE TABLE changefeed_pause_resume.t$i (id int primary key auto_increment, t datetime DEFAULT CURRENT_TIMESTAMP)"
        run_sql "$stmt" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
    done

    for i in $(seq 1 $TABLE_COUNT); do
        table="changefeed_pause_resume.t$i"
        check_table_exists $table ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
    done

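    # Repeat the pause/resume cycle: while the changefeed is paused, drop and recreate the tables
    # and insert fresh rows, then resume and verify that upstream and downstream stay consistent.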
    for i in $(seq 1 10); do
        cdc cli changefeed pause --changefeed-id=$changefeed_id --pd=$pd_addr

        for j in $(seq 1 $TABLE_COUNT); do
            stmt="DROP TABLE changefeed_pause_resume.t$j"
            run_sql "$stmt" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
        done

        for j in $(seq 1 $TABLE_COUNT); do
            stmt="CREATE TABLE changefeed_pause_resume.t$j (id int primary key auto_increment, t datetime DEFAULT CURRENT_TIMESTAMP)"
            run_sql "$stmt" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
        done

        for j in $(seq 1 $TABLE_COUNT); do
            stmt="INSERT INTO changefeed_pause_resume.t$j VALUES (),(),(),(),()"
            run_sql "$stmt" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
        done

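        # Resume the changefeed and check that the DDL and DML issued while paused are replicated.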
        cdc cli changefeed resume --changefeed-id=$changefeed_id --pd=$pd_addr

        check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
    done

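    # Stop the CDC processes started by this test.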
    cleanup_process $CDC_BINARY
}

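# Stop the TiDB cluster when the script exits; run the test, check the CDC logs, and report success.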
trap stop_tidb_cluster EXIT
run $*
check_logs $WORK_DIR
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"