github.com/pingcap/ticdc@v0.0.0-20220526033649-485a10ef2652/tests/processor_err_chan/run.sh

#!/bin/bash
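# Integration test for the processor error channel: a failpoint injects an error
# while the processor adds a table, the resulting changefeed must be marked
# "stopped" with that error message, and a changefeed created afterwards must
# still replicate data correctly.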

set -e

CUR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source $CUR/../_utils/test_prepare
WORK_DIR=$OUT_DIR/$TEST_NAME
CDC_BINARY=cdc.test
SINK_TYPE=$1

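# check_changefeed_mark_stopped <pd-endpoints> <changefeed-id> <expected-error-msg>
# Queries a changefeed with `cdc cli changefeed query` and fails unless its state
# is "stopped" and its error message contains the expected text.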
function check_changefeed_mark_stopped() {
    endpoints=$1
    changefeedid=$2
    error_msg=$3
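    # Query the changefeed status (`-s` presumably selects the simplified output)
    # and extract `.state` and `.error.message` from the JSON with jq.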
    info=$(cdc cli changefeed query --pd=$endpoints -c $changefeedid -s)
    echo "$info"
    state=$(echo "$info" | jq -r '.state')
    if [[ "$state" != "stopped" ]]; then
        echo "changefeed state $state is not 'stopped'"
        exit 1
    fi
    message=$(echo "$info" | jq -r '.error.message')
    if [[ ! "$message" =~ "$error_msg" ]]; then
        echo "error message '$message' does not match expected '$error_msg'"
        exit 1
    fi
}

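# Exported so the `ensure` retry helper can invoke the check by name; it
# presumably runs the command in a subshell on every attempt.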
export -f check_changefeed_mark_stopped

function run() {
    rm -rf $WORK_DIR && mkdir -p $WORK_DIR
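    # Bring up the upstream and downstream TiDB clusters for this test case.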
    start_tidb_cluster --workdir $WORK_DIR
    cd $WORK_DIR

    pd_addr="http://$UP_PD_HOST_1:$UP_PD_PORT_1"
    TOPIC_NAME="ticdc-processor-err-chan-$RANDOM"
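    # Choose the sink URI: Kafka when SINK_TYPE is "kafka", otherwise the default
    # MySQL sink (the downstream database).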
    case $SINK_TYPE in
        kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&kafka-version=${KAFKA_VERSION}";;
        *) SINK_URI="mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1";;
    esac
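    # For the Kafka sink, also start a consumer so the topic contents still end
    # up in the downstream database for the later sync-diff check.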
    if [ "$SINK_TYPE" == "kafka" ]; then
        run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?partition-num=4&version=${KAFKA_VERSION}"
    fi

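    # Prepare an identical schema on both sides: one database with ten empty tables.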
    run_sql "CREATE DATABASE processor_err_chan;" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
    run_sql "CREATE DATABASE processor_err_chan;" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
    for i in $(seq 1 10); do
        run_sql "CREATE table processor_err_chan.t$i (id int primary key auto_increment)" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
        run_sql "CREATE table processor_err_chan.t$i (id int primary key auto_increment)" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
    done

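    # Enable a failpoint that makes the processor fail once ("1*return(true)")
    # when it adds a table; the changefeed created below should hit it and be
    # marked stopped with "processor add table injected error".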
    export GO_FAILPOINTS='github.com/pingcap/ticdc/cdc/ProcessorAddTableError=1*return(true)' # old processor
    # export GO_FAILPOINTS='github.com/pingcap/ticdc/cdc/processor/pipeline/ProcessorAddTableError=1*return(true)' # new processor
    run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY --addr "127.0.0.1:8300" --pd $pd_addr

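    # Create the first changefeed and parse its ID out of the CLI output.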
    changefeed_id=$(cdc cli changefeed create --pd=$pd_addr --sink-uri="$SINK_URI" 2>&1 | tail -n2 | head -n1 | awk '{print $2}')

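    # Retry up to $retry_time times until the changefeed is reported as stopped
    # with the injected error message.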
    retry_time=10
    ensure $retry_time check_changefeed_mark_stopped $pd_addr $changefeed_id "processor add table injected error"

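    # The failpoint fires only once, so a freshly created changefeed should work:
    # insert rows upstream and verify they reach the downstream via sync-diff.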
    cdc cli changefeed create --pd=$pd_addr --sink-uri="$SINK_URI"
    for i in $(seq 1 10); do
        run_sql "INSERT INTO processor_err_chan.t$i values (),(),(),(),(),(),()" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
    done
    check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml

    cleanup_process $CDC_BINARY
}

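# Always tear down the TiDB clusters on exit.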
trap stop_tidb_cluster EXIT
run $*
echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"