github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/dm/tests/relay_interrupt/run.sh (about)

     1  #!/bin/bash
     2  
     3  set -eu
     4  
     5  cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
     6  source $cur/../_utils/test_prepare
     7  
     8  WORK_DIR=$TEST_DIR/$TEST_NAME
     9  
    10  function prepare_data1() {
    11  	run_sql 'DROP DATABASE if exists relay_interrupt;' $MYSQL_PORT1 $MYSQL_PASSWORD1
    12  	run_sql 'CREATE DATABASE relay_interrupt;' $MYSQL_PORT1 $MYSQL_PASSWORD1
    13  	run_sql "CREATE TABLE relay_interrupt.t$1(i TINYINT, j INT UNIQUE KEY);" $MYSQL_PORT1 $MYSQL_PASSWORD1
    14  	for j in $(seq 100); do
    15  		run_sql "INSERT INTO relay_interrupt.t$1 VALUES ($j,${j}000$j),($j,${j}001$j);" $MYSQL_PORT1 $MYSQL_PASSWORD1
    16  	done
    17  }
    18  
# Delete one row from upstream relay_interrupt.t$1 to generate a fresh binlog
# event (used after the relay-read failpoint is armed, so the syncer has new
# work that forces the local-vs-remote binlog path to be exercised).
# Arguments:
#   $1 - table-name suffix (the failpoint-loop index)
function prepare_data2() {
	run_sql "DELETE FROM relay_interrupt.t$1 limit 1;" $MYSQL_PORT1 $MYSQL_PASSWORD1
}
    22  
# Drive one full relay-interrupt scenario per entry in `failpoints`:
#   phase 1: with the failpoint armed, creating the source succeeds but the
#            relay unit errors out; start-task reports failure and
#            query-status surfaces the injected MySQL error 1152;
#   phase 2: restart dm-worker with failpoints cleared and confirm the task
#            recovers and syncs from the *local* relay log;
#   phase 3: arm GetEventFromLocalFailed so reading the local relay log fails
#            and confirm the syncer falls back to the *remote* upstream binlog.
function run() {
	failpoints=(
		# 1152 is ErrAbortingConnection
		"github.com/pingcap/tiflow/dm/pkg/conn/GetGlobalVariableFailed=return(\"server_uuid,1152\")"
		"github.com/pingcap/tiflow/dm/pkg/conn/GetSessionVariableFailed=return(\"sql_mode,1152\")"
	)

	for ((i = 0; i < ${#failpoints[@]}; i++)); do
		# each failpoint round gets a private work dir (shadows the global WORK_DIR)
		WORK_DIR=$TEST_DIR/$TEST_NAME/$i

		echo "failpoint=${failpoints[i]}"
		export GO_FAILPOINTS=${failpoints[i]}

		# clear downstream env
		run_sql 'DROP DATABASE if exists dm_meta;' $TIDB_PORT $TIDB_PASSWORD
		run_sql 'DROP DATABASE if exists relay_interrupt;' $TIDB_PORT $TIDB_PASSWORD
		prepare_data1 $i

		# --- phase 1: bring up master/worker with the failpoint armed ---
		run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
		check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
		run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
		check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
		# operate mysql config to worker
		cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
		# point relay-dir at this round's private work dir before creating the source
		sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml
		dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

		echo "query status, relay log failed"
		run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status -s $SOURCE_ID1" \
			"no sub task started" 1 \
			"ERROR" 1

		echo "start task and query status, task and relay have error message"
		task_conf="$cur/conf/dm-task.yaml"
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"start-task $task_conf" \
			"\"result\": false" 1 \
			"\"source\": \"$SOURCE_ID1\"" 1

		echo "waiting for asynchronous relay and subtask to be started"
		sleep 2
		# the injected 1152 must show up as a database driver error on the source
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status -s $SOURCE_ID1" \
			"database driver error" 1 \
			"ERROR 1152" 1
		# relay never produced any files, so its directory is reported empty
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status -s $SOURCE_ID1" \
			"there aren't any data under relay log directory" 1 \
			"ERROR" 1
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status" \
			"\"taskName\": \"test\"" 1 \
			"\"taskStatus\": \"Error - Some error occurred in subtask. Please run \`query-status test\` to get more details.\"" 1

		# --- phase 2: clear failpoints, restart worker, task should recover ---
		echo "reset go failpoints, and need restart dm-worker"
		echo "then resume task, task will recover success"
		kill_dm_worker
		export GO_FAILPOINTS=''

		run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
		check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

		# give the restarted worker time to re-register and resume the subtask
		sleep 8
		echo "start task after restarted dm-worker"
		task_conf="$cur/conf/dm-task.yaml"
		# the subtask survived the restart, so re-starting it must be rejected
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"start-task $task_conf" \
			"\"result\": false" 1 \
			"subtasks with name test for sources \[mysql-replica-01\] already exist" 1
		# wait relay unit up
		run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status test" \
			"\"binlogType\": \"local\"" 1

		check_sync_diff $WORK_DIR $cur/conf/diff_config.toml

		# --- phase 3: local relay read fails -> syncer falls back to upstream ---
		echo "read binlog from relay log failed, and will use remote binlog"
		kill_dm_worker
		export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/relay/GetEventFromLocalFailed=return()"
		run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
		check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
		# generate a new upstream event so the syncer must read past the relay log
		prepare_data2 $i
		sleep 8
		run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
			"query-status test" \
			"\"binlogType\": \"remote\"" 1

		check_sync_diff $WORK_DIR $cur/conf/diff_config.toml

		export GO_FAILPOINTS=''
		cleanup_process
	done
}
   117  
   118  cleanup_data relay_interrupt
   119  cleanup_process
   120  
   121  run $*
   122  
   123  cleanup_process
   124  
   125  echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"