github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/dm/tests/case_sensitive/run.sh

#!/bin/bash

set -eu

cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
API_VERSION="v1alpha1"

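# prepare_sensitive_task copies the fixtures and task config unchanged; dm-task.yaml is
# assumed to carry the case-sensitive settings that the insensitive variant strips below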
function prepare_sensitive_task() {
	cp $cur/data/db1.prepare.sql $WORK_DIR/db1.prepare.sql
	cp $cur/data/db2.prepare.sql $WORK_DIR/db2.prepare.sql
	cp $cur/conf/dm-task.yaml $WORK_DIR/dm-task.yaml
}

function prepare_insensitive_task() {
	cp $cur/data/db1.prepare.sql $WORK_DIR/db1.prepare.sql
	cp $cur/data/db2.prepare.sql $WORK_DIR/db2.prepare.sql
	cp $cur/conf/dm-task.yaml $WORK_DIR/dm-task.yaml

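	# drop every line mentioning "sensitive" from the task config, and skip creating
	# the upper-case-only table, which would presumably clash with its lower-case
	# counterpart once matching becomes case-insensitive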
	sed -i "/sensitive/d" $WORK_DIR/dm-task.yaml
	sed -i "/create table upper_table/d" $WORK_DIR/db2.prepare.sql
}

function run_with_prepared() {
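	# the prepared SQL is assumed to rely on ANSI_QUOTES (double-quoted identifiers)
	# and NO_AUTO_VALUE_ON_ZERO, so override the sources' SQL mode up front; it is
	# restored at the end of the test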
	run_sql_both_source "SET @@GLOBAL.SQL_MODE='ANSI_QUOTES,NO_AUTO_VALUE_ON_ZERO'"
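	# failpoints consumed by the failpoint-enabled DM binaries via GO_FAILPOINTS:
	# shorten the worker's task-check interval to 500ms and (presumably) make the
	# relay module treat the upstream as a new server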
	inject_points=(
		"github.com/pingcap/tiflow/dm/worker/TaskCheckInterval=return(\"500ms\")"
		"github.com/pingcap/tiflow/dm/relay/NewUpstreamServer=return(true)"
	)
	export GO_FAILPOINTS="$(join_string \; ${inject_points[@]})"

	run_sql_file $WORK_DIR/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	check_contains 'Query OK, 2 rows affected'
	run_sql_file $WORK_DIR/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	check_contains 'Query OK, 3 rows affected'
	# manually create the schema that the route rules point at
	run_sql 'CREATE DATABASE IF NOT EXISTS `UPPER_DB_ROUTE`' $TIDB_PORT $TIDB_PASSWORD

	# start DM master and workers
	run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
	check_metric $MASTER_PORT 'start_leader_counter' 3 0 2
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT

	# push the MySQL source configs to the workers
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
	# make sure source1 is bound to worker1
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1

	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT
	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

	# start the DM task
	dmctl_start_task "$WORK_DIR/dm-task.yaml" "--remove-meta"
	# check that the task has started
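	# check_metric arguments are assumed to be: port, metric selector, retry count,
	# exclusive lower bound, exclusive upper bound — so the task-state gauge must
	# land on 2, presumably the Running stage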
	check_metric $WORKER1_PORT "dm_worker_task_state{source_id=\"mysql-replica-01\",task=\"test\",worker=\"worker1\"}" 10 1 3
	check_metric $WORKER2_PORT "dm_worker_task_state{source_id=\"mysql-replica-02\",task=\"test\",worker=\"worker2\"}" 10 1 3

	# use sync_diff_inspector to check the data from the full dump and load
	check_sync_diff $WORK_DIR $cur/conf/diff_config.toml

	# restart dm-worker1
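	# stop it with SIGHUP (ignoring "no such process"), wait for the old process to
	# exit, then bring up a fresh worker on the same port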
	pkill -hup -f dm-worker1.toml 2>/dev/null || true
	wait_pattern_exit dm-worker1.toml
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	# make sure worker1 has bound a source again, and that it is the same source it was bound to before
	run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"query-status test" \
		"worker1" 1

	# restart dm-worker2
	pkill -hup -f dm-worker2.toml 2>/dev/null || true
	wait_pattern_exit dm-worker2.toml
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	sleep 10
	echo "after restarting dm-worker, the task should resume automatically"

	# wait for the task to be running
	check_http_alive 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/status/test '"stage": "Running"' 10

	check_metric $WORKER1_PORT "dm_worker_task_state{source_id=\"mysql-replica-01\",task=\"test\",worker=\"worker1\"}" 10 1 3
	check_metric $WORKER2_PORT "dm_worker_task_state{source_id=\"mysql-replica-02\",task=\"test\",worker=\"worker2\"}" 10 1 3

	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	# use sync_diff_inspector to check the incremental data
	check_sync_diff $WORK_DIR $cur/conf/diff_config.toml

	# also test the block-allow-list
	run_sql "show databases;" $TIDB_PORT $TIDB_PASSWORD
	check_contains "Upper_DB1"
	check_contains "lower_db"
	# test the route rules
	check_contains "UPPER_DB_ROUTE"

	run_sql "show tables from UPPER_DB_ROUTE" $TIDB_PORT $TIDB_PASSWORD
	check_contains "do_table_route"
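	# run_sql_tidb_with_retry is assumed to re-run the query until its output
	# contains the expected line, tolerating replication lag before asserting the
	# routed row count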
	run_sql_tidb_with_retry "select count(*) from UPPER_DB_ROUTE.do_table_route" "count(*): 5"

	# test binlog event filter
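	# the task config is assumed to filter TRUNCATE events for this table, so only
	# the INSERT below should reach TiDB and the routed row count grows from 5 to 6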
	run_sql "truncate table Upper_DB.Do_Table" $MYSQL_PORT1 $MYSQL_PASSWORD1
	# insert another row
	run_sql "INSERT INTO Upper_DB.Do_Table (id, name) values (103, 'new');" $MYSQL_PORT1 $MYSQL_PASSWORD1
	sleep 2
	# ensure the truncate is ignored and the new row is inserted
	run_sql_tidb_with_retry "select count(*) from UPPER_DB_ROUTE.do_table_route" "count(*): 6"

	dmctl_stop_task test
	dmctl_operate_source stop $WORK_DIR/source1.yaml $SOURCE_ID1
	dmctl_operate_source stop $WORK_DIR/source2.yaml $SOURCE_ID2

	export GO_FAILPOINTS=''
}

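# with case-sensitive matching, the ignore rules are presumably written to match the
# exact upper-case names, so neither the ignored schema nor the ignored table should
# appear downstream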
function check_ignore_when_sensitive() {
	run_sql "show databases;" $TIDB_PORT $TIDB_PASSWORD
	check_not_contains "Upper_Db_IGNORE"
	run_sql "show tables from UPPER_DB_ROUTE" $TIDB_PORT $TIDB_PASSWORD
	check_not_contains "Do_table_ignore"
}

# register both cleanups in one trap: a second `trap ... EXIT` replaces the first,
# so splitting them would mean cleanup_process never runs on exit
trap "cleanup_process; cleanup_data Upper_DB Upper_DB1 lower_db UPPER_DB_ROUTE Upper_Db_IGNORE sync_diff_inspector" EXIT

# also clean up DM processes in case the last run failed
cleanup_process $*
cleanup_data Upper_DB Upper_DB1 lower_db UPPER_DB_ROUTE Upper_Db_IGNORE

prepare_sensitive_task
run_with_prepared
check_ignore_when_sensitive

cleanup_process $*
cleanup_data Upper_DB Upper_DB1 lower_db UPPER_DB_ROUTE Upper_Db_IGNORE

prepare_insensitive_task
run_with_prepared

cleanup_process $*

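# restore the sources' default SQL mode that was overridden at the start of run_with_prepared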
run_sql_both_source "SET @@GLOBAL.SQL_MODE='ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'"
echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"