github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/dm/tests/s3_dumpling_lightning/run.sh

#!/bin/bash

set -eu

cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
TASK_NAME="s3_dumpling_lightning"
SPECIAL_TASK_NAME="ab?c/b%cËd"
SOURCE_ID1="mysql-replica-01"
db="s3_dumpling_lightning"
db1="s3_dumpling_lightning1"
tb="t"
tb1="t1"
S3_DIR="s3://dmbucket/dump?region=us-east-1\&endpoint=http://127.0.0.1:8688\&access_key=s3accesskey\&secret_access_key=s3secretkey\&force_path_style=true"
LOCAL_TEST_DIR="./dumpdata"
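
# NOTE: the query string on S3_DIR carries the external-storage options
# (region, endpoint, access/secret key, force_path_style) that dumpling and
# lightning read from the task's `dir`. The `&` separators are backslash-escaped,
# presumably because S3_DIR is later spliced into a sed replacement
# ("s#dir: placeholder#dir: $S3_DIR#g"), where an unescaped `&` would expand to
# the matched text while `\&` stays literal. The s3_* values below are expected
# to match the credentials embedded in this URL so minio accepts the requests.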

# s3 config
s3_ACCESS_KEY="s3accesskey"
s3_SECRET_KEY="s3secretkey"
S3_ENDPOINT="127.0.0.1:8688"
s3_DBPATH="${WORK_DIR}/s3.minio"
s3_bucket="dmbucket"
dumpPath="dmbucket/dump"

# start s3 server
function start_s3() {
	export MINIO_ACCESS_KEY=$s3_ACCESS_KEY
	export MINIO_SECRET_KEY=$s3_SECRET_KEY
	export MINIO_BROWSER=on
	export S3_ENDPOINT=$S3_ENDPOINT
	bin/minio server --address $S3_ENDPOINT "$s3_DBPATH" &
	s3_MINIO_PID=$!

	i=0
	while ! curl -o /dev/null -v -s "http://$S3_ENDPOINT/"; do
		i=$(($i + 1))
		if [ $i -gt 7 ]; then
			echo 'Failed to start minio'
			exit 1
		fi
		sleep 2
	done
	# create the bucket directory under the minio data path
	mkdir -p "${s3_DBPATH}/${s3_bucket}"
}

# clean up s3 server
cleanup_s3() {
	pkill -9 minio 2>/dev/null || true
	wait_process_exit minio
	rm -rf $s3_DBPATH
}

# check that dump files exist in s3
# $1 db name
# $2 table name
# $3 task name
# $4 source id
function check_dump_s3_exist() {

	schema_create="${1}-schema-create.sql"
	table_schema="${1}.${2}-schema.sql"

	file_should_exist "${s3_DBPATH}/${dumpPath}/${3}.${4}/${schema_create}"
	file_should_exist "${s3_DBPATH}/${dumpPath}/${3}.${4}/${table_schema}"
}

function file_should_exist() {
	if [ ! -f "$1" ]; then
		echo "[$(date)] File $1 not found." && exit 1
	fi
}

function dir_should_not_exist() {
	if [ -d "$1" ]; then
		echo "[$(date)] Dir $1 should not exist." && exit 1
	fi
}

# $1 == true: keep dump files (clean-dump-file: false) and check they exist in s3
# $1 == false: let the task clean dump files and check the dump dir is gone
# $2 == task name, used to locate the dump directory in s3
function run_test() {

	cleanup_data
	cleanup_s3
	# start s3 server
	start_s3

	kill_dm_master
	kill_dm_worker

	# start dm master and workers
	run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	# apply mysql source config to the workers
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1
	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

	echo "prepare source data"
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	run_sql_file $cur/data/clean_data.sql $TIDB_HOST $TIDB_PORT $TIDB_PASSWORD
	run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "start task"
	cp $cur/conf/dm-task.yaml $WORK_DIR/dm-task.yaml
	sed -i "s#name: test#name: $2#g" $WORK_DIR/dm-task.yaml
	sed -i "s#dir: placeholder#dir: $S3_DIR#g" $WORK_DIR/dm-task.yaml
	if $1; then
		sed -i "s#clean-dump-file: true#clean-dump-file: false#g" $WORK_DIR/dm-task.yaml
	fi
	dmctl_start_task $WORK_DIR/dm-task.yaml "--remove-meta"

	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "check task result"
	# wait for the table to be created downstream
	run_sql_tidb_with_retry "select count(1) from information_schema.tables where TABLE_SCHEMA='${db}' and TABLE_NAME = '${tb}';" "count(1): 1"

	# check table data
	run_sql_tidb_with_retry "select count(1) from ${db}.${tb};" "count(1): 25"

	# check dump files
	if $1; then
		check_dump_s3_exist $db1 $tb1 $2 $SOURCE_ID1
	else
		dir_should_not_exist "${s3_DBPATH}/${dumpPath}/${2}.${SOURCE_ID1}"
	fi

	cleanup_s3
}
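
# run_error_check exercises the loader failpoint
# github.com/pingcap/tiflow/dm/loader/TestRemoveMetaFile via GO_FAILPOINTS,
# which dm-worker honours when built with failpoints enabled. The check below
# only asserts that both workers' stdout logs contain the expected panic
# message; the failpoint's behaviour itself lives in dm/loader.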

function run_error_check() {

	cleanup_data
	cleanup_s3
	# start s3 server
	start_s3

	kill_dm_master
	kill_dm_worker

	export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/loader/TestRemoveMetaFile=return()"

	# start dm master and workers
	run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	# apply mysql source config to the workers
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1
	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

	echo "prepare source data"
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	run_sql_file $cur/data/clean_data.sql $TIDB_HOST $TIDB_PORT $TIDB_PASSWORD
	run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "start task"
	cp $cur/conf/dm-task.yaml $WORK_DIR/dm-task.yaml
	sed -i "s#name: test#name: $TASK_NAME#g" $WORK_DIR/dm-task.yaml
	sed -i "s#dir: placeholder#dir: $S3_DIR#g" $WORK_DIR/dm-task.yaml
	run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
		"start-task $WORK_DIR/dm-task.yaml"

	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "error check"
	check_log_contain_with_retry 'panic: success check file not exist!!' $WORK_DIR/worker1/log/stdout.log
	check_log_contain_with_retry 'panic: success check file not exist!!' $WORK_DIR/worker2/log/stdout.log

	export GO_FAILPOINTS=""

	cleanup_s3
}

function test_local_special_name() {
	cleanup_data

	kill_dm_master
	kill_dm_worker

	# start dm master and workers
	run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
	check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
	run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
	run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
	check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT

	# apply mysql source config to the workers
	cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
	cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
	dmctl_operate_source create $WORK_DIR/source1.yaml $SOURCE_ID1
	dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2

	echo "prepare source data"
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/clean_data.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
	run_sql_file $cur/data/clean_data.sql $TIDB_HOST $TIDB_PORT $TIDB_PASSWORD
	run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "start task"
	cp $cur/conf/dm-task.yaml $WORK_DIR/dm-task.yaml
	sed -i "s#name: test#name: $SPECIAL_TASK_NAME#g" $WORK_DIR/dm-task.yaml
	sed -i "s#dir: placeholder#dir: $LOCAL_TEST_DIR#g" $WORK_DIR/dm-task.yaml
	dmctl_start_task $WORK_DIR/dm-task.yaml "--remove-meta"

	run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
	run_sql_file $cur/data/db2.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2

	echo "check task result"
	# wait for the table to be created downstream
	run_sql_tidb_with_retry "select count(1) from information_schema.tables where TABLE_SCHEMA='${db}' and TABLE_NAME = '${tb}';" "count(1): 1"

	# check table data
	run_sql_tidb_with_retry "select count(1) from ${db}.${tb};" "count(1): 25"
}
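
# run() is the entry point: it brings up a real downstream cluster, runs the
# s3 import matrix (normal/special task name, with/without kept dump files),
# runs the failpoint error check, and finally restores a standalone tidb-server,
# presumably so that later test cases see the default downstream again.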

function run() {
	killall tidb-server 2>/dev/null || true
	killall tikv-server 2>/dev/null || true
	killall pd-server 2>/dev/null || true

	mkdir -p "$WORK_DIR.downstream"
	run_downstream_cluster "$WORK_DIR.downstream"

	run_test true $TASK_NAME
	echo "run s3 test with check dump files success"
	run_test false $TASK_NAME
	echo "run s3 test without check dump files success"
	run_test true $SPECIAL_TASK_NAME
	echo "run s3 test with special task-name and check dump files success"
	run_test false $SPECIAL_TASK_NAME
	echo "run s3 test with special task-name and without check dump files success"
	run_error_check
	echo "run s3 test error check success"
	# TODO: re-enable test_local_special_name once https://github.com/pingcap/tidb/issues/32549 is fixed
	# test_local_special_name
	# echo "run local special task-name success"

	# restart to a standalone tidb
	killall -9 tidb-server 2>/dev/null || true
	killall -9 tikv-server 2>/dev/null || true
	killall -9 pd-server 2>/dev/null || true
	rm -rf /tmp/tidb || true
	run_tidb_server 4000 $TIDB_PASSWORD
}

cleanup_data $TEST_NAME
# also clean up dm processes in case the last run failed
cleanup_process $*
run $*
cleanup_process $*

echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"