github.com/pingcap/br@v5.3.0-alpha.0.20220125034240-ec59c7b6ce30+incompatible/tests/br_300_small_tables/run.sh

#!/bin/sh
#
# Copyright 2020 PingCAP, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# See the License for the specific language governing permissions and
# limitations under the License.

set -eu
DB="$TEST_NAME"
TABLES_COUNT=300

PROGRESS_FILE="$TEST_DIR/progress_file"
BACKUPMETAV1_LOG="$TEST_DIR/backup.log"
BACKUPMETAV2_LOG="$TEST_DIR/backupv2.log"
RESTORE_LOG="$TEST_DIR/restore.log"
rm -rf $PROGRESS_FILE

# helper function to do floating point arithmetic
calc() { awk "BEGIN{print $*}"; }

run_sql "create schema $DB;"

# generate 300 tables with 1 row each.
i=1
while [ $i -le $TABLES_COUNT ]; do
    run_sql "create table $DB.sbtest$i(id int primary key, k int not null, c char(120) not null, pad char(60) not null);"
    run_sql "insert into $DB.sbtest$i values ($i, $i, '$i', '$i');"
    i=$(($i+1))
done

# backup db
echo "backup meta v2 start..."
unset BR_LOG_TO_TERM
rm -f $BACKUPMETAV2_LOG
export GO_FAILPOINTS="github.com/pingcap/br/pkg/task/progress-call-back=return(\"$PROGRESS_FILE\")"
run_br backup db --db "$DB" --log-file $BACKUPMETAV2_LOG -s "local://$TEST_DIR/${DB}v2" --pd $PD_ADDR --use-backupmeta-v2
backupv2_size=`grep "backup data size" "${BACKUPMETAV2_LOG}" | grep -oP '\[\K[^\]]+' | grep "backup data size" | awk -F '=' '{print $2}' | grep -oP '\d*\.\d+'`
echo "backup meta v2 backup size is ${backupv2_size}"
export GO_FAILPOINTS=""

if [[ "$(wc -l <$PROGRESS_FILE)" == "1" ]] && [[ $(grep -c "range" $PROGRESS_FILE) == "1" ]];
then
    echo "use the correct progress unit"
else
    echo "use the wrong progress unit, expect range"
    cat $PROGRESS_FILE
    exit 1
fi

rm -rf $PROGRESS_FILE

echo "backup meta v1 start..."
rm -f $BACKUPMETAV1_LOG
run_br backup db --db "$DB" --log-file $BACKUPMETAV1_LOG -s "local://$TEST_DIR/$DB" --pd $PD_ADDR
backupv1_size=`grep "backup data size" "${BACKUPMETAV1_LOG}" | grep -oP '\[\K[^\]]+' | grep "backup data size" | awk -F '=' '{print $2}' | grep -oP '\d*\.\d+'`
echo "backup meta v1 backup size is ${backupv1_size}"

if [ $(calc "${backupv1_size}-${backupv2_size}==0") -eq 1 ]; then
    echo "backup meta v1 data size matches backup meta v2 data size"
else
    echo "statistics do not match"
    exit 1
fi

# truncate every table
# (FIXME: drop instead of truncate. if we drop then create-table will still be executed and wastes time executing DDLs)
i=1
while [ $i -le $TABLES_COUNT ]; do
    run_sql "truncate $DB.sbtest$i;"
    i=$(($i+1))
done

rm -rf $RESTORE_LOG
echo "restore 1/300 of the tables start..."
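# Restore a single table (sbtest100) and compare data sizes: every table holds
# one row of the same shape, so the whole-database size reported by backupmeta
# v2 should be roughly TABLES_COUNT times the size restored for one table; the
# threshold check below tolerates a small rounding difference.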
run_br restore table --db $DB --table "sbtest100" --log-file $RESTORE_LOG -s "local://$TEST_DIR/$DB" --pd $PD_ADDR --no-schema
restore_size=`grep "restore data size" "${RESTORE_LOG}" | grep -oP '\[\K[^\]]+' | grep "restore data size" | awk -F '=' '{print $2}' | grep -oP '\d*\.\d+'`
echo "restore data size is ${restore_size}"

diff=$(calc "$backupv2_size-$restore_size*$TABLES_COUNT")
echo "the difference is ${diff}"

threshold="1"

if [ $(calc "$diff<$threshold") -eq 1 ]; then
    echo "statistics match"
else
    echo "statistics do not match"
    exit 1
fi

# restore db
# (FIXME: shouldn't need --no-schema to be fast, currently the alter-auto-id DDL slows things down)
echo "restore start..."
run_br restore db --db $DB -s "local://$TEST_DIR/$DB" --pd $PD_ADDR --no-schema

run_sql "DROP DATABASE $DB;"