github.com/pingcap/br@v5.3.0-alpha.0.20220125034240-ec59c7b6ce30+incompatible/tests/lightning_checkpoint_parquet/run.sh

#!/bin/sh
#
# Copyright 2019 PingCAP, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# See the License for the specific language governing permissions and
# limitations under the License.

set -euE

# Populate the mydumper source
DBPATH="$TEST_DIR/cppq.mydump"
ROW_COUNT=100

do_run_lightning() {
    run_lightning -d "$DBPATH" --enable-checkpoint=1
}

mkdir -p "$DBPATH"
echo 'CREATE DATABASE cppq_tsr;' > "$DBPATH/cppq_tsr-schema-create.sql"
# column "iVal" is used to check that upper-case column names are handled properly
echo 'CREATE TABLE tbl(iVal INT PRIMARY KEY, s VARCHAR(16));' > "$DBPATH/cppq_tsr.tbl-schema.sql"
bin/parquet_gen --dir "$DBPATH" --schema cppq_tsr --table tbl --chunk 1 --rows $ROW_COUNT

# Set the failpoints to kill the lightning instance as soon as one batch of data is written
PKG="github.com/pingcap/br/pkg/lightning/restore"
export GO_FAILPOINTS="$PKG/SlowDownWriteRows=sleep(1000);$PKG/FailAfterWriteRows=panic;$PKG/SetMinDeliverBytes=return(1)"

# Start importing the tables.
run_sql 'DROP DATABASE IF EXISTS cppq_tsr'
run_sql 'DROP DATABASE IF EXISTS checkpoint_test_parquet'

set +e
run_lightning -d "$DBPATH" --backend tidb --enable-checkpoint=1 2> /dev/null
set -e
run_sql 'SELECT count(*), sum(iVal) FROM `cppq_tsr`.tbl'
check_contains "count(*): 1"
# sum(0)
check_contains "sum(iVal): 0"

# Check the chunk offset and bump the checkpoint's current row ID to a higher value, so that
# if the parquet file were re-read from the start, the generated rows would differ
run_sql "UPDATE checkpoint_test_parquet.chunk_v5 SET prev_rowid_max = prev_rowid_max + 1000, rowid_max = rowid_max + 1000;"

# Restart lightning from the checkpoint; the remaining rows should be written successfully
export GO_FAILPOINTS=
set +e
run_lightning -d "$DBPATH" --backend tidb --enable-checkpoint=1 2> /dev/null
set -e

run_sql 'SELECT count(*), sum(iVal) FROM `cppq_tsr`.tbl'
check_contains "count(*): 100"
# sum(0..99)
check_contains "sum(iVal): 4950"