github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/dm/dumpling/dumpling_test.go

// Copyright 2020 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package dumpling

import (
	"context"
	"testing"
	"time"

	"github.com/DATA-DOG/go-sqlmock"
	"github.com/docker/go-units"
	. "github.com/pingcap/check"
	"github.com/pingcap/failpoint"
	"github.com/pingcap/tidb/dumpling/export"
	"github.com/pingcap/tidb/pkg/util/filter"
	tidbpromutil "github.com/pingcap/tidb/pkg/util/promutil"
	tfilter "github.com/pingcap/tidb/pkg/util/table-filter"
	"github.com/pingcap/tiflow/dm/config"
	"github.com/pingcap/tiflow/dm/pb"
	"github.com/pingcap/tiflow/dm/pkg/conn"
	"github.com/pingcap/tiflow/dm/pkg/log"
	"github.com/pingcap/tiflow/engine/pkg/promutil"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/stretchr/testify/require"
)

var _ = Suite(&testDumplingSuite{})

const (
	testDumplingSchemaName = "INFORMATION_SCHEMA"
	testDumplingTableName  = "TABLES"
)

func TestSuite(t *testing.T) {
	TestingT(t)
}

type testDumplingSuite struct {
	cfg *config.SubTaskConfig
}

func (t *testDumplingSuite) SetUpSuite(c *C) {
	dir := c.MkDir()
	t.cfg = &config.SubTaskConfig{
		Name:     "dumpling_ut",
		Timezone: "UTC",
		From:     config.GetDBConfigForTest(),
		LoaderConfig: config.LoaderConfig{
			Dir: dir,
		},
		BAList: &filter.Rules{
			DoDBs: []string{testDumplingSchemaName},
			DoTables: []*filter.Table{{
				Schema: testDumplingSchemaName,
				Name:   testDumplingTableName,
			}},
		},
	}
	c.Assert(log.InitLogger(&log.Config{}), IsNil)
}

// TestDumpling exercises the three Process outcomes via failpoints:
// success, error, and cancellation.
func (t *testDumplingSuite) TestDumpling(c *C) {
	dumpling := NewDumpling(t.cfg)
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	err := dumpling.Init(ctx)
	c.Assert(err, IsNil)
	resultCh := make(chan pb.ProcessResult, 1)

	c.Assert(failpoint.Enable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessNoError", `return(true)`), IsNil)
	//nolint:errcheck
	defer failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessNoError")

	dumpling.Process(ctx, resultCh)
	c.Assert(len(resultCh), Equals, 1)
	result := <-resultCh
	c.Assert(result.IsCanceled, IsFalse)
	c.Assert(len(result.Errors), Equals, 0)
	//nolint:errcheck
	failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessNoError")

	c.Assert(failpoint.Enable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessWithError", `return("unknown error")`), IsNil)
	// nolint:errcheck
	defer failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessWithError")

	// return error
	dumpling.Process(ctx, resultCh)
	c.Assert(len(resultCh), Equals, 1)
	result = <-resultCh
	c.Assert(result.IsCanceled, IsFalse)
	c.Assert(len(result.Errors), Equals, 1)
	c.Assert(result.Errors[0].Message, Equals, "unknown error")
	// nolint:errcheck
	failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessWithError")

	// cancel
	c.Assert(failpoint.Enable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessCancel", `return("unknown error")`), IsNil)
	// nolint:errcheck
	defer failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessCancel")
	c.Assert(failpoint.Enable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessForever", `return(true)`), IsNil)
	//nolint:errcheck
	defer failpoint.Disable("github.com/pingcap/tiflow/dm/dumpling/dumpUnitProcessForever")

	ctx2, cancel2 := context.WithTimeout(context.Background(), 3*time.Second)
	defer cancel2()
	dumpling.Process(ctx2, resultCh)
	c.Assert(len(resultCh), Equals, 1)
	result = <-resultCh
	c.Assert(result.IsCanceled, IsTrue)
	c.Assert(len(result.Errors), Equals, 1)
	c.Assert(result.Errors[0].String(), Matches, ".*context deadline exceeded.*")
}

// TestDefaultConfig verifies that Init fills in default dump parameters.
func (t *testDumplingSuite) TestDefaultConfig(c *C) {
	dumpling := NewDumpling(t.cfg)
	ctx := context.Background()
	c.Assert(dumpling.Init(ctx), IsNil)
	c.Assert(dumpling.dumpConfig.StatementSize, Not(Equals), export.UnspecifiedSize)
	c.Assert(dumpling.dumpConfig.Rows, Not(Equals), export.UnspecifiedSize)
}

// TestCallStatus checks that Status reports zero values both before the dump
// core is set and after Close.
func TestCallStatus(t *testing.T) {
	cfg := genDumpCfg(t)
	m := NewDumpling(cfg)
	m.metricProxies = defaultMetricProxies
	ctx := context.Background()

	dumpConf := export.DefaultConfig()
	dumpConf.PromFactory = promutil.NewWrappingFactory(
		promutil.NewPromFactory(),
		"",
		prometheus.Labels{
			"task": m.cfg.Name, "source_id": m.cfg.SourceID,
		},
	)
	dumpConf.PromRegistry = tidbpromutil.NewDefaultRegistry()

	s := m.Status(nil).(*pb.DumpStatus)
	require.Equal(t, s.CompletedTables, float64(0))
	require.Equal(t, s.FinishedBytes, float64(0))
	require.Equal(t, s.FinishedRows, float64(0))
	require.Equal(t, s.EstimateTotalRows, float64(0))
	require.Equal(t, s.Progress, "")
	require.Equal(t, s.Bps, int64(0))

	// NewDumper is the only way we can set conf on a Dumper, but it will return an error here, so we just ignore it.
	dumpling, _ := export.NewDumper(ctx, dumpConf)
	m.core = dumpling

	m.Close()
	s = m.Status(nil).(*pb.DumpStatus)
	require.Equal(t, s.CompletedTables, float64(0))
	require.Equal(t, s.FinishedBytes, float64(0))
	require.Equal(t, s.FinishedRows, float64(0))
	require.Equal(t, s.EstimateTotalRows, float64(0))
	require.Equal(t, s.Progress, "")
	require.Equal(t, s.Bps, int64(0))
}

// TestParseArgsWontOverwrite ensures values set in the task config are not
// overwritten by parsed extra args.
func (t *testDumplingSuite) TestParseArgsWontOverwrite(c *C) {
	cfg := &config.SubTaskConfig{
		Timezone: "UTC",
	}
	cfg.ChunkFilesize = "1"
	rules := &filter.Rules{
		DoDBs: []string{"unit_test"},
	}
	cfg.BAList = rules
	// make sure we enter `parseExtraArgs`
	cfg.ExtraArgs = "-s=4000 --consistency lock"

	d := NewDumpling(cfg)
	exportCfg, err := d.constructArgs(context.Background())
	c.Assert(err, IsNil)

	c.Assert(exportCfg.StatementSize, Equals, uint64(4000))
	c.Assert(exportCfg.FileSize, Equals, uint64(1*units.MiB))

	f, err2 := tfilter.ParseMySQLReplicationRules(rules)
	c.Assert(err2, IsNil)
	c.Assert(exportCfg.TableFilter, DeepEquals, tfilter.CaseInsensitive(f))

	c.Assert(exportCfg.Consistency, Equals, "lock")
}

// TestConstructArgs checks that extra command-line args and the upstream time
// zone are translated into the export config.
func (t *testDumplingSuite) TestConstructArgs(c *C) {
	ctx := context.Background()

	mock := conn.InitMockDB(c)
	mock.ExpectQuery("SELECT cast\\(TIMEDIFF\\(NOW\\(6\\), UTC_TIMESTAMP\\(6\\)\\) as time\\);").
		WillReturnRows(sqlmock.NewRows([]string{""}).AddRow("01:00:00"))

	cfg := &config.SubTaskConfig{}
	cfg.ExtraArgs = `--statement-size=100 --where "t>10" --threads 8 -F 50B`
	d := NewDumpling(cfg)
	exportCfg, err := d.constructArgs(ctx)
	c.Assert(err, IsNil)
	c.Assert(exportCfg.StatementSize, Equals, uint64(100))
	c.Assert(exportCfg.Where, Equals, "t>10")
	c.Assert(exportCfg.Threads, Equals, 8)
	c.Assert(exportCfg.FileSize, Equals, uint64(50))
	c.Assert(exportCfg.SessionParams, NotNil)
	c.Assert(exportCfg.SessionParams["time_zone"], Equals, "+01:00")
}

func genDumpCfg(t *testing.T) *config.SubTaskConfig {
	t.Helper()

	dir := t.TempDir()
	return &config.SubTaskConfig{
		Name:     "dumpling_ut",
		Timezone: "UTC",
		From:     config.GetDBConfigForTest(),
		LoaderConfig: config.LoaderConfig{
			Dir: dir,
		},
		BAList: &filter.Rules{
			DoDBs: []string{testDumplingSchemaName},
			DoTables: []*filter.Table{{
				Schema: testDumplingSchemaName,
				Name:   testDumplingTableName,
			}},
		},
	}
}