github.com/whtcorpsinc/MilevaDB-Prod@v0.0.0-20211104133533-f57f4be3b597/causetstore/milevadb-server/statistics/statistics_test.go

// Copyright 2020 WHTCORPS INC, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package statistics

import (
	"context"
	"math"
	"testing"
	"time"

	"github.com/whtcorpsinc/BerolinaSQL/allegrosql"
	"github.com/whtcorpsinc/BerolinaSQL/ast"
	"github.com/whtcorpsinc/BerolinaSQL/perceptron"
	. "github.com/whtcorpsinc/check"
	"github.com/whtcorpsinc/errors"
	"github.com/whtcorpsinc/milevadb/soliton/chunk"
	"github.com/whtcorpsinc/milevadb/soliton/codec"
	"github.com/whtcorpsinc/milevadb/soliton/collate"
	"github.com/whtcorpsinc/milevadb/soliton/mock"
	"github.com/whtcorpsinc/milevadb/soliton/ranger"
	"github.com/whtcorpsinc/milevadb/soliton/sqlexec"
	"github.com/whtcorpsinc/milevadb/stochastikctx"
	"github.com/whtcorpsinc/milevadb/stochastikctx/stmtctx"
	"github.com/whtcorpsinc/milevadb/types"
	"github.com/whtcorpsinc/milevadb/types/json"
)

func TestT(t *testing.T) {
	TestingT(t)
}

var _ = Suite(&testStatisticsSuite{})

type testStatisticsSuite struct {
	count   int
	samples []*SampleItem
	rc      sqlexec.RecordSet
	pk      sqlexec.RecordSet
}

// recordSet is an in-memory sqlexec.RecordSet that replays a fixed slice of
// causets, used to feed deterministic data into the statistics builders.
type recordSet struct {
	firstIsID bool
	data      []types.Causet
	count     int
	cursor    int
	fields    []*ast.ResultField
}

func (r *recordSet) Fields() []*ast.ResultField {
	return r.fields
}

func (r *recordSet) setFields(tps ...uint8) {
	r.fields = make([]*ast.ResultField, len(tps))
	for i := 0; i < len(tps); i++ {
		rf := new(ast.ResultField)
		rf.DeferredCauset = new(perceptron.DeferredCausetInfo)
		rf.DeferredCauset.FieldType = *types.NewFieldType(tps[i])
		r.fields[i] = rf
	}
}

func (r *recordSet) getNext() []types.Causet {
	if r.cursor == r.count {
		return nil
	}
	r.cursor++
	event := make([]types.Causet, 0, len(r.fields))
	if r.firstIsID {
		event = append(event, types.NewIntCauset(int64(r.cursor)))
	}
	event = append(event, r.data[r.cursor-1])
	return event
}

func (r *recordSet) Next(ctx context.Context, req *chunk.Chunk) error {
	req.Reset()
	event := r.getNext()
	if event != nil {
		for i := 0; i < len(event); i++ {
			req.AppendCauset(i, &event[i])
		}
	}
	return nil
}

func (r *recordSet) NewChunk() *chunk.Chunk {
	fields := make([]*types.FieldType, 0, len(r.fields))
	for _, field := range r.fields {
		fields = append(fields, &field.DeferredCauset.FieldType)
	}
	return chunk.NewChunkWithCapacity(fields, 32)
}

func (r *recordSet) Close() error {
	r.cursor = 0
	return nil
}

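// SetUpSuite builds the shared fixtures: a sorted sample set, a record set
// (s.rc) holding one 0, 999 copies of 2, and mostly distinct increasing
// values, and a primary-key record set (s.pk) with 100000 distinct
// ascending values.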
func (s *testStatisticsSuite) SetUpSuite(c *C) {
	s.count = 100000
	samples := make([]*SampleItem, 10000)
	for i := 0; i < len(samples); i++ {
		samples[i] = &SampleItem{}
	}
	start := 1000
	samples[0].Value.SetInt64(0)
	for i := 1; i < start; i++ {
		samples[i].Value.SetInt64(2)
	}
	for i := start; i < len(samples); i++ {
		samples[i].Value.SetInt64(int64(i))
	}
	for i := start; i < len(samples); i += 3 {
		samples[i].Value.SetInt64(samples[i].Value.GetInt64() + 1)
	}
	for i := start; i < len(samples); i += 5 {
		samples[i].Value.SetInt64(samples[i].Value.GetInt64() + 2)
	}
	sc := new(stmtctx.StatementContext)
	samples, err := SortSampleItems(sc, samples)
	c.Check(err, IsNil)
	s.samples = samples

	rc := &recordSet{
		data:   make([]types.Causet, s.count),
		count:  s.count,
		cursor: 0,
	}
	rc.setFields(allegrosql.TypeLonglong)
	rc.data[0].SetInt64(0)
	for i := 1; i < start; i++ {
		rc.data[i].SetInt64(2)
	}
	for i := start; i < rc.count; i++ {
		rc.data[i].SetInt64(int64(i))
	}
	for i := start; i < rc.count; i += 3 {
		rc.data[i].SetInt64(rc.data[i].GetInt64() + 1)
	}
	for i := start; i < rc.count; i += 5 {
		rc.data[i].SetInt64(rc.data[i].GetInt64() + 2)
	}
	err = types.SortCausets(sc, rc.data)
	c.Check(err, IsNil)
	s.rc = rc

	pk := &recordSet{
		data:   make([]types.Causet, s.count),
		count:  s.count,
		cursor: 0,
	}
	pk.setFields(allegrosql.TypeLonglong)
	for i := 0; i < rc.count; i++ {
		pk.data[i].SetInt64(int64(i))
	}
	s.pk = pk
}

func encodeKey(key types.Causet) types.Causet {
	sc := &stmtctx.StatementContext{TimeZone: time.Local}
	buf, _ := codec.EncodeKey(sc, nil, key)
	return types.NewBytesCauset(buf)
}

func buildPK(sctx stochastikctx.Context, numBuckets, id int64, records sqlexec.RecordSet) (int64, *Histogram, error) {
	b := NewSortedBuilder(sctx.GetStochastikVars().StmtCtx, numBuckets, id, types.NewFieldType(allegrosql.TypeLonglong))
	ctx := context.Background()
	for {
		req := records.NewChunk()
		err := records.Next(ctx, req)
		if err != nil {
			return 0, nil, errors.Trace(err)
		}
		if req.NumRows() == 0 {
			break
		}
		it := chunk.NewIterator4Chunk(req)
		for event := it.Begin(); event != it.End(); event = it.Next() {
			datums := RowToCausets(event, records.Fields())
			err = b.Iterate(datums[0])
			if err != nil {
				return 0, nil, errors.Trace(err)
			}
		}
	}
	return b.Count, b.hist, nil
}

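// buildIndex mirrors buildPK for index columns: every row is encoded with
// codec.EncodeKey, fed into a SortedBuilder over the blob type, and also
// inserted into a CM sketch, returning the row count, histogram, and sketch.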
func buildIndex(sctx stochastikctx.Context, numBuckets, id int64, records sqlexec.RecordSet) (int64, *Histogram, *CMSketch, error) {
	b := NewSortedBuilder(sctx.GetStochastikVars().StmtCtx, numBuckets, id, types.NewFieldType(allegrosql.TypeBlob))
	cms := NewCMSketch(8, 2048)
	ctx := context.Background()
	req := records.NewChunk()
	it := chunk.NewIterator4Chunk(req)
	for {
		err := records.Next(ctx, req)
		if err != nil {
			return 0, nil, nil, errors.Trace(err)
		}
		if req.NumRows() == 0 {
			break
		}
		for event := it.Begin(); event != it.End(); event = it.Next() {
			datums := RowToCausets(event, records.Fields())
			buf, err := codec.EncodeKey(sctx.GetStochastikVars().StmtCtx, nil, datums...)
			if err != nil {
				return 0, nil, nil, errors.Trace(err)
			}
			data := types.NewBytesCauset(buf)
			err = b.Iterate(data)
			if err != nil {
				return 0, nil, nil, errors.Trace(err)
			}
			cms.InsertBytes(buf)
		}
	}
	return b.Count, b.Hist(), cms, nil
}

func checkRepeats(c *C, hg *Histogram) {
	for _, bkt := range hg.Buckets {
		c.Assert(bkt.Repeat, Greater, int64(0))
	}
}

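// TestBuild exercises histogram construction end to end: a column histogram
// built from the pre-sorted samples, one built via SampleBuilder, an index
// histogram over encoded keys, a primary-key histogram, and finally a
// single-row JSON column, checking the estimated row counts for each.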
func (s *testStatisticsSuite) TestBuild(c *C) {
	bucketCount := int64(256)
	ctx := mock.NewContext()
	sc := ctx.GetStochastikVars().StmtCtx
	sketch, _, err := buildFMSketch(sc, s.rc.(*recordSet).data, 1000)
	c.Assert(err, IsNil)

	collector := &SampleDefCauslector{
		Count:     int64(s.count),
		NullCount: 0,
		Samples:   s.samples,
		FMSketch:  sketch,
	}
	col, err := BuildDeferredCauset(ctx, bucketCount, 2, collector, types.NewFieldType(allegrosql.TypeLonglong))
	c.Check(err, IsNil)
	checkRepeats(c, col)
	col.PreCalculateScalar()
	c.Check(col.Len(), Equals, 226)
	count := col.equalRowCount(types.NewIntCauset(1000))
	c.Check(int(count), Equals, 0)
	count = col.lessRowCount(types.NewIntCauset(1000))
	c.Check(int(count), Equals, 10000)
	count = col.lessRowCount(types.NewIntCauset(2000))
	c.Check(int(count), Equals, 19999)
	count = col.greaterRowCount(types.NewIntCauset(2000))
	c.Check(int(count), Equals, 80000)
	count = col.lessRowCount(types.NewIntCauset(200000000))
	c.Check(int(count), Equals, 100000)
	count = col.greaterRowCount(types.NewIntCauset(200000000))
	c.Check(count, Equals, 0.0)
	count = col.equalRowCount(types.NewIntCauset(200000000))
	c.Check(count, Equals, 0.0)
	count = col.BetweenRowCount(types.NewIntCauset(3000), types.NewIntCauset(3500))
	c.Check(int(count), Equals, 4994)
	count = col.lessRowCount(types.NewIntCauset(1))
	c.Check(int(count), Equals, 9)

	builder := SampleBuilder{
		Sc:                mock.NewContext().GetStochastikVars().StmtCtx,
		RecordSet:         s.pk,
		DefCausLen:        1,
		MaxSampleSize:     1000,
		MaxFMSketchSize:   1000,
		DefCauslators:     make([]collate.DefCauslator, 1),
		DefCaussFieldType: []*types.FieldType{types.NewFieldType(allegrosql.TypeLonglong)},
	}
	c.Assert(s.pk.Close(), IsNil)
	collectors, _, err := builder.DefCauslectDeferredCausetStats()
	c.Assert(err, IsNil)
	c.Assert(len(collectors), Equals, 1)
	col, err = BuildDeferredCauset(mock.NewContext(), 256, 2, collectors[0], types.NewFieldType(allegrosql.TypeLonglong))
	c.Assert(err, IsNil)
	checkRepeats(c, col)
	c.Assert(col.Len(), Equals, 250)

	tblCount, col, _, err := buildIndex(ctx, bucketCount, 1, s.rc)
	c.Check(err, IsNil)
	checkRepeats(c, col)
	col.PreCalculateScalar()
	c.Check(int(tblCount), Equals, 100000)
	count = col.equalRowCount(encodeKey(types.NewIntCauset(10000)))
	c.Check(int(count), Equals, 1)
	count = col.lessRowCount(encodeKey(types.NewIntCauset(20000)))
	c.Check(int(count), Equals, 19999)
	count = col.BetweenRowCount(encodeKey(types.NewIntCauset(30000)), encodeKey(types.NewIntCauset(35000)))
	c.Check(int(count), Equals, 4999)
	count = col.BetweenRowCount(encodeKey(types.MinNotNullCauset()), encodeKey(types.NewIntCauset(0)))
	c.Check(int(count), Equals, 0)
	count = col.lessRowCount(encodeKey(types.NewIntCauset(0)))
	c.Check(int(count), Equals, 0)

	s.pk.(*recordSet).cursor = 0
	tblCount, col, err = buildPK(ctx, bucketCount, 4, s.pk)
	c.Check(err, IsNil)
	checkRepeats(c, col)
	col.PreCalculateScalar()
	c.Check(int(tblCount), Equals, 100000)
	count = col.equalRowCount(types.NewIntCauset(10000))
	c.Check(int(count), Equals, 1)
	count = col.lessRowCount(types.NewIntCauset(20000))
	c.Check(int(count), Equals, 20000)
	count = col.BetweenRowCount(types.NewIntCauset(30000), types.NewIntCauset(35000))
	c.Check(int(count), Equals, 5000)
	count = col.greaterRowCount(types.NewIntCauset(1001))
	c.Check(int(count), Equals, 98998)
	count = col.lessRowCount(types.NewIntCauset(99999))
	c.Check(int(count), Equals, 99999)

	causet := types.Causet{}
	causet.SetMysqlJSON(json.BinaryJSON{TypeCode: json.TypeCodeLiteral})
	item := &SampleItem{Value: causet}
	collector = &SampleDefCauslector{
		Count:     1,
		NullCount: 0,
		Samples:   []*SampleItem{item},
		FMSketch:  sketch,
	}
	col, err = BuildDeferredCauset(ctx, bucketCount, 2, collector, types.NewFieldType(allegrosql.TypeJSON))
	c.Assert(err, IsNil)
	c.Assert(col.Len(), Equals, 1)
	c.Assert(col.GetLower(0), DeepEquals, col.GetUpper(0))
}

func (s *testStatisticsSuite) TestHistogramProtoConversion(c *C) {
	ctx := mock.NewContext()
	c.Assert(s.rc.Close(), IsNil)
	tblCount, col, _, err := buildIndex(ctx, 256, 1, s.rc)
	c.Check(err, IsNil)
	c.Check(int(tblCount), Equals, 100000)

	p := HistogramToProto(col)
	h := HistogramFromProto(p)
	c.Assert(HistogramEqual(col, h, true), IsTrue)
}

func mockHistogram(lower, num int64) *Histogram {
	h := NewHistogram(0, num, 0, 0, types.NewFieldType(allegrosql.TypeLonglong), int(num), 0)
	for i := int64(0); i < num; i++ {
		lower, upper := types.NewIntCauset(lower+i), types.NewIntCauset(lower+i)
		h.AppendBucket(&lower, &upper, i+1, 1)
	}
	return h
}

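// TestMergeHistogram merges pairs of mock histograms with disjoint and
// overlapping bounds and verifies the NDV, bucket count, total row count,
// and lower/upper bounds of the merged result.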
func (s *testStatisticsSuite) TestMergeHistogram(c *C) {
	tests := []struct {
		leftLower  int64
		leftNum    int64
		rightLower int64
		rightNum   int64
		bucketNum  int
		ndv        int64
	}{
		{
			leftLower:  0,
			leftNum:    0,
			rightLower: 0,
			rightNum:   1,
			bucketNum:  1,
			ndv:        1,
		},
		{
			leftLower:  0,
			leftNum:    200,
			rightLower: 200,
			rightNum:   200,
			bucketNum:  200,
			ndv:        400,
		},
		{
			leftLower:  0,
			leftNum:    200,
			rightLower: 199,
			rightNum:   200,
			bucketNum:  200,
			ndv:        399,
		},
	}
	sc := mock.NewContext().GetStochastikVars().StmtCtx
	bucketCount := 256
	for _, t := range tests {
		lh := mockHistogram(t.leftLower, t.leftNum)
		rh := mockHistogram(t.rightLower, t.rightNum)
		h, err := MergeHistograms(sc, lh, rh, bucketCount)
		c.Assert(err, IsNil)
		c.Assert(h.NDV, Equals, t.ndv)
		c.Assert(h.Len(), Equals, t.bucketNum)
		c.Assert(int64(h.TotalRowCount()), Equals, t.leftNum+t.rightNum)
		expectLower := types.NewIntCauset(t.leftLower)
		cmp, err := h.GetLower(0).CompareCauset(sc, &expectLower)
		c.Assert(err, IsNil)
		c.Assert(cmp, Equals, 0)
		expectUpper := types.NewIntCauset(t.rightLower + t.rightNum - 1)
		cmp, err = h.GetUpper(h.Len()-1).CompareCauset(sc, &expectUpper)
		c.Assert(err, IsNil)
		c.Assert(cmp, Equals, 0)
	}
}

func (s *testStatisticsSuite) TestPseudoTable(c *C) {
	ti := &perceptron.TableInfo{}
	colInfo := &perceptron.DeferredCausetInfo{
		ID:        1,
		FieldType: *types.NewFieldType(allegrosql.TypeLonglong),
	}
	ti.DeferredCausets = append(ti.DeferredCausets, colInfo)
	tbl := PseudoTable(ti)
	c.Assert(tbl.Count, Greater, int64(0))
	sc := new(stmtctx.StatementContext)
	count := tbl.DeferredCausetLessRowCount(sc, types.NewIntCauset(100), colInfo.ID)
	c.Assert(int(count), Equals, 3333)
	count, err := tbl.DeferredCausetEqualRowCount(sc, types.NewIntCauset(1000), colInfo.ID)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 10)
	count = tbl.DeferredCausetBetweenRowCount(sc, types.NewIntCauset(1000), types.NewIntCauset(5000), colInfo.ID)
	c.Assert(int(count), Equals, 250)
}

func buildCMSketch(values []types.Causet) *CMSketch {
	cms := NewCMSketch(8, 2048)
	for _, val := range values {
		cms.insert(&val)
	}
	return cms
}

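// TestDeferredCausetRange estimates row counts for column ranges twice: first
// with no column stats registered (pseudo estimates), then after installing
// the histogram and CM sketch built from s.rc.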
func (s *testStatisticsSuite) TestDeferredCausetRange(c *C) {
	bucketCount := int64(256)
	ctx := mock.NewContext()
	sc := ctx.GetStochastikVars().StmtCtx
	sketch, _, err := buildFMSketch(sc, s.rc.(*recordSet).data, 1000)
	c.Assert(err, IsNil)

	collector := &SampleDefCauslector{
		Count:     int64(s.count),
		NullCount: 0,
		Samples:   s.samples,
		FMSketch:  sketch,
	}
	hg, err := BuildDeferredCauset(ctx, bucketCount, 2, collector, types.NewFieldType(allegrosql.TypeLonglong))
	hg.PreCalculateScalar()
	c.Check(err, IsNil)
	col := &DeferredCauset{Histogram: *hg, CMSketch: buildCMSketch(s.rc.(*recordSet).data), Info: &perceptron.DeferredCausetInfo{}}
	tbl := &Block{
		HistDefCausl: HistDefCausl{
			Count:           int64(col.TotalRowCount()),
			DeferredCausets: make(map[int64]*DeferredCauset),
		},
	}
	ran := []*ranger.Range{{
		LowVal:  []types.Causet{{}},
		HighVal: []types.Causet{types.MaxValueCauset()},
	}}
	count, err := tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0] = types.MinNotNullCauset()
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 99900)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].LowExclude = true
	ran[0].HighVal[0] = types.NewIntCauset(2000)
	ran[0].HighExclude = true
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 2500)
	ran[0].LowExclude = false
	ran[0].HighExclude = false
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 2500)
	ran[0].LowVal[0] = ran[0].HighVal[0]
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100)

	tbl.DeferredCausets[0] = col
	ran[0].LowVal[0] = types.Causet{}
	ran[0].HighVal[0] = types.MaxValueCauset()
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].LowExclude = true
	ran[0].HighVal[0] = types.NewIntCauset(2000)
	ran[0].HighExclude = true
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 9998)
	ran[0].LowExclude = false
	ran[0].HighExclude = false
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 10000)
	ran[0].LowVal[0] = ran[0].HighVal[0]
	count, err = tbl.GetRowCountByDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1)
}

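// TestIntDeferredCausetRanges covers integer range estimation for signed and
// unsigned bounds, comparing the pseudo estimates against histogram-backed
// ones once the primary-key column stats are attached.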
func (s *testStatisticsSuite) TestIntDeferredCausetRanges(c *C) {
	bucketCount := int64(256)
	ctx := mock.NewContext()
	sc := ctx.GetStochastikVars().StmtCtx

	s.pk.(*recordSet).cursor = 0
	rowCount, hg, err := buildPK(ctx, bucketCount, 0, s.pk)
	hg.PreCalculateScalar()
	c.Check(err, IsNil)
	c.Check(rowCount, Equals, int64(100000))
	col := &DeferredCauset{Histogram: *hg, Info: &perceptron.DeferredCausetInfo{}}
	tbl := &Block{
		HistDefCausl: HistDefCausl{
			Count:           int64(col.TotalRowCount()),
			DeferredCausets: make(map[int64]*DeferredCauset),
		},
	}
	ran := []*ranger.Range{{
		LowVal:  []types.Causet{types.NewIntCauset(math.MinInt64)},
		HighVal: []types.Causet{types.NewIntCauset(math.MaxInt64)},
	}}
	count, err := tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0].SetInt64(1000)
	ran[0].HighVal[0].SetInt64(2000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1000)
	ran[0].LowVal[0].SetInt64(1001)
	ran[0].HighVal[0].SetInt64(1999)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 998)
	ran[0].LowVal[0].SetInt64(1000)
	ran[0].HighVal[0].SetInt64(1000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1)

	ran = []*ranger.Range{{
		LowVal:  []types.Causet{types.NewUintCauset(0)},
		HighVal: []types.Causet{types.NewUintCauset(math.MaxUint64)},
	}}
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0].SetUint64(1000)
	ran[0].HighVal[0].SetUint64(2000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1000)
	ran[0].LowVal[0].SetUint64(1001)
	ran[0].HighVal[0].SetUint64(1999)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 998)
	ran[0].LowVal[0].SetUint64(1000)
	ran[0].HighVal[0].SetUint64(1000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1)

	tbl.DeferredCausets[0] = col
	ran[0].LowVal[0].SetInt64(math.MinInt64)
	ran[0].HighVal[0].SetInt64(math.MaxInt64)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0].SetInt64(1000)
	ran[0].HighVal[0].SetInt64(2000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1001)
	ran[0].LowVal[0].SetInt64(1001)
	ran[0].HighVal[0].SetInt64(1999)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 999)
	ran[0].LowVal[0].SetInt64(1000)
	ran[0].HighVal[0].SetInt64(1000)
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1)

	tbl.Count *= 10
	count, err = tbl.GetRowCountByIntDeferredCausetRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 10)
}

func (s *testStatisticsSuite) TestIndexRanges(c *C) {
	bucketCount := int64(256)
	ctx := mock.NewContext()
	sc := ctx.GetStochastikVars().StmtCtx

	s.rc.(*recordSet).cursor = 0
	rowCount, hg, cms, err := buildIndex(ctx, bucketCount, 0, s.rc)
	hg.PreCalculateScalar()
	c.Check(err, IsNil)
	c.Check(rowCount, Equals, int64(100000))
	idxInfo := &perceptron.IndexInfo{DeferredCausets: []*perceptron.IndexDeferredCauset{{Offset: 0}}}
	idx := &Index{Histogram: *hg, CMSketch: cms, Info: idxInfo}
	tbl := &Block{
		HistDefCausl: HistDefCausl{
			Count:   int64(idx.TotalRowCount()),
			Indices: make(map[int64]*Index),
		},
	}
	ran := []*ranger.Range{{
		LowVal:  []types.Causet{types.MinNotNullCauset()},
		HighVal: []types.Causet{types.MaxValueCauset()},
	}}
	count, err := tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 99900)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].HighVal[0] = types.NewIntCauset(2000)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 2500)
	ran[0].LowVal[0] = types.NewIntCauset(1001)
	ran[0].HighVal[0] = types.NewIntCauset(1999)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 2500)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].HighVal[0] = types.NewIntCauset(1000)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100)

	tbl.Indices[0] = &Index{Info: &perceptron.IndexInfo{DeferredCausets: []*perceptron.IndexDeferredCauset{{Offset: 0}}, Unique: true}}
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].HighVal[0] = types.NewIntCauset(1000)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1)

	tbl.Indices[0] = idx
	ran[0].LowVal[0] = types.MinNotNullCauset()
	ran[0].HighVal[0] = types.MaxValueCauset()
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 100000)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].HighVal[0] = types.NewIntCauset(2000)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 1000)
	ran[0].LowVal[0] = types.NewIntCauset(1001)
	ran[0].HighVal[0] = types.NewIntCauset(1990)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 989)
	ran[0].LowVal[0] = types.NewIntCauset(1000)
	ran[0].HighVal[0] = types.NewIntCauset(1000)
	count, err = tbl.GetRowCountByIndexRanges(sc, 0, ran)
	c.Assert(err, IsNil)
	c.Assert(int(count), Equals, 0)
}