// Copyright (c) 2019 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package storage

import (
	"context"
	"testing"
	"time"

	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/generated/proto/annotation"
	dts "github.com/m3db/m3/src/dbnode/ts"
	"github.com/m3db/m3/src/query/block"
	"github.com/m3db/m3/src/query/generated/proto/prompb"
	"github.com/m3db/m3/src/query/models"
	"github.com/m3db/m3/src/query/storage/m3/consolidators"
	"github.com/m3db/m3/src/query/test/seriesiter"
	"github.com/m3db/m3/src/query/ts"
	"github.com/m3db/m3/src/x/ident"
	xsync "github.com/m3db/m3/src/x/sync"
	xtest "github.com/m3db/m3/src/x/test"
	xtime "github.com/m3db/m3/src/x/time"

	"github.com/golang/mock/gomock"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// buildFetchOpts returns fetch options with MaxMetricMetadataStats set to 1.
func buildFetchOpts() *FetchOptions {
	opts := NewFetchOptions()
	opts.MaxMetricMetadataStats = 1
	return opts
}

// fr wraps the given series iterators and tags in a SeriesFetchResult with
// fresh result metadata.
func fr(
	t *testing.T,
	its encoding.SeriesIterators,
	tags ...*models.Tags,
) consolidators.SeriesFetchResult {
	result, err := consolidators.
		NewSeriesFetchResult(its, tags, block.NewResultMetadata())
	assert.NoError(t, err)
	return result
}

// makeTag builds count identical single-tag sets with the given name and value.
func makeTag(n, v string, count int) []*models.Tags {
	tags := make([]*models.Tags, 0, count)
	for i := 0; i < count; i++ {
		t := models.EmptyTags().AddTag(models.Tag{Name: []byte(n), Value: []byte(v)})
		tags = append(tags, &t)
	}

	return tags
}

// verifyExpandPromSeries converts num mock series to a Prometheus result and
// checks the resulting labels, warnings, and metadata.
func verifyExpandPromSeries(
	t *testing.T,
	ctrl *gomock.Controller,
	num int,
	ex bool,
	pools xsync.PooledWorkerPool,
) {
	iters := seriesiter.NewMockSeriesIters(ctrl, ident.Tag{}, num, 2)
	fetchResult := fr(t, iters, makeTag("foo", "bar", num)...)
	fetchResult.Metadata = block.ResultMetadata{
		Exhaustive: ex,
		LocalOnly:  true,
		Warnings:   []block.Warning{{Name: "foo", Message: "bar"}},
	}

	results, err := SeriesIteratorsToPromResult(
		context.Background(), fetchResult, pools, nil, NewPromConvertOptions(), buildFetchOpts())
	assert.NoError(t, err)

	require.NotNil(t, results)
	ts := results.PromResult.GetTimeseries()
	require.NotNil(t, ts)
	require.Equal(t, ex, results.Metadata.Exhaustive)
	require.Equal(t, 1, len(results.Metadata.Warnings))
	merged := results.Metadata.MetadataByNameMerged()
	require.Equal(t, len(ts), merged.WithSamples)
	require.Equal(t, "foo_bar", results.Metadata.Warnings[0].Header())
	require.Equal(t, len(ts), num)
	expectedTags := []prompb.Label{
		{
			Name:  []byte("foo"),
			Value: []byte("bar"),
		},
	}

	for i := 0; i < num; i++ {
		series := ts[i]
		require.NotNil(t, series)
		require.Equal(t, expectedTags, series.GetLabels())
	}
}

func testExpandPromSeries(t *testing.T, ex bool, pools xsync.PooledWorkerPool) {
	ctrl := gomock.NewController(t)

	for i := 0; i < 10; i++ {
		verifyExpandPromSeries(t, ctrl, i, ex, pools)
	}
}

func TestContextCanceled(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	ctx, cancel := context.WithCancel(context.Background())
	cancel()

	pool, err := xsync.NewPooledWorkerPool(100, xsync.NewPooledWorkerPoolOptions())
	require.NoError(t, err)
	pool.Init()

	iters := seriesiter.NewMockSeriesIters(ctrl, ident.Tag{}, 1, 2)
	fetchResult := fr(t, iters, makeTag("foo", "bar", 1)...)
	_, err = SeriesIteratorsToPromResult(
		ctx, fetchResult, pool, nil, NewPromConvertOptions(), buildFetchOpts())
	require.Error(t, err)
	require.Contains(t, err.Error(), "context canceled")
}

func TestExpandPromSeriesNilPools(t *testing.T) {
	testExpandPromSeries(t, false, nil)
	testExpandPromSeries(t, true, nil)
}

func TestExpandPromSeriesValidPools(t *testing.T) {
	pool, err := xsync.NewPooledWorkerPool(100, xsync.NewPooledWorkerPoolOptions())
	require.NoError(t, err)
	pool.Init()
	testExpandPromSeries(t, false, pool)
	testExpandPromSeries(t, true, pool)
}

func TestExpandPromSeriesSmallValidPools(t *testing.T) {
	pool, err := xsync.NewPooledWorkerPool(2, xsync.NewPooledWorkerPoolOptions())
	require.NoError(t, err)
	pool.Init()
	testExpandPromSeries(t, false, pool)
	testExpandPromSeries(t, true, pool)
}

func TestIteratorsToPromResult(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	now := xtime.Now()
	promNow := TimeToPromTimestamp(now)

	vals := ts.NewMockValues(ctrl)
	vals.EXPECT().Len().Return(0).Times(2)
	vals.EXPECT().Datapoints().Return(ts.Datapoints{})

	tags := models.NewTags(1, models.NewTagOptions()).
		AddTag(models.Tag{Name: []byte("a"), Value: []byte("b")})

	valsNonEmpty := ts.NewMockValues(ctrl)
	valsNonEmpty.EXPECT().Len().Return(1).Times(3)
	dp := ts.Datapoints{{Timestamp: now, Value: 1}}
	valsNonEmpty.EXPECT().Datapoints().Return(dp).Times(2)
	tagsNonEmpty := models.NewTags(1, models.NewTagOptions()).
		AddTag(models.Tag{Name: []byte("c"), Value: []byte("d")})

	r := &FetchResult{
		SeriesList: ts.SeriesList{
			ts.NewSeries([]byte("a"), vals, tags),
			ts.NewSeries([]byte("c"), valsNonEmpty, tagsNonEmpty),
		},
	}

	// NB: not keeping empty series.
	result := FetchResultToPromResult(r, false)
	expected := &prompb.QueryResult{
		Timeseries: []*prompb.TimeSeries{
			{
				Labels:  []prompb.Label{{Name: []byte("c"), Value: []byte("d")}},
				Samples: []prompb.Sample{{Timestamp: promNow, Value: 1}},
			},
		},
	}

	assert.Equal(t, expected, result)

	// NB: keeping empty series.
	result = FetchResultToPromResult(r, true)
	expected = &prompb.QueryResult{
		Timeseries: []*prompb.TimeSeries{
			{
				Labels:  []prompb.Label{{Name: []byte("a"), Value: []byte("b")}},
				Samples: []prompb.Sample{},
			},
			{
				Labels:  []prompb.Label{{Name: []byte("c"), Value: []byte("d")}},
				Samples: []prompb.Sample{{Timestamp: promNow, Value: 1}},
			},
		},
	}

	assert.Equal(t, expected, result)
}

func TestDecodeIteratorsWithEmptySeries(t *testing.T) {
	ctrl := xtest.NewController(t)
	defer ctrl.Finish()

	name := "name"
	now := xtime.Now()
	buildIter := func(val string, hasVal bool) *encoding.MockSeriesIterator {
		iter := encoding.NewMockSeriesIterator(ctrl)

		if hasVal {
			iter.EXPECT().Next().Return(true)
			dp := dts.Datapoint{TimestampNanos: now, Value: 1}
			iter.EXPECT().Current().Return(dp, xtime.Second, nil)
		}

		iter.EXPECT().Err().Return(nil)
		iter.EXPECT().Next().Return(false)

		tag := ident.Tag{
			Name:  ident.TagName(ident.StringID(name)),
			Value: ident.TagValue(ident.StringID(val)),
		}

		tags := ident.NewMockTagIterator(ctrl)
		populateIter := func() {
			gomock.InOrder(
				tags.EXPECT().Remaining().Return(1),
				tags.EXPECT().Next().Return(true),
				tags.EXPECT().Current().Return(tag),
				tags.EXPECT().Next().Return(false),
				tags.EXPECT().Err().Return(nil),
				tags.EXPECT().Rewind(),
			)
		}

		populateIter()
		iter.EXPECT().Tags().Return(tags)
		iter.EXPECT().Close().MaxTimes(1)

		return iter
	}

	verifyResult := func(t *testing.T, res PromResult) {
		ts := res.PromResult.GetTimeseries()
		meta := res.Metadata
		exSeriesTags := []string{"bar", "qux", "quail"}
		require.Equal(t, len(exSeriesTags), len(ts))
		for i, series := range ts {
			lbs := series.GetLabels()
			require.Equal(t, 1, len(lbs))
			assert.Equal(t, name, string(lbs[0].GetName()))
			assert.Equal(t, exSeriesTags[i], string(lbs[0].GetValue()))

			samples := series.GetSamples()
			require.Equal(t, 1, len(samples))
			s := samples[0]
			assert.Equal(t, float64(1), s.GetValue())
			assert.Equal(t, int64(now)/int64(time.Millisecond), s.GetTimestamp())
		}
		merged := meta.MetadataByNameMerged()
		// in buildIters, we create 5 series. only 3 of them have samples.
		require.Equal(t, 5, meta.FetchedSeriesCount)
		require.Equal(t, merged, block.ResultMetricMetadata{WithSamples: 3, NoSamples: 2})
	}

	buildIters := func() consolidators.SeriesFetchResult {
		iters := []encoding.SeriesIterator{
			buildIter("foo", false),
			buildIter("bar", true),
			buildIter("baz", false),
			buildIter("qux", true),
			buildIter("quail", true),
		}

		it := encoding.NewMockSeriesIterators(ctrl)
		it.EXPECT().Iters().Return(iters).AnyTimes()
		it.EXPECT().Len().Return(len(iters)).AnyTimes()
		return fr(t, it)
	}

	opts := models.NewTagOptions()
	res, err := SeriesIteratorsToPromResult(
		context.Background(), buildIters(), nil, opts, NewPromConvertOptions(), buildFetchOpts())
	require.NoError(t, err)
	verifyResult(t, res)

	pool, err := xsync.NewPooledWorkerPool(10, xsync.NewPooledWorkerPoolOptions())
	require.NoError(t, err)
	pool.Init()

	res, err = SeriesIteratorsToPromResult(
		context.Background(), buildIters(), pool, opts, NewPromConvertOptions(), buildFetchOpts())
	require.NoError(t, err)
	verifyResult(t, res)
}

func TestSeriesIteratorsToPromResultNormalizeLowResCounters(t *testing.T) {
	var (
		t0   = xtime.Now().Truncate(time.Hour)
		opts = NewPromConvertOptions()
	)

	tests := []struct {
		name          string
		isCounter     bool
		maxResolution time.Duration
		given         []dts.Datapoint
		want          []prompb.Sample
	}{
		{
			name:          "low resolution gauge",
			isCounter:     false,
			maxResolution: time.Hour,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 1},
				{TimestampNanos: t0.Add(time.Hour), Value: 2},
			},
			want: []prompb.Sample{
				{Value: 1, Timestamp: ms(t0)},
				{Value: 2, Timestamp: ms(t0.Add(time.Hour))},
			},
		},
		{
			name:          "high resolution gauge",
			isCounter:     false,
			maxResolution: time.Minute,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 1},
				{TimestampNanos: t0.Add(time.Minute), Value: 2},
			},
			want: []prompb.Sample{
				{Value: 1, Timestamp: ms(t0)},
				{Value: 2, Timestamp: ms(t0.Add(time.Minute))},
			},
		},
		{
			name:          "low resolution counter, no datapoints",
			isCounter:     true,
			maxResolution: time.Hour,
		},
		{
			name:          "low resolution counter, one datapoint",
			isCounter:     true,
			maxResolution: time.Hour,
			given:         []dts.Datapoint{{TimestampNanos: t0, Value: 1}},
			want:          []prompb.Sample{{Value: 1, Timestamp: ms(t0)}},
		},
		{ // nolint: dupl
			name:          "high resolution counter with no resets",
			isCounter:     true,
			maxResolution: time.Minute,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 1},
				{TimestampNanos: t0.Add(time.Minute), Value: 2},
				{TimestampNanos: t0.Add(2 * time.Minute), Value: 2},
				{TimestampNanos: t0.Add(3 * time.Minute), Value: 3},
			},
			want: []prompb.Sample{
				{Value: 1, Timestamp: ms(t0)},
				{Value: 2, Timestamp: ms(t0.Add(time.Minute))},
				{Value: 2, Timestamp: ms(t0.Add(2 * time.Minute))},
				{Value: 3, Timestamp: ms(t0.Add(3 * time.Minute))},
			},
		},
		{ // nolint: dupl
			name:          "high resolution counter with resets",
			isCounter:     true,
			maxResolution: time.Minute,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 10},
				{TimestampNanos: t0.Add(time.Minute), Value: 3},
				{TimestampNanos: t0.Add(2 * time.Minute), Value: 5},
				{TimestampNanos: t0.Add(3 * time.Minute), Value: 8},
			},
			want: []prompb.Sample{
				{Value: 10, Timestamp: ms(t0)},
				{Value: 3,
					Timestamp: ms(t0.Add(time.Minute))},
				{Value: 5, Timestamp: ms(t0.Add(2 * time.Minute))},
				{Value: 8, Timestamp: ms(t0.Add(3 * time.Minute))},
			},
		},
		{ // nolint: dupl
			name:          "low resolution counter with no resets",
			isCounter:     true,
			maxResolution: time.Hour,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 1},
				{TimestampNanos: t0.Add(time.Minute), Value: 2},
				{TimestampNanos: t0.Add(time.Hour), Value: 2},
				{TimestampNanos: t0.Add(time.Hour + time.Minute), Value: 3},
			},
			// Only the last datapoint within each one-hour resolution window is expected.
			want: []prompb.Sample{
				{Value: 2, Timestamp: ms(t0.Add(time.Minute))},
				{Value: 3, Timestamp: ms(t0.Add(time.Hour + time.Minute))},
			},
		},
		{ // nolint: dupl
			name:          "low resolution counter with resets",
			isCounter:     true,
			maxResolution: time.Hour,
			given: []dts.Datapoint{
				{TimestampNanos: t0, Value: 10},
				{TimestampNanos: t0.Add(time.Minute), Value: 3},
				{TimestampNanos: t0.Add(time.Hour), Value: 5},
				{TimestampNanos: t0.Add(time.Hour + time.Minute), Value: 8},
			},
			// Resets are expected to be accumulated (10, 3, 5, 8 -> 10, 13, 15, 18),
			// keeping the last accumulated value within each one-hour window.
			want: []prompb.Sample{
				{Value: 13, Timestamp: ms(t0.Add(time.Minute))},
				{Value: 18, Timestamp: ms(t0.Add(time.Hour + time.Minute))},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			testSeriesIteratorsToPromResult(
				t, tt.isCounter, tt.maxResolution, tt.given, tt.want, opts)
		})
	}
}

func TestSeriesIteratorsToPromResultValueDecreaseTolerance(t *testing.T) {
	now := xtime.Now().Truncate(time.Hour)

	tests := []struct {
		name      string
		given     []float64
		tolerance float64
		until     xtime.UnixNano
		want      []float64
	}{
		{
			name:      "no tolerance",
			given:     []float64{187.80131100000006, 187.801311, 187.80131100000006, 187.801311, 200, 199.99},
			tolerance: 0,
			until:     0,
			want:      []float64{187.80131100000006, 187.801311, 187.80131100000006, 187.801311, 200, 199.99},
		},
		{
			name:      "low tolerance",
			given:     []float64{187.80131100000006, 187.801311, 187.80131100000006, 187.801311, 200, 199.99},
			tolerance: 0.00000001,
			until:     now.Add(time.Hour),
			want:      []float64{187.80131100000006, 187.80131100000006, 187.80131100000006, 187.80131100000006, 200, 199.99},
		},
		{
			name:      "high tolerance",
			given:     []float64{187.80131100000006, 187.801311, 187.80131100000006, 187.801311, 200, 199.99},
			tolerance: 0.0001,
			until:     now.Add(time.Hour),
			want:      []float64{187.80131100000006, 187.80131100000006, 187.80131100000006, 187.80131100000006, 200, 200},
		},
		{
			name:      "tolerance expired",
			given:     []float64{200, 199.99, 200, 199.99, 200, 199.99},
			tolerance: 0.0001,
			until:     now,
			want:      []float64{200, 199.99, 200, 199.99, 200, 199.99},
		},
		{
			name:      "tolerance expires in the middle",
			given:     []float64{200, 199.99, 200, 199.99, 200, 199.99},
			tolerance: 0.0001,
			until:     now.Add(3 * time.Minute),
			want:      []float64{200, 200, 200, 199.99, 200, 199.99},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			testSeriesIteratorsToPromResultValueDecreaseTolerance(
				t, now, tt.given, tt.want, tt.tolerance, tt.until)
		})
	}
}

func testSeriesIteratorsToPromResultValueDecreaseTolerance(
	t *testing.T,
	now xtime.UnixNano,
	input []float64,
	expectedOutput []float64,
	decreaseTolerance float64,
	toleranceUntil xtime.UnixNano,
) {
	var (
		given = make([]dts.Datapoint, 0, len(input))
		want  = make([]prompb.Sample, 0,
			len(expectedOutput))
	)
	for i, v := range input {
		given = append(given, dts.Datapoint{
			TimestampNanos: now.Add(time.Duration(i) * time.Minute),
			Value:          v,
		})
	}
	for i, v := range expectedOutput {
		want = append(want, prompb.Sample{
			Timestamp: ms(now.Add(time.Duration(i) * time.Minute)),
			Value:     v,
		})
	}

	opts := NewPromConvertOptions().
		SetValueDecreaseTolerance(decreaseTolerance).
		SetValueDecreaseToleranceUntil(toleranceUntil)

	testSeriesIteratorsToPromResult(t, false, 0, given, want, opts)
}

// testSeriesIteratorsToPromResult converts a single mock series (gauge or
// counter, per isCounter) and verifies the resulting Prometheus samples.
func testSeriesIteratorsToPromResult(
	t *testing.T,
	isCounter bool,
	maxResolution time.Duration,
	given []dts.Datapoint,
	want []prompb.Sample,
	opts PromConvertOptions,
) {
	ctrl := xtest.NewController(t)
	defer ctrl.Finish()

	var (
		gaugePayload = &annotation.Payload{
			OpenMetricsFamilyType: annotation.OpenMetricsFamilyType_GAUGE,
		}
		counterPayload = &annotation.Payload{
			OpenMetricsFamilyType:        annotation.OpenMetricsFamilyType_COUNTER,
			OpenMetricsHandleValueResets: true,
		}
	)

	firstAnnotation := annotationBytes(t, gaugePayload)
	if isCounter {
		firstAnnotation = annotationBytes(t, counterPayload)
	}

	iter := encoding.NewMockSeriesIterator(ctrl)

	iter.EXPECT().FirstAnnotation().Return(firstAnnotation).MaxTimes(1)
	for _, dp := range given {
		iter.EXPECT().Next().Return(true)
		iter.EXPECT().Current().Return(dp, xtime.Second, nil)
	}

	iter.EXPECT().Err().Return(nil)
	iter.EXPECT().Next().Return(false)

	iter.EXPECT().Tags().Return(ident.EmptyTagIterator)

	verifyResult := func(t *testing.T, expected []prompb.Sample, res PromResult) {
		timeSeries := res.PromResult.GetTimeseries()
		if len(expected) == 0 {
			require.Equal(t, 0, len(timeSeries))
		} else {
			require.Equal(t, 1, len(timeSeries))
			samples := timeSeries[0].Samples
			require.Equal(t, expected, samples)
		}
	}

	iters := []encoding.SeriesIterator{iter}

	it := encoding.NewMockSeriesIterators(ctrl)
	it.EXPECT().Iters().Return(iters).AnyTimes()
	it.EXPECT().Len().Return(len(iters)).AnyTimes()

	fetchResultMetadata := block.NewResultMetadata()
	fetchResultMetadata.Resolutions = []time.Duration{maxResolution, maxResolution / 2}
	fetchResult, err := consolidators.NewSeriesFetchResult(it, nil, fetchResultMetadata)
	assert.NoError(t, err)

	res, err := SeriesIteratorsToPromResult(
		context.Background(), fetchResult, nil, models.NewTagOptions(), opts, buildFetchOpts())
	require.NoError(t, err)
	verifyResult(t, want, res)
}

// ms converts a UnixNano timestamp to milliseconds, the unit used by prompb samples.
func ms(t xtime.UnixNano) int64 {
	return t.ToNormalizedTime(time.Millisecond)
}

// annotationBytes marshals the given payload into annotation bytes, or returns
// nil if the payload is nil.
func annotationBytes(t *testing.T, payload *annotation.Payload) dts.Annotation {
	if payload != nil {
		annotationBytes, err := payload.Marshal()
		require.NoError(t, err)
		return annotationBytes
	}
	return nil
}