github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/query/graphite/ts/series_test.go (about)

     1  // Copyright (c) 2019 Uber Technologies, Inc.
     2  //
     3  // Permission is hereby granted, free of charge, to any person obtaining a copy
     4  // of this software and associated documentation files (the "Software"), to deal
     5  // in the Software without restriction, including without limitation the rights
     6  // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     7  // copies of the Software, and to permit persons to whom the Software is
     8  // furnished to do so, subject to the following conditions:
     9  //
    10  // The above copyright notice and this permission notice shall be included in
    11  // all copies or substantial portions of the Software.
    12  //
    13  // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    14  // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    15  // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    16  // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    17  // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    18  // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    19  // THE SOFTWARE.
    20  
    21  package ts
    22  
    23  import (
    24  	"math"
    25  	"math/rand"
    26  	"testing"
    27  	"time"
    28  
    29  	"github.com/m3db/m3/src/query/graphite/context"
    30  	xtest "github.com/m3db/m3/src/query/graphite/testing"
    31  
    32  	"github.com/stretchr/testify/assert"
    33  	"github.com/stretchr/testify/require"
    34  )
    35  
    36  // A Datapoint is a datapoint (timestamp, value, optional series) used in testing
    37  type testDatapoint struct {
    38  	SeriesName string
    39  	Timestamp  time.Time
    40  	Value      float64
    41  }
    42  
    43  // Datapoints is a set of datapoints
    44  type testDatapoints []testDatapoint
    45  
    46  // Shuffle randomizes the set of datapoints
    47  func (pts testDatapoints) Shuffle() {
    48  	for i := len(pts) - 1; i > 0; i-- {
    49  		if j := rand.Intn(i + 1); i != j {
    50  			pts[i], pts[j] = pts[j], pts[i]
    51  		}
    52  	}
    53  }
    54  
    55  func TestLcm(t *testing.T) {
    56  	assert.Equal(t, int64(210), Lcm(10, 21))
    57  	assert.Equal(t, int64(210), Lcm(10, -21))
    58  	assert.Equal(t, int64(210), Lcm(-10, 21))
    59  	assert.Equal(t, int64(210), Lcm(-10, -21))
    60  	assert.Equal(t, int64(306), Lcm(18, 17))
    61  	assert.Equal(t, int64(306), Lcm(17, 18))
    62  	assert.Equal(t, int64(0), Lcm(0, 5))
    63  }
    64  
    65  func TestGcd(t *testing.T) {
    66  	assert.Equal(t, int64(5), Gcd(5, 10))
    67  	assert.Equal(t, int64(5), Gcd(10, 5))
    68  	assert.Equal(t, int64(5), Gcd(-10, 5))
    69  	assert.Equal(t, int64(5), Gcd(10, -5))
    70  	assert.Equal(t, int64(5), Gcd(-10, -5))
    71  	assert.Equal(t, int64(10), Gcd(10, 10))
    72  	assert.Equal(t, int64(8), Gcd(8, 0))
    73  	assert.Equal(t, int64(8), Gcd(0, 8))
    74  }
    75  
    76  func TestAllNaN(t *testing.T) {
    77  	ctx := context.New()
    78  	defer ctx.Close()
    79  
    80  	values := NewValues(ctx, 1000, 30)
    81  	assert.True(t, values.AllNaN())
    82  	values.SetValueAt(10, math.NaN())
    83  	assert.True(t, values.AllNaN())
    84  	values.SetValueAt(20, 100)
    85  	assert.False(t, values.AllNaN())
    86  
    87  	assert.True(t, NewConstantValues(ctx, math.NaN(), 1000, 10).AllNaN())
    88  	assert.False(t, NewConstantValues(ctx, 200, 1000, 10).AllNaN())
    89  }
    90  
    91  func TestConstantValues(t *testing.T) {
    92  	ctx := context.New()
    93  	defer ctx.Close()
    94  
    95  	series := NewSeries(ctx, "foo", time.Now(), NewConstantValues(ctx, 100, 50, 1000))
    96  	assert.Equal(t, 50, series.Len())
    97  	n := series.ValueAt(10)
    98  	assert.Equal(t, float64(100), n)
    99  
   100  	agg := series.CalcStatistics()
   101  	assert.Equal(t, uint(50), agg.Count)
   102  	assert.Equal(t, float64(100), agg.Min)
   103  	assert.Equal(t, float64(100), agg.Max)
   104  	assert.Equal(t, float64(100), agg.Mean)
   105  	assert.Equal(t, float64(0), agg.StdDev)
   106  }
   107  
   108  func TestConstantNaNValues(t *testing.T) {
   109  	ctx := context.New()
   110  	defer ctx.Close()
   111  
   112  	series := NewSeries(ctx, "foo", time.Now(), NewConstantValues(ctx, math.NaN(), 50, 1000))
   113  	assert.Equal(t, 50, series.Len())
   114  	n := series.ValueAt(10)
   115  	assert.True(t, math.IsNaN(n))
   116  	assert.False(t, series.IsConsolidationFuncSet())
   117  	series.SetConsolidationFunc(ConsolidationSum.Func())
   118  	assert.True(t, series.IsConsolidationFuncSet())
   119  	xtest.Equalish(t, ConsolidationSum.Func(), series.consolidationFunc)
   120  	agg := series.CalcStatistics()
   121  	assert.Equal(t, uint(0), agg.Count)
   122  	assert.True(t, math.IsNaN(agg.Min))
   123  	assert.True(t, math.IsNaN(agg.Max))
   124  	assert.True(t, math.IsNaN(agg.Mean))
   125  	assert.Equal(t, float64(0), agg.StdDev)
   126  }
   127  
   128  func TestInvalidConsolidation(t *testing.T) {
   129  	var (
   130  		ctx     = context.New()
   131  		dummyCF = func(existing, toAdd float64, count int) float64 {
   132  			return existing
   133  		}
   134  		millisPerStep = 100
   135  		start         = time.Now()
   136  		end           = start.Add(-1 * time.Hour)
   137  	)
   138  	NewConsolidation(ctx, start, end, millisPerStep, dummyCF)
   139  }
   140  
   141  func TestConsolidation(t *testing.T) {
   142  	ctx := context.New()
   143  	defer ctx.Close()
   144  
   145  	startTime := time.Now()
   146  	endTime := startTime.Add(5 * time.Minute)
   147  
   148  	datapoints := testDatapoints{
   149  		{Timestamp: startTime.Add(66 * time.Second), Value: 4.5},
   150  		{Timestamp: startTime.Add(67 * time.Second), Value: 5.0},
   151  		{Timestamp: startTime.Add(5 * time.Second), Value: 65.3},
   152  		{Timestamp: startTime.Add(7 * time.Second), Value: 20.5},
   153  		{Timestamp: startTime.Add(23 * time.Second), Value: 17.5},
   154  		{Timestamp: startTime.Add(74 * time.Second), Value: 20.5},
   155  	}
   156  
   157  	consolidation := NewConsolidation(
   158  		ctx, startTime, endTime, 10*1000, func(a, b float64, count int) float64 {
   159  			return a + b
   160  		})
   161  
   162  	for i := range datapoints {
   163  		consolidation.AddDatapoint(datapoints[i].Timestamp, datapoints[i].Value)
   164  	}
   165  
   166  	series := consolidation.BuildSeries("foo", Finalize)
   167  	assert.Equal(t, 30, series.Len())
   168  	statistics := series.CalcStatistics()
   169  	assert.Equal(t, float64(9.5), statistics.Min)     // Sum of 66 and 67 second
   170  	assert.Equal(t, float64(85.8), statistics.Max)    // Sum of 5 and 7 seconds
   171  	assert.Equal(t, uint(4), statistics.Count)        // 66 and 67 are combined, 5 and 7 are combined
   172  	assert.Equal(t, float64(33.325), statistics.Mean) // Average of sums
   173  }
   174  
// consolidationTest describes one consolidation scenario: f combines values
// across series, stepAggregation aggregates values within a single step, and
// expectedValues lists the values expected in the consolidated output.
type consolidationTest struct {
	name            string
	f               ConsolidationFunc
	stepAggregation ConsolidationFunc
	expectedValues  []float64
}
   181  
// Shared one-minute consolidation window for the tests below; the start is
// truncated to a minute boundary so step alignment is deterministic.
var (
	consolidationStartTime = time.Now().Truncate(time.Minute)
	consolidationEndTime   = consolidationStartTime.Add(1 * time.Minute)
)
   186  
// newConsolidationTestSeries returns four constant-valued series positioned
// differently relative to the one-minute consolidation window
// [consolidationStartTime, consolidationEndTime). Each 10000ms-step series
// has 6 steps (60s of data); series "d" uses 60 steps of 1000ms instead.
func newConsolidationTestSeries(ctx context.Context) []*Series {
	return []*Series{
		// series "a" starts and ends exactly with the consolidation window
		NewSeries(ctx, "a", consolidationStartTime,
			NewConstantValues(ctx, 10, 6, 10000)),

		// series "b" starts 30s before the window and ends 30s before its end
		NewSeries(ctx, "b", consolidationStartTime.Add(-30*time.Second),
			NewConstantValues(ctx, 15, 6, 10000)),

		// series "c" starts 30s into the window and ends 30s after its end
		NewSeries(ctx, "c", consolidationStartTime.Add(30*time.Second),
			NewConstantValues(ctx, 17, 6, 10000)),

		// series "d" covers the same window as "a" with a 10x smaller step
		NewSeries(ctx, "d", consolidationStartTime,
			NewConstantValues(ctx, 3, 60, 1000)),
	}
}
   206  
   207  func TestConsolidateSeries(t *testing.T) {
   208  	ctx := context.New()
   209  	defer ctx.Close()
   210  
   211  	consolidatedSeries := newConsolidationTestSeries(ctx)
   212  	tests := []consolidationTest{
   213  		{"sumMins", Sum, Min, []float64{28, 28, 28, 30, 30, 30}},
   214  		{"minSums", Min, Sum, []float64{10, 10, 10, 10, 10, 10}},
   215  		{"minMins", Min, Min, []float64{3, 3, 3, 3, 3, 3}},
   216  	}
   217  
   218  	for _, test := range tests {
   219  		consolidation := NewConsolidation(ctx, consolidationStartTime, consolidationEndTime, 10000, test.f)
   220  		for _, series := range consolidatedSeries {
   221  			consolidation.AddSeries(series, test.stepAggregation)
   222  		}
   223  
   224  		results := consolidation.BuildSeries("foo", Finalize)
   225  		require.Equal(t, consolidationStartTime, results.StartTime(), "invalid start time for %s", test.name)
   226  		require.Equal(t, consolidationEndTime, results.EndTime(), "invalid end time for %s", test.name)
   227  		require.Equal(t, 6, results.Len(), "invalid consolidation size for %s", test.name)
   228  
   229  		for i := 0; i < results.Len(); i++ {
   230  			value := results.ValueAt(i)
   231  			assert.Equal(t, test.expectedValues[i], value, "invalid value for %d of %s", i, test.name)
   232  		}
   233  	}
   234  }
   235  
// TestConsolidationAcrossTimeIntervals consolidates each fixture series on
// its own into a single one-minute step, verifying how much of each series
// overlaps the consolidation window.
func TestConsolidationAcrossTimeIntervals(t *testing.T) {
	ctx := context.New()
	defer ctx.Close()

	expectedResults := []float64{
		10 * 6, // entire range falls in the one minute period
		15 * 3, // last half falls into the one minute period
		17 * 3, // first half falls into the one minute period
		3 * 60, // entire range falls, at smaller interval
	}

	consolidatedSeries := newConsolidationTestSeries(ctx)
	for i := range consolidatedSeries {
		series, expected := consolidatedSeries[i], expectedResults[i]
		// One 60000ms step spans the whole window, summing all overlap.
		consolidation := NewConsolidation(ctx, consolidationStartTime, consolidationEndTime, 60000, Sum)
		consolidation.AddSeries(series, Sum)
		result := consolidation.BuildSeries("foo", Finalize)
		assert.Equal(t, consolidationStartTime, result.StartTime(), "incorrect start to %s", series.Name())
		assert.Equal(t, consolidationEndTime, result.EndTime(), "incorrect end to %s", series.Name())
		require.Equal(t, 1, result.Len(), "incorrect # of steps for %s", series.Name())

		value := result.ValueAt(0)
		assert.Equal(t, expected, value, "incorrect value for %s", series.Name())

	}
}
   262  
   263  func withValues(vals MutableValues, values []float64) Values {
   264  	for i, n := range values {
   265  		vals.SetValueAt(i, n)
   266  	}
   267  	return vals
   268  }
   269  
   270  func TestSlicing(t *testing.T) {
   271  	ctx := context.New()
   272  	defer ctx.Close()
   273  
   274  	tests := []struct {
   275  		input      Values
   276  		begin, end int
   277  		output     Values
   278  	}{
   279  		{
   280  			withValues(NewValues(ctx, 100, 5), []float64{10.0, 20.0, 30.0, 40.0, 50.0}),
   281  			1, 3,
   282  			withValues(NewValues(ctx, 100, 2), []float64{20.0, 30.0}),
   283  		},
   284  		{
   285  			NewConstantValues(ctx, 42.0, 10, 100),
   286  			1, 5,
   287  			NewConstantValues(ctx, 42.0, 4, 100),
   288  		},
   289  	}
   290  
   291  	start := time.Now()
   292  
   293  	for _, test := range tests {
   294  		input := NewSeries(ctx, "<nil>", start, test.input)
   295  		output, err := input.Slice(test.begin, test.end)
   296  		require.NoError(t, err)
   297  
   298  		expect := NewSeries(ctx, "<nil>", input.StartTimeForStep(test.begin), test.output)
   299  		require.Equal(t, output.Len(), expect.Len())
   300  
   301  		for step := 0; step < output.Len(); step++ {
   302  			v1 := output.ValueAt(step)
   303  			v2 := expect.ValueAt(step)
   304  			assert.Equal(t, v1, v2)
   305  		}
   306  	}
   307  }
   308  
   309  func TestAddSeries(t *testing.T) {
   310  	ctx := context.New()
   311  	defer ctx.Close()
   312  
   313  	seriesStart := time.Now().Add(10000 * time.Millisecond)
   314  	ctxStart := seriesStart.Add(400 * time.Millisecond)
   315  	ctxEnd := ctxStart.Add(7200 * time.Millisecond)
   316  	stepSize := 3600
   317  	values := NewValues(ctx, stepSize, 3)
   318  	for i := 0; i < values.Len(); i++ {
   319  		values.SetValueAt(i, float64(i+1))
   320  	}
   321  	series := NewSeries(ctx, "foo", seriesStart, values)
   322  	consolidation := NewConsolidation(ctx, ctxStart, ctxEnd, stepSize, Avg)
   323  	consolidation.AddSeries(series, Avg)
   324  	consolidated := consolidation.BuildSeries("consolidated", Finalize)
   325  	require.Equal(t, 2, consolidated.Len())
   326  	require.Equal(t, 2.0, consolidated.ValueAt(0))
   327  	require.Equal(t, 3.0, consolidated.ValueAt(1))
   328  }
   329  
   330  func TestIntersectAndResize(t *testing.T) {
   331  	ctx := context.New()
   332  	defer ctx.Close()
   333  
   334  	seriesStart := time.Now()
   335  	stepSize := 1000
   336  	values := NewValues(ctx, stepSize, 3)
   337  	for i := 0; i < values.Len(); i++ {
   338  		values.SetValueAt(i, float64(i+1))
   339  	}
   340  	series := NewSeries(ctx, "foo", seriesStart, values)
   341  	tests := []struct {
   342  		startOffset time.Duration
   343  		endOffset   time.Duration
   344  		newStep     int
   345  	}{
   346  		{
   347  			startOffset: -1 * time.Hour,
   348  			endOffset:   -1 * time.Hour,
   349  			newStep:     stepSize,
   350  		},
   351  		{
   352  			startOffset: 0,
   353  			endOffset:   0,
   354  			newStep:     stepSize,
   355  		},
   356  		{
   357  			startOffset: 0,
   358  			endOffset:   0,
   359  			newStep:     stepSize / 2,
   360  		},
   361  	}
   362  
   363  	for _, test := range tests {
   364  		start := seriesStart.Add(test.startOffset)
   365  		end := seriesStart.Add(test.endOffset)
   366  		result, err := series.IntersectAndResize(start, end, test.newStep, series.ConsolidationFunc())
   367  		require.NoError(t, err)
   368  		require.NotNil(t, result)
   369  		require.Equal(t, start, result.StartTime())
   370  		require.Equal(t, end, result.EndTime())
   371  		require.Equal(t, series.Specification, result.Specification)
   372  		require.Equal(t, series.Name(), result.Name())
   373  		require.Equal(t, test.newStep, result.MillisPerStep())
   374  	}
   375  }
   376  
// Shared fixtures for the benchmarks below: a 24-hour range ending now,
// sampled every benchmarkStepInMillis.
var (
	benchmarkRange     = 24 * time.Hour
	benchmarkEndTime   = time.Now()
	benchmarkStartTime = benchmarkEndTime.Add(-benchmarkRange)
	benchmarkNumSteps  = NumSteps(benchmarkStartTime, benchmarkEndTime, benchmarkStepInMillis)
)

const (
	// benchmarkStepInMillis is the benchmark step size (10 seconds).
	benchmarkStepInMillis = 10000
)
   387  
   388  func buildBenchmarkDatapoints() testDatapoints {
   389  	datapoints := make(testDatapoints, benchmarkNumSteps)
   390  	for i := range datapoints {
   391  		datapoints[i].Timestamp = benchmarkStartTime.Add(time.Millisecond *
   392  			time.Duration(i*benchmarkStepInMillis))
   393  		datapoints[i].Value = 5
   394  	}
   395  
   396  	datapoints.Shuffle()
   397  	return datapoints
   398  }
   399  
   400  func BenchmarkUint64Adds(b *testing.B) {
   401  	nan := math.Float64bits(math.NaN())
   402  	datapoints := buildBenchmarkDatapoints()
   403  	for i := 0; i < b.N; i++ {
   404  		values := make([]uint64, len(datapoints))
   405  		for j := 0; j < len(datapoints); j++ {
   406  			values[j] = nan
   407  		}
   408  
   409  		for j := 0; j < len(datapoints); j++ {
   410  			startTimeMillis := benchmarkStartTime.UnixNano() / 1000000
   411  			millis := datapoints[j].Timestamp.UnixNano() / 1000000
   412  
   413  			step := int(millis-startTimeMillis) / benchmarkStepInMillis
   414  			values[step] = 100 + 2
   415  		}
   416  	}
   417  }
   418  
   419  func BenchmarkFloat64Adds(b *testing.B) {
   420  	nan := math.NaN()
   421  	datapoints := buildBenchmarkDatapoints()
   422  	for i := 0; i < b.N; i++ {
   423  		values := make([]float64, len(datapoints))
   424  		for j := 0; j < len(datapoints); j++ {
   425  			values[j] = nan
   426  		}
   427  
   428  		for j := 0; j < len(datapoints); j++ {
   429  			startTimeMillis := benchmarkStartTime.UnixNano() / 1000000
   430  			millis := datapoints[j].Timestamp.UnixNano() / 1000000
   431  
   432  			step := int(millis-startTimeMillis) / benchmarkStepInMillis
   433  			if math.IsNaN(values[step]) {
   434  				values[step] = 200
   435  			} else {
   436  				values[step] = Sum(values[step], 200, 1)
   437  			}
   438  		}
   439  	}
   440  }
   441  
   442  func BenchmarkConsolidation(b *testing.B) {
   443  	ctx := context.New()
   444  	defer ctx.Close()
   445  
   446  	datapoints := buildBenchmarkDatapoints()
   447  	for i := 0; i < b.N; i++ {
   448  		consolidation := NewConsolidation(ctx, benchmarkStartTime, benchmarkEndTime, benchmarkStepInMillis, Sum)
   449  		for j := 0; j < len(datapoints); j++ {
   450  			consolidation.AddDatapoint(datapoints[j].Timestamp, datapoints[j].Value)
   451  		}
   452  		consolidation.BuildSeries("foo", Finalize)
   453  	}
   454  }
   455  
   456  func BenchmarkConsolidationAddSeries(b *testing.B) {
   457  	ctx := context.New()
   458  	defer ctx.Close()
   459  
   460  	c := NewConsolidation(ctx, benchmarkStartTime, benchmarkEndTime, benchmarkStepInMillis, Avg)
   461  	stepsInMillis := benchmarkStepInMillis / 10
   462  	numSteps := int(benchmarkRange/time.Millisecond) / stepsInMillis
   463  	series := NewSeries(ctx, "a", benchmarkStartTime,
   464  		NewConstantValues(ctx, 3.1428, numSteps, stepsInMillis))
   465  
   466  	require.Equal(b, benchmarkStartTime, series.StartTime(), "start time not equal")
   467  	require.Equal(b, benchmarkEndTime, series.EndTime(), "end time not equal")
   468  
   469  	for i := 0; i < b.N; i++ {
   470  		c.AddSeries(series, Sum)
   471  	}
   472  }
   473  
   474  func BenchmarkNewSeries(b *testing.B) {
   475  	ctx := context.New()
   476  	defer ctx.Close()
   477  	for i := 0; i < b.N; i++ {
   478  		NewSeries(ctx, "a", benchmarkStartTime,
   479  			NewConstantValues(ctx, 3.1428, 1, 1000))
   480  	}
   481  }