github.com/m3db/m3@v1.5.0/src/query/graphite/native/builtin_functions.go

     1  // Copyright (c) 2019 Uber Technologies, Inc.
     2  //
     3  // Permission is hereby granted, free of charge, to any person obtaining a copy
     4  // of this software and associated documentation files (the "Software"), to deal
     5  // in the Software without restriction, including without limitation the rights
     6  // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     7  // copies of the Software, and to permit persons to whom the Software is
     8  // furnished to do so, subject to the following conditions:
     9  //
    10  // The above copyright notice and this permission notice shall be included in
    11  // all copies or substantial portions of the Software.
    12  //
    13  // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    14  // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    15  // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    16  // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    17  // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    18  // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    19  // THE SOFTWARE.
    20  
    21  package native
    22  
    23  import (
    24  	"bytes"
    25  	"errors"
    26  	"fmt"
    27  	"math"
    28  	"math/rand"
    29  	"regexp"
    30  	"runtime"
    31  	"sort"
    32  	"strings"
    33  	"sync"
    34  	"time"
    35  
    36  	"github.com/m3db/m3/src/query/graphite/common"
    37  	"github.com/m3db/m3/src/query/graphite/graphite"
    38  	"github.com/m3db/m3/src/query/graphite/ts"
    39  	"github.com/m3db/m3/src/query/util"
    40  	xerrors "github.com/m3db/m3/src/x/errors"
    41  )
    42  
    43  const (
    44  	millisPerSecond     = 1000
    45  	secondsPerDay       = 24 * 3600
    46  	daysPerWeek         = 7
    47  	secondsPerWeek      = secondsPerDay * daysPerWeek
    48  	cactiStyleFormat    = "%.2f"
    49  	wrappingFmt         = "%s(%s)"
    50  	alpha               = 0.1
    51  	gamma               = 0.1
    52  	beta                = 0.0035
    53  	defaultXFilesFactor = 0.0
    54  )
    55  
    56  func joinPathExpr(series ts.SeriesList) string {
    57  	seen := make(map[string]struct{})
    58  
    59  	joined := make([]string, 0, series.Len())
    60  	for _, s := range series.Values {
    61  		if len(s.Specification) == 0 {
    62  			continue
    63  		}
    64  
    65  		if _, exists := seen[s.Specification]; exists {
    66  			continue
    67  		}
    68  
    69  		seen[s.Specification] = struct{}{}
    70  		joined = append(joined, s.Specification)
    71  	}
    72  
    73  	return strings.Join(joined, ",")
    74  }
    75  
    76  // sortBy allows for sorting by an aggregation function.
    77  func sortBy(ctx *common.Context, series singlePathSpec, fn string, reverse bool) (ts.SeriesList, error) {
    78  	var (
    79  		result ts.SeriesList
    80  		err    error
    81  	)
    82  	if strings.HasPrefix(fn, "min") {
    83  		result, err = lowest(ctx, series, len(series.Values), fn)
    84  	} else {
    85  		result, err = highest(ctx, series, len(series.Values), fn)
    86  	}
    87  	if err != nil {
    88  		return ts.SeriesList{}, err
    89  	}
    90  	if reverse {
    91  		reverseSeries(result.Values)
    92  	}
    93  	return result, nil
    94  }
    95  
    96  func reverseSeries(s []*ts.Series) {
    97  	for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
    98  		s[i], s[j] = s[j], s[i]
    99  	}
   100  }
   101  
   102  // sortByName sorts timeseries results by their names
   103  func sortByName(_ *common.Context, series singlePathSpec, natural, reverse bool) (ts.SeriesList, error) {
   104  	sorted := make([]*ts.Series, len(series.Values))
   105  	for i := range series.Values {
   106  		sorted[i] = series.Values[i]
   107  	}
   108  
   109  	var sortedBy sort.Interface
   110  	if natural {
   111  		sortedBy = ts.SeriesByNameAndNaturalNumbers(sorted)
   112  	} else {
   113  		sortedBy = ts.SeriesByName(sorted)
   114  	}
   115  
   116  	if reverse {
   117  		sortedBy = sort.Reverse(sortedBy)
   118  	}
   119  
   120  	// Use sort.Stable for deterministic output.
   121  	sort.Stable(sortedBy)
   122  
   123  	r := ts.SeriesList(series)
   124  	r.Values = sorted
   125  	r.SortApplied = true
   126  	return r, nil
   127  }
   128  
   129  // sortByTotal sorts timeseries results by the sum of values.
   130  func sortByTotal(ctx *common.Context, series singlePathSpec) (ts.SeriesList, error) {
   131  	return highestSum(ctx, series, len(series.Values))
   132  }
   133  
   134  // sortByMaxima sorts timeseries by the maximum value across the time period specified.
   135  func sortByMaxima(ctx *common.Context, series singlePathSpec) (ts.SeriesList, error) {
   136  	return highestMax(ctx, series, len(series.Values))
   137  }
   138  
    139  // useSeriesAbove compares the maximum of each series against the given maxAllowedValue.
    140  // If the series maximum is greater than maxAllowedValue, the search and replace
    141  // strings are applied against the series name to plot a related metric.
    142  //
    143  // e.g. given useSeriesAbove(ganglia.metric1.reqs,10,'reqs','time'),
    144  // the response time metric will be plotted only when the maximum value of the
    145  // corresponding request/s metric is > 10.
    146  // Note that search and replace are applied as plain substrings, not regular expressions.
   147  //nolint:govet,gocritic
   148  func useSeriesAbove(
   149  	ctx *common.Context,
   150  	seriesList singlePathSpec,
   151  	maxAllowedValue float64,
   152  	search, replace string,
   153  ) (ts.SeriesList, error) {
   154  	var (
   155  		mu             sync.Mutex
   156  		multiErr       xerrors.MultiError
   157  		newNames       []string
   158  		output         = make([]*ts.Series, 0, len(seriesList.Values))
   159  		maxConcurrency = runtime.GOMAXPROCS(0) / 2
   160  	)
   161  
   162  	for _, series := range seriesList.Values {
   163  		if series.SafeMax() > maxAllowedValue {
   164  			seriesName := strings.Replace(series.Name(), search, replace, -1)
   165  			newNames = append(newNames, seriesName)
   166  		}
   167  	}
   168  
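         	// Evaluate the rewritten targets in chunks so that at most maxConcurrency
         	// (roughly half of GOMAXPROCS) fetches run at once; results and errors are
         	// collected under the mutex, and an error stops work after the current chunk.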
   169  	for _, newNameChunk := range chunkArrayHelper(newNames, maxConcurrency) {
   170  		var wg sync.WaitGroup
   171  		for _, newTarget := range newNameChunk {
   172  			newTarget := newTarget
   173  			wg.Add(1)
   174  			go func() {
   175  				defer wg.Done()
   176  				resultSeriesList, err := evaluateTarget(ctx, newTarget)
   177  
   178  				mu.Lock()
   179  				defer mu.Unlock()
   180  
   181  				if err != nil {
   182  					multiErr = multiErr.Add(err)
   183  					return
   184  				}
   185  
   186  				for _, resultSeries := range resultSeriesList.Values {
   187  					resultSeries.Specification = newTarget
   188  					output = append(output, resultSeries)
   189  				}
   190  			}()
   191  		}
   192  		wg.Wait()
   193  
   194  		if err := multiErr.LastError(); err != nil {
   195  			return ts.NewSeriesList(), err
   196  		}
   197  	}
   198  
   199  	// Retain metadata but mark as unsorted since this was done in parallel.
   200  	r := ts.SeriesList(seriesList)
   201  	r.Values = output
   202  	r.SortApplied = false
   203  	return r, nil
   204  }
   205  
   206  // sortByMinima sorts timeseries by the minimum value across the time period specified.
   207  func sortByMinima(ctx *common.Context, series singlePathSpec) (ts.SeriesList, error) {
   208  	return lowest(ctx, series, len(series.Values), "min")
   209  }
   210  
   211  type valueComparator func(v, threshold float64) bool
   212  
   213  func compareByFunction(
   214  	_ *common.Context,
   215  	series singlePathSpec,
   216  	sr ts.SeriesReducer,
   217  	vc valueComparator,
   218  	threshold float64,
   219  ) (ts.SeriesList, error) {
   220  	res := make([]*ts.Series, 0, len(series.Values))
   221  	for _, s := range series.Values {
   222  		stats := sr(s)
   223  		if vc(stats, threshold) {
   224  			res = append(res, s)
   225  		}
   226  	}
   227  
   228  	r := ts.SeriesList(series)
   229  	r.Values = res
   230  	return r, nil
   231  }
   232  
   233  func aboveByFunction(
   234  	ctx *common.Context,
   235  	series singlePathSpec,
   236  	sr ts.SeriesReducer,
   237  	threshold float64,
   238  ) (ts.SeriesList, error) {
   239  	return compareByFunction(ctx, series, sr, func(stats, threshold float64) bool {
   240  		return stats > threshold
   241  	}, threshold)
   242  }
   243  
   244  func belowByFunction(
   245  	ctx *common.Context,
   246  	series singlePathSpec,
   247  	sr ts.SeriesReducer,
   248  	threshold float64,
   249  ) (ts.SeriesList, error) {
   250  	return compareByFunction(ctx, series, sr, func(stats, threshold float64) bool {
   251  		return stats < threshold
   252  	}, threshold)
   253  }
   254  
    255  // maximumAbove takes one metric or a wildcard seriesList followed by a floating point number n,
   256  // returns only the metrics with a maximum value above n.
   257  func maximumAbove(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   258  	sr := ts.SeriesReducerMax.Reducer()
   259  	return aboveByFunction(ctx, series, sr, n)
   260  }
   261  
    262  // minimumAbove takes one metric or a wildcard seriesList followed by a floating point number n,
   263  // returns only the metrics with a minimum value above n.
   264  func minimumAbove(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   265  	sr := ts.SeriesReducerMin.Reducer()
   266  	return aboveByFunction(ctx, series, sr, n)
   267  }
   268  
    269  // averageAbove takes one metric or a wildcard seriesList followed by a floating point number n,
   270  // returns only the metrics with an average value above n.
   271  func averageAbove(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   272  	sr := ts.SeriesReducerAvg.Reducer()
   273  	return aboveByFunction(ctx, series, sr, n)
   274  }
   275  
    276  // averageBelow takes one metric or a wildcard seriesList followed by a floating point number n,
   277  // returns only the metrics with an average value below n.
   278  func averageBelow(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   279  	sr := ts.SeriesReducerAvg.Reducer()
   280  	return belowByFunction(ctx, series, sr, n)
   281  }
   282  
    283  // currentAbove takes one metric or a wildcard seriesList followed by a floating point number n,
   284  // returns only the metrics with the last value above n.
   285  func currentAbove(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   286  	sr := ts.SeriesReducerLast.Reducer()
   287  	return aboveByFunction(ctx, series, sr, n)
   288  }
   289  
    290  // currentBelow takes one metric or a wildcard seriesList followed by a floating point number n,
   291  // returns only the metrics with the last value below n.
   292  func currentBelow(ctx *common.Context, series singlePathSpec, n float64) (ts.SeriesList, error) {
   293  	sr := ts.SeriesReducerLast.Reducer()
   294  	return belowByFunction(ctx, series, sr, n)
   295  }
   296  
    297  // constantLine takes a value and creates a constant line at that value.
   298  func constantLine(ctx *common.Context, value float64) (ts.SeriesList, error) {
   299  	newSeries, err := common.ConstantLine(ctx, value)
   300  	if err != nil {
   301  		return ts.NewSeriesList(), err
   302  	}
   303  	return ts.NewSeriesListWithSeries(newSeries), nil
   304  }
   305  
   306  // identity returns datapoints where the value equals the timestamp of the datapoint.
   307  func identity(ctx *common.Context, name string) (ts.SeriesList, error) {
   308  	return common.Identity(ctx, name)
   309  }
   310  
   311  // limit takes one metric or a wildcard seriesList followed by an integer N, and draws
   312  // the first N metrics.
   313  func limit(_ *common.Context, series singlePathSpec, n int) (ts.SeriesList, error) {
   314  	if n < 0 {
   315  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("invalid limit parameter n: %d", n))
   316  	}
   317  	upperBound := int(math.Min(float64(len(series.Values)), float64(n)))
   318  
   319  	if !series.SortApplied {
   320  		// If sort not applied then sort to get deterministic results.
   321  		// Use sort.Stable for deterministic output.
   322  		sort.Stable(ts.SeriesByName(series.Values))
   323  	}
   324  
   325  	r := ts.SeriesList(series)
   326  	r.Values = series.Values[0:upperBound]
   327  	// Note: If wasn't sorted we applied a sort by name for determinism.
   328  	r.SortApplied = true
   329  	return r, nil
   330  }
   331  
   332  // grep takes a metric or a wildcard seriesList, followed by a regular
   333  // expression in double quotes. Excludes metrics that don’t match the regular expression.
   334  func grep(_ *common.Context, seriesList singlePathSpec, regex string) (ts.SeriesList, error) {
   335  	re, err := regexp.Compile(regex)
   336  	if err != nil {
   337  		return ts.NewSeriesList(), err
   338  	}
   339  
   340  	filtered := seriesList.Values[:0]
   341  	for _, series := range seriesList.Values {
   342  		if re.MatchString(series.Name()) {
   343  			filtered = append(filtered, series)
   344  		}
   345  	}
   346  
   347  	// Retain sort applied function.
   348  	r := ts.SeriesList(seriesList)
   349  	r.Values = filtered
   350  	return r, nil
   351  }
   352  
   353  // timeShift draws the selected metrics shifted in time. If no sign is given, a minus sign ( - ) is
   354  // implied which will shift the metric back in time. If a plus sign ( + ) is given, the metric will
    355  // be shifted forward in time.
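         // e.g. timeShift(some.metric, "1d") draws some.metric as it was 24 hours
         // earlier (the metric name here is purely illustrative).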
   356  func timeShift(
   357  	_ *common.Context,
   358  	_ singlePathSpec,
   359  	timeShiftS string,
   360  	_ bool,
   361  	_ bool,
   362  ) (*unaryContextShifter, error) {
   363  	// TODO: implement resetEnd
   364  	if !(strings.HasPrefix(timeShiftS, "+") || strings.HasPrefix(timeShiftS, "-")) {
   365  		timeShiftS = "-" + timeShiftS
   366  	}
   367  
   368  	shift, err := common.ParseInterval(timeShiftS)
   369  	if err != nil {
   370  		return nil, xerrors.NewInvalidParamsError(
   371  			fmt.Errorf("invalid timeShift parameter %s: %w", timeShiftS, err))
   372  	}
   373  
   374  	contextShiftingFn := func(c *common.Context) *common.Context {
   375  		opts := common.NewChildContextOptions()
   376  		opts.AdjustTimeRange(shift, shift, 0, 0)
   377  		childCtx := c.NewChildContext(opts)
   378  		return childCtx
   379  	}
   380  
   381  	transformerFn := func(input ts.SeriesList) (ts.SeriesList, error) {
   382  		output := make([]*ts.Series, input.Len())
   383  		for i, in := range input.Values {
   384  			// NB(jayp): opposite direction
   385  			output[i] = in.Shift(-1 * shift).RenamedTo(fmt.Sprintf("timeShift(%s,%q)", in.Name(), timeShiftS))
   386  		}
   387  		input.Values = output
   388  		return input, nil
   389  	}
   390  
   391  	return &unaryContextShifter{
   392  		ContextShiftFunc: contextShiftingFn,
   393  		UnaryTransformer: transformerFn,
   394  	}, nil
   395  }
   396  
   397  // delay shifts all samples later by an integer number of steps. This can be used
   398  // for custom derivative calculations, among other things. Note: this will pad
   399  // the early end of the data with NaN for every step shifted. delay complements
   400  // other time-displacement functions such as timeShift and timeSlice, in that
   401  // delay is indifferent about the step intervals being shifted.
   402  func delay(
   403  	ctx *common.Context,
   404  	singlePath singlePathSpec,
   405  	steps int,
   406  ) (ts.SeriesList, error) {
   407  	input := ts.SeriesList(singlePath)
   408  	output := make([]*ts.Series, 0, input.Len())
   409  
   410  	for _, series := range input.Values {
   411  		delayedVals := delayValuesHelper(ctx, series, steps)
   412  		delayedSeries := ts.NewSeries(ctx, series.Name(), series.StartTime(), delayedVals)
   413  		renamedSeries := delayedSeries.RenamedTo(fmt.Sprintf("delay(%s,%d)", delayedSeries.Name(), steps))
   414  		output = append(output, renamedSeries)
   415  	}
   416  	input.Values = output
   417  	return input, nil
   418  }
   419  
   420  // delayValuesHelper takes a series and returns a copy of the values after
   421  // delaying the values by `steps` number of steps
   422  func delayValuesHelper(ctx *common.Context, series *ts.Series, steps int) ts.Values {
   423  	output := ts.NewValues(ctx, series.MillisPerStep(), series.Len())
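         	// Indices before `steps` are never written, which leaves the early end of
         	// the data padded with NaN as described in delay's doc comment.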
   424  	for i := steps; i < series.Len(); i++ {
   425  		output.SetValueAt(i, series.ValueAt(i-steps))
   426  	}
   427  	return output
   428  }
   429  
   430  // timeSlice takes one metric or a wildcard metric, followed by a quoted string with the time to start the line and
   431  // another quoted string with the time to end the line. The start and end times are inclusive.
   432  // Useful for filtering out a part of a series of data from a wider range of data.
   433  func timeSlice(ctx *common.Context, inputPath singlePathSpec, start string, end string) (ts.SeriesList, error) {
   434  	var (
   435  		now                     = time.Now()
   436  		tzOffsetForAbsoluteTime time.Duration
   437  	)
   438  	startTime, err := graphite.ParseTime(start, now, tzOffsetForAbsoluteTime)
   439  	if err != nil {
   440  		return ts.NewSeriesList(), err
   441  	}
   442  	endTime, err := graphite.ParseTime(end, now, tzOffsetForAbsoluteTime)
   443  	if err != nil {
   444  		return ts.NewSeriesList(), err
   445  	}
   446  
   447  	input := ts.SeriesList(inputPath)
   448  	output := make([]*ts.Series, 0, input.Len())
   449  
   450  	for _, series := range input.Values {
   451  		stepDuration := time.Duration(series.MillisPerStep()) * time.Millisecond
   452  		truncatedValues := ts.NewValues(ctx, series.MillisPerStep(), series.Len())
   453  
   454  		currentTime := series.StartTime()
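         		// Copy only the points whose timestamps fall within [startTime, endTime];
         		// points outside the slice are left unset.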
   455  		for i := 0; i < series.Len(); i++ {
   456  			equalOrAfterStart := currentTime.Equal(startTime) || currentTime.After(startTime)
   457  			beforeOrEqualEnd := currentTime.Before(endTime) || currentTime.Equal(endTime)
   458  			if equalOrAfterStart && beforeOrEqualEnd {
   459  				truncatedValues.SetValueAt(i, series.ValueAtTime(currentTime))
   460  			}
   461  			currentTime = currentTime.Add(stepDuration)
   462  		}
   463  
   464  		slicedSeries := ts.NewSeries(ctx, series.Name(), series.StartTime(), truncatedValues)
   465  		renamedSlicedSeries := slicedSeries.RenamedTo(fmt.Sprintf("timeSlice(%s, %q, %q)", slicedSeries.Name(), start, end))
   466  		output = append(output, renamedSlicedSeries)
   467  	}
   468  	input.Values = output
   469  	return input, nil
   470  }
   471  
   472  // absolute returns the absolute value of each element in the series.
   473  func absolute(ctx *common.Context, input singlePathSpec) (ts.SeriesList, error) {
   474  	return transform(ctx, input,
   475  		func(fname string) string { return fmt.Sprintf(wrappingFmt, "absolute", fname) },
   476  		math.Abs)
   477  }
   478  
   479  // scale multiplies each element of a collection of time series by a given value
   480  func scale(ctx *common.Context, input singlePathSpec, scale float64) (ts.SeriesList, error) {
   481  	return transform(
   482  		ctx,
   483  		input,
   484  		func(fname string) string {
   485  			newName := fmt.Sprintf("%s,"+common.FloatingPointFormat, fname, scale)
   486  			return fmt.Sprintf(wrappingFmt, "scale", newName)
   487  		},
   488  		common.Scale(scale),
   489  	)
   490  }
   491  
    492  // scaleToSeconds takes one metric or a wildcard seriesList and scales each datapoint to "value per seconds".
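         // e.g. scaleToSeconds(some.counter, 1) rescales per-step values to per-second
         // values (the metric name here is purely illustrative).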
   493  func scaleToSeconds(
   494  	ctx *common.Context,
   495  	seriesList singlePathSpec,
   496  	seconds int,
   497  ) (ts.SeriesList, error) {
   498  	output := make([]*ts.Series, len(seriesList.Values))
   499  	for i, series := range seriesList.Values {
   500  		var (
   501  			outvals = ts.NewValues(ctx, series.MillisPerStep(), series.Len())
   502  			name    = fmt.Sprintf("scaleToSeconds(%s,%d)", series.Name(), seconds)
   503  			factor  = float64(seconds*1000) / float64(series.MillisPerStep()) // convert seconds to millis
   504  		)
   505  		for step := 0; step < series.Len(); step++ {
   506  			value := series.ValueAt(step)
   507  			outvals.SetValueAt(step, value*factor)
   508  		}
   509  		output[i] = ts.NewSeries(ctx, name, series.StartTime(), outvals)
   510  	}
   511  
   512  	r := ts.SeriesList(seriesList)
   513  	r.Values = output
   514  	return r, nil
   515  }
   516  
   517  // offset adds a value to each element of a collection of time series
   518  func offset(ctx *common.Context, input singlePathSpec, factor float64) (ts.SeriesList, error) {
   519  	return transform(
   520  		ctx,
   521  		input,
   522  		func(fname string) string {
   523  			newName := fmt.Sprintf("%s,"+common.FloatingPointFormat, fname, factor)
   524  			return fmt.Sprintf(wrappingFmt, "offset", newName)
   525  		},
   526  		common.Offset(factor),
   527  	)
   528  }
   529  
   530  // transform converts values in a timeseries according to the valueTransformer.
   531  func transform(ctx *common.Context, input singlePathSpec,
   532  	fname func(inputName string) string, fn common.TransformFunc) (ts.SeriesList, error) {
   533  	t := common.NewStatelessTransformer(fn)
   534  	return common.Transform(ctx, ts.SeriesList(input), t, func(in *ts.Series) string {
   535  		return fname(in.Name())
   536  	})
   537  }
   538  
   539  // perSecond computes a derivative adjusted for the series time interval,
   540  // useful for taking a running total metric and showing how many occurrences
   541  // per second were handled
   542  func perSecond(ctx *common.Context, input singlePathSpec, _ float64) (ts.SeriesList, error) {
   543  	// TODO:  we are ignoring maxValue; we may need to implement it
   544  	return common.PerSecond(ctx, ts.SeriesList(input), func(series *ts.Series) string {
   545  		return fmt.Sprintf("perSecond(%s)", series.Name())
   546  	})
   547  }
   548  
   549  // transformNull transforms all nulls (NaNs) in a time series to a defaultValue.
   550  func transformNull(ctx *common.Context, input singlePathSpec, defaultValue float64) (ts.SeriesList, error) {
   551  	return transform(
   552  		ctx,
   553  		input,
   554  		func(fname string) string {
   555  			newName := fmt.Sprintf("%s,"+common.FloatingPointFormat, fname, defaultValue)
   556  			return fmt.Sprintf(wrappingFmt, "transformNull", newName)
   557  		},
   558  		common.TransformNull(defaultValue),
   559  	)
   560  }
   561  
   562  // isNonNull replaces datapoints that are non-null with 1, and null values with 0.
   563  // This is useful for understanding which metrics have data at a given point in time
   564  // (ie, to count which servers are alive).
   565  func isNonNull(ctx *common.Context, input singlePathSpec) (ts.SeriesList, error) {
   566  	return transform(ctx,
   567  		input,
   568  		func(fname string) string { return fmt.Sprintf(wrappingFmt, "isNonNull", fname) },
   569  		common.IsNonNull())
   570  }
   571  
   572  // keepLastValue takes one metric or a wildcard seriesList, and optionally a limit to the number of
   573  // NaN values to skip over. If not specified, limit has a default value of -1, meaning all NaNs will
    574  // be replaced by the closest preceding value that is not a NaN.
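         // e.g. keepLastValue(some.metric, 10) fills gaps of up to 10 consecutive
         // missing points with the last non-NaN value seen before the gap (the metric
         // name here is purely illustrative).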
   575  func keepLastValue(ctx *common.Context, input singlePathSpec, limit int) (ts.SeriesList, error) {
   576  	output := make([]*ts.Series, 0, len(input.Values))
   577  	for _, series := range input.Values {
   578  		consecutiveNaNs := 0
   579  		numSteps := series.Len()
   580  		vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
   581  		for i := 0; i < numSteps; i++ {
   582  			value := series.ValueAt(i)
   583  			vals.SetValueAt(i, value)
   584  			if i == 0 {
   585  				continue
   586  			}
   587  			if math.IsNaN(value) {
   588  				consecutiveNaNs++
   589  			} else {
   590  				if limit == -1 || (consecutiveNaNs > 0 && consecutiveNaNs <= limit) {
   591  					v := series.ValueAt(i - consecutiveNaNs - 1)
   592  					if !math.IsNaN(v) {
   593  						for index := i - consecutiveNaNs; index < i; index++ {
   594  							vals.SetValueAt(index, v)
   595  						}
   596  					}
   597  				}
   598  				consecutiveNaNs = 0
   599  			}
   600  		}
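         		// Fill a trailing run of NaNs at the end of the series with the last
         		// non-NaN value, subject to the same limit.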
   601  		if limit == -1 || (consecutiveNaNs > 0 && consecutiveNaNs <= limit) {
   602  			for index := numSteps - consecutiveNaNs; index < numSteps; index++ {
   603  				vals.SetValueAt(index, series.ValueAt(numSteps-consecutiveNaNs-1))
   604  			}
   605  		}
   606  		name := fmt.Sprintf("keepLastValue(%s)", series.Name())
   607  		newSeries := ts.NewSeries(ctx, name, series.StartTime(), vals)
   608  		output = append(output, newSeries)
   609  	}
   610  
   611  	r := ts.SeriesList(input)
   612  	r.Values = output
   613  	return r, nil
   614  }
   615  
   616  func roundFunction(ctx *common.Context, input singlePathSpec, precision int) (ts.SeriesList, error) {
   617  	output := make([]*ts.Series, 0, len(input.Values))
   618  	for _, series := range input.Values {
   619  		numSteps := series.Len()
   620  		vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
   621  		for i := 0; i < numSteps; i++ {
   622  			value := series.ValueAt(i)
   623  			if !math.IsNaN(value) {
   624  				value = roundTo(value, int32(precision))
   625  			}
   626  			vals.SetValueAt(i, value)
   627  		}
   628  		name := ""
   629  		if precision == 0 {
   630  			name = fmt.Sprintf("roundFunction(%s)", series.Name())
   631  		} else {
   632  			name = fmt.Sprintf("roundFunction(%s,%d)", series.Name(), precision)
   633  		}
   634  		newSeries := ts.NewSeries(ctx, name, series.StartTime(), vals)
   635  		output = append(output, newSeries)
   636  	}
   637  
   638  	r := ts.SeriesList(input)
   639  	r.Values = output
   640  	return r, nil
   641  }
   642  
   643  type comparator func(float64, float64) bool
   644  
   645  // equalFunc checks whether x is equal to y
   646  func equalFunc(x float64, y float64) bool {
   647  	return x == y
   648  }
   649  
   650  // notEqualFunc checks whether x is not equal to y
   651  func notEqualFunc(x float64, y float64) bool {
   652  	return x != y
   653  }
   654  
   655  // greaterFunc checks whether x is greater than y
   656  func greaterFunc(x float64, y float64) bool {
   657  	return x > y
   658  }
   659  
   660  // greaterOrEqualFunc checks whether x is greater or equal to y
   661  func greaterOrEqualFunc(x float64, y float64) bool {
   662  	return x >= y
   663  }
   664  
   665  // lessFunc checks whether x is less than y
   666  func lessFunc(x float64, y float64) bool {
   667  	return x < y
   668  }
   669  
   670  // lessOrEqualFunc checks whether x is less than or equal to y
   671  func lessOrEqualFunc(x float64, y float64) bool {
   672  	return x <= y
   673  }
   674  
   675  var comparatorFns = map[string]comparator{
   676  	"=":  equalFunc,
   677  	"!=": notEqualFunc,
   678  	">":  greaterFunc,
   679  	">=": greaterOrEqualFunc,
   680  	"<":  lessFunc,
   681  	"<=": lessOrEqualFunc,
   682  }
   683  
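         // filterSeries takes one metric or a wildcard seriesList followed by an
         // aggregation function, a comparison operator and a threshold, and returns
         // only the series whose aggregated value satisfies the comparison,
         // e.g. filterSeries(some.metric.*, "max", ">", 1000) (illustrative target).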
   684  func filterSeries(
   685  	_ *common.Context,
   686  	input singlePathSpec,
   687  	aggregationFn string,
   688  	comparator string,
   689  	threshold float64) (ts.SeriesList, error) {
   690  	comparatorFn, ok := comparatorFns[comparator]
   691  	if !ok {
   692  		return ts.SeriesList{}, xerrors.NewInvalidParamsError(fmt.Errorf("invalid comparator: %s", comparator))
   693  	}
   694  
   695  	filteredSeries := make([]*ts.Series, 0, len(input.Values))
   696  	for _, series := range input.Values {
   697  		safeAggFn, ok := common.SafeAggregationFns[aggregationFn]
   698  		if !ok {
   699  			return ts.SeriesList{}, xerrors.NewInvalidParamsError(fmt.Errorf("invalid function: %s", aggregationFn))
   700  		}
   701  
   702  		aggregatedValue, _, ok := safeAggFn(series.SafeValues())
   703  		if !ok {
   704  			// No elements or all nans, similar skipping behavior to filterSeries
   705  			// for graphite web where comparator only ran against series when the value
   706  			// is not python None type which is returned when safe... function
   707  			// cannot be applied to the set of values due to no safe values being
   708  			// present due to empty series or all nans.
   709  			continue
   710  		}
   711  
   712  		if comparatorFn(aggregatedValue, threshold) {
   713  			filteredSeries = append(filteredSeries, series)
   714  		}
   715  	}
   716  
   717  	r := ts.SeriesList(input)
   718  	r.Values = filteredSeries
   719  	return r, nil
   720  }
   721  
   722  func sustainedCompare(ctx *common.Context, input singlePathSpec, threshold float64, intervalString string,
   723  	comparisonFunction comparator, zeroValue float64, funcName string) (ts.SeriesList, error) {
   724  	output := make([]*ts.Series, 0, len(input.Values))
   725  	interval, err := common.ParseInterval(intervalString)
   726  	if err != nil {
   727  		return ts.NewSeriesList(), err
   728  	}
   729  
   730  	intervalMillis := int(interval / time.Millisecond)
   731  	for _, series := range input.Values {
   732  		numSteps := series.Len()
   733  		vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
   734  
   735  		minSteps := intervalMillis / series.MillisPerStep()
   736  		currSteps := 0
   737  
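         		// Only keep a value once the comparison has held for at least minSteps
         		// consecutive points (i.e. the full interval); otherwise emit zeroValue.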
   738  		for i := 0; i < numSteps; i++ {
   739  			value := series.ValueAt(i)
   740  			if comparisonFunction(value, threshold) {
   741  				currSteps++
   742  			} else {
   743  				currSteps = 0
   744  			}
   745  			if currSteps >= minSteps {
   746  				vals.SetValueAt(i, value)
   747  			} else {
   748  				vals.SetValueAt(i, zeroValue)
   749  			}
   750  		}
   751  
   752  		name := fmt.Sprintf("%s(%s, %f, '%s')",
   753  			funcName, series.Name(), threshold, intervalString)
   754  		newSeries := ts.NewSeries(ctx, name, series.StartTime(), vals)
   755  		output = append(output, newSeries)
   756  	}
   757  
   758  	r := ts.SeriesList(input)
   759  	r.Values = output
   760  	return r, nil
   761  }
   762  
   763  func sustainedAbove(ctx *common.Context, input singlePathSpec, threshold float64, intervalString string) (ts.SeriesList, error) {
   764  	return sustainedCompare(ctx, input, threshold, intervalString, greaterOrEqualFunc, threshold-math.Abs(threshold), "sustainedAbove")
   765  }
   766  
   767  func sustainedBelow(ctx *common.Context, input singlePathSpec, threshold float64, intervalString string) (ts.SeriesList, error) {
   768  	return sustainedCompare(ctx, input, threshold, intervalString, lessOrEqualFunc, threshold+math.Abs(threshold), "sustainedBelow")
   769  }
   770  
   771  // removeBelowValue removes data below the given threshold from the series or list of series provided.
   772  // Values below this threshold are assigned a value of None.
   773  func removeBelowValue(ctx *common.Context, input singlePathSpec, n float64) (ts.SeriesList, error) {
   774  	return transform(ctx, input,
   775  		func(inputName string) string {
   776  			return fmt.Sprintf("removeBelowValue(%s, "+common.FloatingPointFormat+")", inputName, n)
   777  		},
   778  		common.Filter(func(v float64) bool { return v >= n }))
   779  }
   780  
   781  // removeAboveValue removes data above the given threshold from the series or list of series provided.
   782  // Values above this threshold are assigned a value of None.
   783  func removeAboveValue(ctx *common.Context, input singlePathSpec, n float64) (ts.SeriesList, error) {
   784  	return transform(ctx, input,
   785  		func(inputName string) string {
   786  			return fmt.Sprintf("removeAboveValue(%s, "+common.FloatingPointFormat+")", inputName, n)
   787  		},
   788  		common.Filter(func(v float64) bool { return v <= n }))
   789  }
   790  
   791  // removeEmptySeries returns only the time-series with non-empty data
   792  func removeEmptySeries(ctx *common.Context, input singlePathSpec, xFilesFactor float64) (ts.SeriesList, error) {
   793  	return common.RemoveEmpty(ctx, ts.SeriesList(input), xFilesFactor)
   794  }
   795  
   796  func takeByFunction(input singlePathSpec, n int, sr ts.SeriesReducer, sort ts.Direction) (ts.SeriesList, error) {
   797  	result, err := ts.SortSeries(ts.SeriesList(input), sr, sort)
   798  	if err != nil {
   799  		return ts.NewSeriesList(), err
   800  	}
   801  	return common.Head(result, n)
   802  }
   803  
   804  func getReducer(f string) (ts.SeriesReducer, error) {
   805  	sa := ts.SeriesReducerApproach(f)
   806  	r, ok := sa.SafeReducer()
   807  	if !ok {
   808  		return r, xerrors.NewInvalidParamsError(fmt.Errorf("invalid function %s", f))
   809  	}
   810  	return r, nil
   811  }
   812  
   813  // highest takes one metric or a wildcard seriesList followed by an integer N and an aggregation function.
   814  // Out of all metrics passed, draws only the N metrics with the highest
   815  // aggregated value over the time period specified.
   816  func highest(_ *common.Context, input singlePathSpec, n int, f string) (ts.SeriesList, error) {
   817  	reducer, err := getReducer(f)
   818  	if err != nil {
   819  		return ts.NewSeriesList(), err
   820  	}
   821  	return takeByFunction(input, n, reducer, ts.Descending)
   822  }
   823  
   824  // highestSum takes one metric or a wildcard seriesList followed by an integer
   825  // n.  Out of all metrics passed, draws only the N metrics with the highest
   826  // total value in the time period specified.
   827  func highestSum(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   828  	return highest(ctx, input, n, "sum")
   829  }
   830  
   831  // highestMax takes one metric or a wildcard seriesList followed by an integer
   832  // n.  Out of all metrics passed, draws only the N metrics with the highest
   833  // maximum value in the time period specified.
   834  func highestMax(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   835  	return highest(ctx, input, n, "max")
   836  }
   837  
   838  // highestCurrent takes one metric or a wildcard seriesList followed by an
   839  // integer n.  Out of all metrics passed, draws only the N metrics with the
   840  // highest value at the end of the time period specified.
   841  func highestCurrent(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   842  	return highest(ctx, input, n, "current")
   843  }
   844  
   845  // highestAverage takes one metric or a wildcard seriesList followed by an
   846  // integer n.  Out of all metrics passed, draws only the top N metrics with the
   847  // highest average value for the time period specified.
   848  func highestAverage(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   849  	return highest(ctx, input, n, "average")
   850  }
   851  
   852  // fallbackSeries takes one metric or a wildcard seriesList, and a second fallback metric.
   853  // If the wildcard does not match any series, draws the fallback metric.
   854  func fallbackSeries(_ *common.Context, input singlePathSpec, fallback singlePathSpec) (ts.SeriesList, error) {
   855  	if len(input.Values) > 0 {
   856  		return ts.SeriesList(input), nil
   857  	}
   858  
   859  	return ts.SeriesList(fallback), nil
   860  }
   861  
   862  // mostDeviant takes one metric or a wildcard seriesList followed by an integer
   863  // N. Draws the N most deviant metrics.  To find the deviants, the standard
   864  // deviation (sigma) of each series is taken and ranked.  The top N standard
   865  // deviations are returned.
   866  func mostDeviant(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   867  	return highest(ctx, input, n, "stddev")
   868  }
   869  
   870  // lowest takes one metric or a wildcard seriesList followed by an integer N and an aggregation function.
   871  // Out of all metrics passed, draws only the N metrics with the lowest
   872  // aggregated value over the time period specified.
   873  func lowest(_ *common.Context, input singlePathSpec, n int, f string) (ts.SeriesList, error) {
   874  	reducer, err := getReducer(f)
   875  	if err != nil {
   876  		return ts.NewSeriesList(), err
   877  	}
   878  	return takeByFunction(input, n, reducer, ts.Ascending)
   879  }
   880  
   881  // lowestAverage takes one metric or a wildcard seriesList followed by an
   882  // integer n.  Out of all metrics passed, draws only the top N metrics with the
   883  // lowest average value for the time period specified.
   884  func lowestAverage(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   885  	return lowest(ctx, input, n, "average")
   886  }
   887  
   888  // lowestCurrent takes one metric or a wildcard seriesList followed by an
   889  // integer n.  Out of all metrics passed, draws only the N metrics with the
   890  // lowest value at the end of the time period specified.
   891  func lowestCurrent(ctx *common.Context, input singlePathSpec, n int) (ts.SeriesList, error) {
   892  	return lowest(ctx, input, n, "current")
   893  }
   894  
    895  // effectiveXFF returns true if the percentage of non-null points in the window is at least the xFilesFactor, false if not
   896  func effectiveXFF(windowPoints, nans int, xFilesFactor float64) bool {
   897  	return float64(windowPoints-nans)/float64(windowPoints) >= xFilesFactor
   898  }
   899  
   900  // nolint: lll
   901  type windowSizeParsed struct {
   902  	deltaValue  time.Duration
   903  	stringValue string
    904  	// exponentialMovingAverageConstant is the exponential moving average
    905  	// constant used by exponentialMovingAverage. It differs depending on how
    906  	// the window size was specified: either a string interval, or a number
    907  	// that multiplies the max resolution/step of all the series.
   908  	// See:
   909  	// https://github.com/graphite-project/graphite-web/blob/f1acda6590627dabccf877ed309f28b7a7263374/webapp/graphite/render/functions.py#L1370-L1376
   910  	exponentialMovingAverageConstant float64
   911  }
   912  
   913  func parseWindowSize(windowSizeValue genericInterface, input singlePathSpec) (windowSizeParsed, error) {
   914  	windowSize := windowSizeParsed{}
   915  
   916  	switch windowSizeValue := windowSizeValue.(type) {
   917  	case string:
   918  		interval, err := common.ParseInterval(windowSizeValue)
   919  		if err != nil {
   920  			return windowSize, err
   921  		}
   922  		if interval <= 0 {
   923  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
   924  				"windowSize must be positive but instead is %v",
   925  				interval))
   926  			return windowSize, err
   927  		}
   928  		windowSize.stringValue = fmt.Sprintf("%q", windowSizeValue)
   929  		windowSize.deltaValue = interval
   930  		windowSize.exponentialMovingAverageConstant = 2.0 / (1.0 + float64(int(interval/time.Second)))
   931  	case float64:
   932  		if len(input.Values) == 0 {
   933  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
    934  				"windowSize is a %T but there are no timeseries in the window to multiply the resolution by",
   935  				windowSizeValue))
   936  			return windowSize, err
   937  		}
   938  
   939  		windowSizeInt := int(windowSizeValue)
   940  		if windowSizeInt <= 0 {
   941  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
   942  				"windowSize must be positive but instead is %d",
   943  				windowSizeInt))
   944  			return windowSize, err
   945  		}
   946  		windowSize.stringValue = fmt.Sprintf("%d", windowSizeInt)
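         		// Use the largest step across all input series so the window spans
         		// windowSizeInt points of the lowest-resolution series.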
   947  		maxStepSize := input.Values[0].MillisPerStep()
   948  		for i := 1; i < len(input.Values); i++ {
   949  			maxStepSize = int(math.Max(float64(maxStepSize), float64(input.Values[i].MillisPerStep())))
   950  		}
   951  		windowSize.deltaValue = time.Duration(maxStepSize*windowSizeInt) * time.Millisecond
   952  		windowSize.exponentialMovingAverageConstant = 2.0 / (1.0 + float64(windowSizeInt))
   953  	default:
   954  		err := xerrors.NewInvalidParamsError(fmt.Errorf(
   955  			"windowSize must be either a string or an int but instead is a %T",
   956  			windowSizeValue))
   957  		return windowSize, err
   958  	}
   959  	return windowSize, nil
   960  }
   961  
   962  // exponentialMovingAverage takes a series of values and a window size and produces
   963  // an exponential moving average utilizing the following formula:
   964  // 		ema(current) = constant * (Current Value) + (1 - constant) * ema(previous)
   965  // The `constant` is calculated as:
   966  // 		constant = 2 / (windowSize + 1)
    967  // The first period EMA uses a simple moving average for its value.
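         //
         // For example, a windowSize of 9 gives constant = 2 / (9 + 1) = 0.2, so each
         // point blends 20% of the current value with 80% of the previous EMA.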
   968  func exponentialMovingAverage(
   969  	ctx *common.Context,
   970  	input singlePathSpec,
   971  	windowSize genericInterface,
   972  ) (*unaryContextShifter, error) {
   973  	return newMovingBinaryTransform(ctx, input, windowSize,
   974  		"exponentialMovingAverage", defaultXFilesFactor,
   975  		newExponentialMovingAverageImpl())
   976  }
   977  
   978  var _ movingImpl = (*exponentialMovingAverageImpl)(nil)
   979  
   980  type exponentialMovingAverageImpl struct {
   981  	curr        movingImplResetOptions
   982  	ema         float64
   983  	emaConstant float64
   984  }
   985  
   986  func newExponentialMovingAverageImpl() *exponentialMovingAverageImpl {
   987  	return &exponentialMovingAverageImpl{}
   988  }
   989  
   990  func (impl *exponentialMovingAverageImpl) Reset(opts movingImplResetOptions) error {
   991  	impl.curr = opts
   992  	// EMA constant is based on window size seconds or num steps, see
   993  	// parseWindowSize and returned type windowSizeParsed for more details.
   994  	impl.emaConstant = opts.WindowSize.exponentialMovingAverageConstant
   995  
   996  	firstWindow, err := impl.curr.Series.Slice(0, impl.curr.WindowPoints)
   997  	if err != nil {
   998  		return err
   999  	}
  1000  
  1001  	// The first value is just a regular moving average.
  1002  	impl.ema = firstWindow.SafeAvg()
  1003  	if !math.IsNaN(impl.ema) {
  1004  		return nil
  1005  	}
  1006  
  1007  	// Use zero if NaN.
  1008  	impl.ema = 0
  1009  	return nil
  1010  }
  1011  
  1012  func (impl *exponentialMovingAverageImpl) Evaluate(
  1013  	window []float64,
  1014  	values ts.MutableValues,
  1015  	windowPoints int,
  1016  	i int,
  1017  	_ float64,
  1018  ) {
  1019  	if i == 0 {
  1020  		// First value is the first moving average value.
  1021  		// Note: roundTo rounds to 6 decimal places to match graphite rounding.
  1022  		values.SetValueAt(i, roundTo(impl.ema, 6))
  1023  		return
  1024  	}
  1025  
  1026  	curr := math.NaN()
  1027  	if idx := i + impl.curr.WindowPoints; idx < impl.curr.Series.Len() {
  1028  		curr = impl.curr.Series.ValueAt(idx)
  1029  	}
  1030  
  1031  	if !math.IsNaN(curr) {
  1032  		// Formula: ema(current) = constant * (Current Value) + (1 - constant) * ema(previous).
  1033  		impl.ema = impl.emaConstant*curr + (1-impl.emaConstant)*impl.ema
  1034  		// Note: roundTo rounds to 6 decimal places to match graphite rounding.
  1035  		values.SetValueAt(i, roundTo(impl.ema, 6))
  1036  	} else {
  1037  		values.SetValueAt(i, math.NaN())
  1038  	}
  1039  }
  1040  
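         // roundTo rounds n to the given number of decimal places; a negative decimals
         // value rounds to the left of the decimal point, e.g. roundTo(1234, -2) == 1200.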
  1041  func roundTo(n float64, decimals int32) float64 {
  1042  	if decimals < 0 {
  1043  		return math.Round(n/math.Pow(10, math.Abs(float64(decimals)))) * math.Pow(10, math.Abs(float64(decimals)))
  1044  	}
  1045  	return math.Round(n*math.Pow(10, float64(decimals))) / math.Pow(10, float64(decimals))
  1046  }
  1047  
   1048  // totalFunc takes an index and a series and returns a total value for that index
  1049  type totalFunc func(int, *ts.Series) float64
  1050  
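         // totalBySum sums the non-NaN values at index across seriesList, returning
         // NaN when every series is NaN at that index.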
  1051  func totalBySum(seriesList []*ts.Series, index int) float64 {
  1052  	s, hasValue := 0.0, false
  1053  	for _, series := range seriesList {
  1054  		v := series.ValueAt(index)
  1055  		if !math.IsNaN(v) {
  1056  			hasValue = true
  1057  			s += v
  1058  		}
  1059  	}
  1060  	if hasValue {
  1061  		return s
  1062  	}
  1063  	return math.NaN()
  1064  }
  1065  
  1066  // asPercent calculates a percentage of the total of a wildcard series.
  1067  func asPercent(ctx *common.Context, input singlePathSpec, total genericInterface, nodes ...int) (ts.SeriesList, error) {
  1068  	if len(input.Values) == 0 {
  1069  		return ts.SeriesList(input), nil
  1070  	}
  1071  
  1072  	if len(nodes) > 0 {
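         		// Group both the input and (when given) the total series by the key built
         		// from the requested nodes, summing each group, then compute percentages
         		// per group; a key present on only one side yields a NaN "MISSING" series.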
  1073  		metaSeries, err := getMetaSeriesGrouping(input, nodes)
  1074  		if err != nil {
  1075  			return ts.NewSeriesList(), err
  1076  		}
  1077  		totalSeries := make(map[string]*ts.Series)
  1078  
  1079  		switch totalArg := total.(type) {
  1080  		case nil:
  1081  			for k, series := range metaSeries {
  1082  				if len(series) == 1 {
  1083  					totalSeries[k] = series[0]
  1084  				} else {
  1085  					group, err := sumSeries(ctx, multiplePathSpecs(ts.NewSeriesListWithSeries(series...)))
  1086  					if err != nil {
  1087  						return ts.NewSeriesList(), err
  1088  					}
  1089  					totalSeries[k] = group.Values[0]
  1090  				}
  1091  			}
  1092  		case ts.SeriesList, singlePathSpec:
  1093  			var total ts.SeriesList
  1094  			switch v := totalArg.(type) {
  1095  			case ts.SeriesList:
  1096  				total = v
  1097  			case singlePathSpec:
  1098  				total = ts.SeriesList(v)
  1099  			}
  1100  			totalGroups, err := getMetaSeriesGrouping(singlePathSpec(total), nodes)
  1101  			if err != nil {
  1102  				return ts.NewSeriesList(), err
  1103  			}
  1104  			for k, series := range totalGroups {
  1105  				if len(series) == 1 {
  1106  					totalSeries[k] = series[0]
  1107  				} else {
  1108  					group, err := sumSeries(ctx, multiplePathSpecs(ts.NewSeriesListWithSeries(series...)))
  1109  					if err != nil {
  1110  						return ts.NewSeriesList(), err
  1111  					}
  1112  					totalSeries[k] = group.Values[0]
  1113  				}
  1114  			}
  1115  		default:
  1116  			return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("total must be nil or series list"))
  1117  		}
  1118  
  1119  		keys := make([]string, 0, len(metaSeries)+len(totalSeries))
  1120  		// Create meta series keys copy.
  1121  		for k := range metaSeries {
  1122  			keys = append(keys, k)
  1123  		}
  1124  		// Add any missing keys from totals.
  1125  		for k := range totalSeries {
  1126  			if _, ok := metaSeries[k]; ok {
  1127  				continue
  1128  			}
  1129  			keys = append(keys, k)
  1130  		}
  1131  		// Sort keys for determinism and test result.
  1132  		sort.Strings(keys)
  1133  
  1134  		results := make([]*ts.Series, 0, len(metaSeries))
  1135  		for _, k := range keys {
  1136  			seriesList, ok := metaSeries[k]
  1137  			if !ok {
  1138  				total := totalSeries[k]
  1139  				newName := fmt.Sprintf("asPercent(MISSING,%s)", total.Name())
  1140  				nanValues := ts.NewConstantValues(ctx, math.NaN(), total.Len(), total.MillisPerStep())
  1141  				newSeries := ts.NewSeries(ctx, newName, total.StartTime(), nanValues)
  1142  				results = append(results, newSeries)
  1143  				continue
  1144  			}
  1145  
  1146  			for _, series := range seriesList {
  1147  				total, ok := totalSeries[k]
  1148  				if !ok {
  1149  					newName := fmt.Sprintf("asPercent(%s,MISSING)", series.Name())
  1150  					nanValues := ts.NewConstantValues(ctx, math.NaN(), series.Len(), series.MillisPerStep())
  1151  					newSeries := ts.NewSeries(ctx, newName, series.StartTime(), nanValues)
  1152  					results = append(results, newSeries)
  1153  					continue
  1154  				}
  1155  
  1156  				normalized, start, _, millisPerStep, err := common.Normalize(ctx,
  1157  					ts.NewSeriesListWithSeries(series, total))
  1158  				if err != nil {
  1159  					return ts.NewSeriesList(), err
  1160  				}
  1161  
  1162  				steps := normalized.Values[0].Len()
  1163  				values := ts.NewValues(ctx, millisPerStep, steps)
  1164  				for i := 0; i < steps; i++ {
  1165  					v, t := normalized.Values[0].ValueAt(i), normalized.Values[1].ValueAt(i)
  1166  					if !math.IsNaN(v) && !math.IsNaN(t) && t != 0 {
  1167  						values.SetValueAt(i, (v/t)*100.0)
  1168  					}
  1169  				}
  1170  
  1171  				newName := fmt.Sprintf("asPercent(%s,%s)", series.Name(), total.Name())
  1172  				divided := ts.NewSeries(ctx, newName, start, values)
  1173  				results = append(results, divided)
  1174  			}
  1175  		}
  1176  
  1177  		result := ts.NewSeriesList()
  1178  		result.Values = results
  1179  		return result, nil
  1180  	}
  1181  
  1182  	var totalSeriesList ts.SeriesList
  1183  	switch totalArg := total.(type) {
  1184  	case float64:
  1185  		// No normalization required, simply divide each by constant.
  1186  		results := make([]*ts.Series, 0, len(input.Values))
  1187  		totalText := fmt.Sprintf(common.FloatingPointFormat, totalArg)
  1188  		for _, series := range input.Values {
  1189  			steps := series.Len()
  1190  			values := ts.NewValues(ctx, series.MillisPerStep(), steps)
  1191  			for i := 0; i < steps; i++ {
  1192  				v, t := series.ValueAt(i), totalArg
  1193  				if !math.IsNaN(v) && !math.IsNaN(t) && t != 0 {
  1194  					values.SetValueAt(i, (v/t)*100.0)
  1195  				}
  1196  			}
  1197  			newName := fmt.Sprintf("asPercent(%s,%s)", series.Name(), totalText)
  1198  			divided := ts.NewSeries(ctx, newName, series.StartTime(), values)
  1199  			results = append(results, divided)
  1200  		}
  1201  		result := ts.NewSeriesList()
  1202  		result.Values = results
  1203  		return result, nil
  1204  	case nil:
  1205  		// Totals of the total sum.
  1206  		sum, err := sumSeries(ctx, multiplePathSpecs(input))
  1207  		if err != nil {
  1208  			return ts.NewSeriesList(), err
  1209  		}
  1210  		totalSeriesList = sum
  1211  	case ts.SeriesList, singlePathSpec:
  1212  		// Total of a single series or same number of matching series.
  1213  		var total ts.SeriesList
  1214  		switch v := totalArg.(type) {
  1215  		case ts.SeriesList:
  1216  			total = v
  1217  		case singlePathSpec:
  1218  			total = ts.SeriesList(v)
  1219  		}
  1220  		if total.Len() != 1 && total.Len() != len(input.Values) {
  1221  			return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf(
  1222  				"require total to be missing, float, single series or same number of series: series=%d, total=%d",
  1223  				len(input.Values), total.Len()))
  1224  		}
  1225  		// Sort both by name so they get divided by same name if same length
  1226  		// or closest match.
  1227  		sort.Slice(input.Values, func(i, j int) bool {
  1228  			return strings.Compare(input.Values[i].Name(), input.Values[j].Name()) < 0
  1229  		})
  1230  		sort.Slice(total.Values, func(i, j int) bool {
  1231  			return strings.Compare(total.Values[i].Name(), total.Values[j].Name()) < 0
  1232  		})
  1233  		totalSeriesList = total
  1234  	default:
  1235  		err := xerrors.NewInvalidParamsError(errors.New(
   1236  			"total must be either nil, a float, a single series, or the same number of series"))
  1237  		return ts.NewSeriesList(), err
  1238  	}
  1239  
  1240  	results := make([]*ts.Series, 0, len(input.Values))
  1241  	for idx, series := range input.Values {
  1242  		totalSeries := totalSeriesList.Values[0]
  1243  		if totalSeriesList.Len() == len(input.Values) {
  1244  			// Divide each by their matching total if matching
  1245  			// number of total.
  1246  			totalSeries = totalSeriesList.Values[idx]
  1247  		}
  1248  		normalized, start, _, millisPerStep, err := common.Normalize(ctx,
  1249  			ts.NewSeriesListWithSeries(series, totalSeries))
  1250  		if err != nil {
   1251  			return ts.NewSeriesList(), err
  1252  		}
  1253  
  1254  		steps := normalized.Values[0].Len()
  1255  		values := ts.NewValues(ctx, millisPerStep, steps)
  1256  		for i := 0; i < steps; i++ {
  1257  			v, t := normalized.Values[0].ValueAt(i), normalized.Values[1].ValueAt(i)
  1258  			if !math.IsNaN(v) && !math.IsNaN(t) && t != 0 {
  1259  				values.SetValueAt(i, (v/t)*100.0)
  1260  			}
  1261  		}
  1262  		newName := fmt.Sprintf("asPercent(%s,%s)", series.Name(), totalSeries.Name())
  1263  		divided := ts.NewSeries(ctx, newName, start, values)
  1264  		results = append(results, divided)
  1265  	}
  1266  	result := ts.NewSeriesList()
  1267  	result.Values = results
  1268  	return result, nil
  1269  }
  1270  
  1271  // exclude takes a metric or a wildcard seriesList, followed by a regular
  1272  // expression in double quotes.  Excludes metrics that match the regular
  1273  // expression.
  1274  func exclude(_ *common.Context, input singlePathSpec, pattern string) (ts.SeriesList, error) {
  1275  	rePattern, err := regexp.Compile(pattern)
  1276  	// NB(rooz): we decided to just fail if regex compilation fails to
  1277  	// differentiate it from an all-excluding regex
  1278  	if err != nil {
  1279  		return ts.NewSeriesList(), err
  1280  	}
  1281  
  1282  	output := make([]*ts.Series, 0, len(input.Values))
  1283  	for _, in := range input.Values {
  1284  		if m := rePattern.FindStringSubmatch(strings.TrimSpace(in.Name())); len(m) == 0 {
  1285  			output = append(output, in)
  1286  		}
  1287  	}
  1288  
  1289  	r := ts.SeriesList(input)
  1290  	r.Values = output
  1291  	return r, nil
  1292  }
  1293  
   1294  // pow takes one metric or a wildcard seriesList followed by a constant,
   1295  // and raises each datapoint to the power of the constant provided.
  1296  // nolint: gocritic
  1297  func pow(ctx *common.Context, input singlePathSpec, factor float64) (ts.SeriesList, error) {
  1298  	r := powHelper(ctx, input, factor, false)
  1299  	return r, nil
  1300  }
  1301  
  1302  // invert takes one metric or a wildcard seriesList, and inverts each datapoint (i.e. 1/x).
  1303  func invert(ctx *common.Context, input singlePathSpec) (ts.SeriesList, error) {
  1304  	r := powHelper(ctx, input, -1, true)
  1305  	return r, nil
  1306  }
  1307  
  1308  func powHelper(ctx *common.Context, input singlePathSpec, factor float64, isInvert bool) ts.SeriesList {
  1309  	results := make([]*ts.Series, 0, len(input.Values))
  1310  
  1311  	renamePrefix := "pow"
  1312  	if isInvert {
  1313  		renamePrefix = "invert"
  1314  	}
  1315  	for _, series := range input.Values {
  1316  		numSteps := series.Len()
  1317  		millisPerStep := series.MillisPerStep()
  1318  		vals := ts.NewValues(ctx, millisPerStep, numSteps)
  1319  		for i := 0; i < numSteps; i++ {
  1320  			vals.SetValueAt(i, math.Pow(series.ValueAt(i), factor))
  1321  		}
  1322  		newName := fmt.Sprintf("%s(%s, %f)", renamePrefix, series.Name(), factor)
  1323  		if isInvert {
  1324  			newName = fmt.Sprintf("%s(%s)", renamePrefix, series.Name())
  1325  		}
  1326  		results = append(results, ts.NewSeries(ctx, newName, series.StartTime(), vals))
  1327  	}
  1328  
  1329  	r := ts.SeriesList(input)
  1330  	r.Values = results
  1331  	return r
  1332  }
  1333  
  1334  // logarithm takes one metric or a wildcard seriesList, and draws the y-axis in
  1335  // logarithmic format.
  1336  func logarithm(ctx *common.Context, input singlePathSpec, base float64) (ts.SeriesList, error) {
  1337  	if base <= 0 {
  1338  		err := xerrors.NewInvalidParamsError(fmt.Errorf("invalid log base: %v", base))
  1339  		return ts.NewSeriesList(), err
  1340  	}
  1341  
  1342  	results := make([]*ts.Series, 0, len(input.Values))
  1343  	for _, series := range input.Values {
  1344  		vals := ts.NewValues(ctx, series.MillisPerStep(), series.Len())
  1345  		newName := fmt.Sprintf("log(%s, %f)", series.Name(), base)
  1346  		if series.AllNaN() {
  1347  			results = append(results, ts.NewSeries(ctx, newName, series.StartTime(), vals))
  1348  			continue
  1349  		}
  1350  
  1351  		for i := 0; i < series.Len(); i++ {
  1352  			n := series.ValueAt(i)
  1353  			if !math.IsNaN(n) && n > 0 {
  1354  				vals.SetValueAt(i, math.Log10(n)/math.Log10(base))
  1355  			}
  1356  		}
  1357  
  1358  		results = append(results, ts.NewSeries(ctx, newName, series.StartTime(), vals))
  1359  	}
  1360  
  1361  	r := ts.SeriesList(input)
  1362  	r.Values = results
  1363  	return r, nil
  1364  }
  1365  
  1366  // interpolate takes one metric or a wildcard seriesList, and optionally a limit to the number of ‘None’ values
  1367  // to skip over. Continues the line with the last received value when gaps (‘None’ values)
  1368  // appear in your data, rather than breaking your line.
  1369  //
  1370  // interpolate will not interpolate at the beginning or end of a series, only in the middle
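        // For example (illustrative), the values 1, NaN, NaN, 4 become 1, 2, 3, 4,
        // provided the gap length does not exceed the limit.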
  1371  func interpolate(ctx *common.Context, input singlePathSpec, limit int) (ts.SeriesList, error) {
  1372  	output := make([]*ts.Series, 0, len(input.Values))
  1373  	for _, series := range input.Values {
  1374  		var (
  1375  			consecutiveNaNs = 0
  1376  			numSteps        = series.Len()
  1377  			vals            = ts.NewValues(ctx, series.MillisPerStep(), numSteps)
  1378  			firstNonNan     = false
  1379  		)
  1380  
  1381  		for i := 0; i < numSteps; i++ {
  1382  			value := series.ValueAt(i)
  1383  			vals.SetValueAt(i, value)
  1384  
  1385  			if math.IsNaN(value) {
  1386  				if !firstNonNan {
  1387  					continue
  1388  				}
  1389  
  1390  				consecutiveNaNs++
  1391  				if limit >= 0 && consecutiveNaNs > limit {
  1392  					consecutiveNaNs = 0
  1393  				}
  1394  
  1395  				continue
  1396  			} else {
  1397  				firstNonNan = true
  1398  			}
  1399  
  1400  			if consecutiveNaNs == 0 {
  1401  				// have a value but no need to interpolate
  1402  				continue
  1403  			}
  1404  
  1405  			interpolated := series.ValueAt(i - consecutiveNaNs - 1)
  1406  			interpolateStep := (value - interpolated) / float64(consecutiveNaNs+1)
  1407  			for index := i - consecutiveNaNs; index < i; index++ {
  1408  				interpolated = interpolated + interpolateStep
  1409  				vals.SetValueAt(index, interpolated)
  1410  			}
  1411  
  1412  			consecutiveNaNs = 0
  1413  		}
  1414  
  1415  		name := fmt.Sprintf("interpolate(%s)", series.Name())
  1416  		newSeries := ts.NewSeries(ctx, name, series.StartTime(), vals)
  1417  		output = append(output, newSeries)
  1418  	}
  1419  	r := ts.SeriesList(input)
  1420  	r.Values = output
  1421  	return r, nil
  1422  }
  1423  
  1424  // group takes an arbitrary number of pathspecs and adds them to a single timeseries array.
  1425  // This function is used to pass multiple pathspecs to a function which only takes one
  1426  func group(_ *common.Context, input multiplePathSpecs) (ts.SeriesList, error) {
  1427  	return ts.SeriesList(input), nil
  1428  }
  1429  
  1430  func derivativeTemplate(ctx *common.Context, input singlePathSpec, nameTemplate string,
  1431  	fn func(float64, float64) float64) (ts.SeriesList, error) {
  1432  	output := make([]*ts.Series, len(input.Values))
  1433  	for i, in := range input.Values {
  1434  		derivativeValues := ts.NewValues(ctx, in.MillisPerStep(), in.Len())
  1435  		previousValue := math.NaN()
  1436  
  1437  		for step := 0; step < in.Len(); step++ {
  1438  			value := in.ValueAt(step)
  1439  			if math.IsNaN(value) || math.IsNaN(previousValue) {
  1440  				derivativeValues.SetValueAt(step, math.NaN())
  1441  			} else {
  1442  				derivativeValues.SetValueAt(step, fn(value, previousValue))
  1443  			}
  1444  
  1445  			previousValue = value
  1446  		}
  1447  
  1448  		name := fmt.Sprintf("%s(%s)", nameTemplate, in.Name())
  1449  		output[i] = ts.NewSeries(ctx, name, in.StartTime(), derivativeValues)
  1450  	}
  1451  
  1452  	r := ts.SeriesList(input)
  1453  	r.Values = output
  1454  	return r, nil
  1455  }
  1456  
  1457  // integral shows the sum over time, sort of like a continuous addition function.
  1458  //  Useful for finding totals or trends in metrics that are collected per minute.
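        // For example (illustrative), the datapoints 1, 2, 3 become the running
        // totals 1, 3, 6; NaN datapoints are skipped.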
  1459  func integral(ctx *common.Context, input singlePathSpec) (ts.SeriesList, error) {
  1460  	results := make([]*ts.Series, 0, len(input.Values))
  1461  	for _, series := range input.Values {
  1462  		if series.AllNaN() {
  1463  			results = append(results, series)
  1464  			continue
  1465  		}
  1466  
  1467  		outvals := ts.NewValues(ctx, series.MillisPerStep(), series.Len())
  1468  		var current float64
  1469  		for i := 0; i < series.Len(); i++ {
  1470  			n := series.ValueAt(i)
  1471  			if !math.IsNaN(n) {
  1472  				current += n
  1473  				outvals.SetValueAt(i, current)
  1474  			}
  1475  		}
  1476  
  1477  		newName := fmt.Sprintf("integral(%s)", series.Name())
  1478  		results = append(results, ts.NewSeries(ctx, newName, series.StartTime(), outvals))
  1479  	}
  1480  
  1481  	r := ts.SeriesList(input)
  1482  	r.Values = results
  1483  	return r, nil
  1484  }
  1485  
  1486  // integralByInterval will do the same as the integral function, except it resets the total to 0
  1487  // at the given time in the parameter “from”. Useful for finding totals per hour/day/week.
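        // For example (illustrative), integralByInterval(some.metric, "1h") over a
        // series with a one-minute step keeps a running sum that restarts every
        // 60 datapoints.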
  1488  func integralByInterval(ctx *common.Context, input singlePathSpec, intervalString string) (ts.SeriesList, error) {
  1489  	intervalUnit, err := common.ParseInterval(intervalString)
  1490  	if err != nil {
  1491  		return ts.NewSeriesList(), err
  1492  	}
  1493  	results := make([]*ts.Series, 0, len(input.Values))
  1494  
  1495  	for _, series := range input.Values {
  1496  		var (
  1497  			stepsPerInterval = intervalUnit.Milliseconds() / int64(series.MillisPerStep())
  1498  			outVals          = ts.NewValues(ctx, series.MillisPerStep(), series.Len())
  1499  			stepCounter      int64
  1500  			currentSum       float64
  1501  		)
  1502  
  1503  		for i := 0; i < series.Len(); i++ {
  1504  			if stepCounter == stepsPerInterval {
  1505  				// startNewInterval
  1506  				stepCounter = 0
  1507  				currentSum = 0.0
  1508  			}
  1509  			n := series.ValueAt(i)
  1510  			if !math.IsNaN(n) {
  1511  				currentSum += n
  1512  			}
  1513  			outVals.SetValueAt(i, currentSum)
  1514  			stepCounter++
  1515  		}
  1516  
  1517  		newName := fmt.Sprintf("integralByInterval(%s, %s)", series.Name(), intervalString)
  1518  		results = append(results, ts.NewSeries(ctx, newName, series.StartTime(), outVals))
  1519  	}
  1520  
  1521  	r := ts.SeriesList(input)
  1522  	r.Values = results
  1523  	return r, nil
  1524  }
  1525  
  1526  // This is the opposite of the integral function.  This is useful for taking a
  1527  // running total metric and calculating the delta between subsequent data
  1528  // points.
  1529  func derivative(ctx *common.Context, input singlePathSpec) (ts.SeriesList, error) {
  1530  	return derivativeTemplate(ctx, input, "derivative", func(value, previousValue float64) float64 {
  1531  		return value - previousValue
  1532  	})
  1533  }
  1534  
  1535  // Same as the derivative function above, but ignores datapoints that trend down.
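        // When maxValue is supplied, a drop is treated as a counter wrap, e.g.
        // (illustrative) with maxValue=255, previousValue=250 and value=5 the
        // emitted delta is (255-250)+5+1 = 11; without maxValue, drops emit NaN.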
  1536  func nonNegativeDerivative(ctx *common.Context, input singlePathSpec, maxValue float64) (ts.SeriesList, error) {
  1537  	return derivativeTemplate(ctx, input, "nonNegativeDerivative", func(value, previousValue float64) float64 {
  1538  		difference := value - previousValue
  1539  		if difference >= 0 {
  1540  			return difference
  1541  		} else if !math.IsNaN(maxValue) && maxValue >= value {
  1542  			return (maxValue - previousValue) + value + 1.0
  1543  		} else {
  1544  			return math.NaN()
  1545  		}
  1546  	})
  1547  }
  1548  
  1549  // nPercentile returns percentile-percent of each series in the seriesList.
  1550  func nPercentile(ctx *common.Context, seriesList singlePathSpec, percentile float64) (ts.SeriesList, error) {
  1551  	return common.NPercentile(ctx, ts.SeriesList(seriesList), percentile, func(name string, percentile float64) string {
  1552  		return fmt.Sprintf("nPercentile(%s, "+common.FloatingPointFormat+")", name, percentile)
  1553  	})
  1554  }
  1555  
  1556  func percentileOfSeries(ctx *common.Context, seriesList singlePathSpec, percentile float64, interpolateValue genericInterface) (ts.SeriesList, error) {
  1557  	if percentile <= 0 || percentile > 100 {
  1558  		err := xerrors.NewInvalidParamsError(fmt.Errorf(
  1559  			"the requested percentile value must be between 0 and 100"))
  1560  		return ts.NewSeriesList(), err
  1561  	}
  1562  
  1563  	var interpolate bool
  1564  	switch interpolateValue := interpolateValue.(type) {
  1565  	case bool:
  1566  		interpolate = interpolateValue
  1567  	case string:
  1568  		if interpolateValue == "true" {
  1569  			interpolate = true
  1570  		} else if interpolateValue != "false" {
  1571  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
  1572  				"interpolateValue must be either true or false but instead is %s",
  1573  				interpolateValue))
  1574  			return ts.NewSeriesList(), err
  1575  		}
  1576  	default:
  1577  		err := xerrors.NewInvalidParamsError(fmt.Errorf(
  1578  			"interpolateValue must be either a boolean or a string but instead is %T",
  1579  			interpolateValue))
  1580  		return ts.NewSeriesList(), err
  1581  	}
  1582  
  1583  	if len(seriesList.Values) == 0 {
  1584  		err := xerrors.NewInvalidParamsError(fmt.Errorf("series list cannot be empty"))
  1585  		return ts.NewSeriesList(), err
  1586  	}
  1587  
  1588  	normalize, _, _, _, err := common.Normalize(ctx, ts.SeriesList(seriesList))
  1589  	if err != nil {
  1590  		return ts.NewSeriesList(), err
  1591  	}
  1592  
  1593  	step := seriesList.Values[0].MillisPerStep()
  1594  	for _, series := range seriesList.Values[1:] {
  1595  		if step != series.MillisPerStep() {
  1596  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
  1597  				"different step sizes in input series not supported"))
  1598  			return ts.NewSeriesList(), err
  1599  		}
  1600  	}
  1601  
  1602  	// TODO: This is wrong when MillisPerStep is different across
  1603  	// the timeseries.
  1604  	min := seriesList.Values[0].Len()
  1605  	for _, series := range seriesList.Values[1:] {
  1606  		numSteps := series.Len()
  1607  		if numSteps < min {
  1608  			min = numSteps
  1609  		}
  1610  	}
  1611  
  1612  	percentiles := make([]float64, min)
  1613  	for i := 0; i < min; i++ {
  1614  		row := make([]float64, len(seriesList.Values))
  1615  		for j, series := range seriesList.Values {
  1616  			row[j] = series.ValueAt(i)
  1617  		}
  1618  
  1619  		percentiles[i] = common.GetPercentile(row, percentile, interpolate)
  1620  	}
  1621  
  1622  	percentilesSeries := ts.NewValues(ctx, normalize.Values[0].MillisPerStep(), min)
  1623  	for k := 0; k < min; k++ {
  1624  		percentilesSeries.SetValueAt(k, percentiles[k])
  1625  	}
  1626  
  1627  	name := fmt.Sprintf("percentileOfSeries(%s,"+common.FloatingPointFormat+")",
  1628  		seriesList.Values[0].Specification, percentile)
  1629  	return ts.SeriesList{
  1630  		Values: []*ts.Series{
  1631  			ts.NewSeries(ctx, name, normalize.Values[0].StartTime(), percentilesSeries),
  1632  		},
  1633  		Metadata: seriesList.Metadata,
  1634  	}, nil
  1635  }
  1636  
  1637  // divMod takes dividend n and divisor m, returns quotient and remainder.
  1638  // prerequisite: m is nonzero.
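        // For example, divMod(7, 3) returns (2, 1).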
  1639  func divMod(n, m int) (int, int) {
  1640  	quotient := n / m
  1641  	remainder := n - quotient*m
  1642  	return quotient, remainder
  1643  }
  1644  
  1645  // addToBucket adds value to buckets[idx] and handles NaNs properly.
  1646  func addToBucket(buckets ts.MutableValues, idx int, value float64) {
  1647  	v := buckets.ValueAt(idx)
  1648  	if math.IsNaN(v) {
  1649  		buckets.SetValueAt(idx, value)
  1650  	} else {
  1651  		buckets.SetValueAt(idx, v+value)
  1652  	}
  1653  }
  1654  
  1655  // durationToSeconds converts a duration to number of seconds.
  1656  func durationToSeconds(d time.Duration) int {
  1657  	return int(d / time.Second)
  1658  }
  1659  
  1660  // hitcount estimates hit counts from a list of time series. This function assumes the values in each time
  1661  // series represent hits per second. It calculates hits per some larger interval such as per day or per hour.
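        // For example (illustrative), hitcount(requests.rate, "1h") multiplies
        // each per-second rate by the number of seconds it spans and sums the
        // results into hourly buckets.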
  1662  func hitcount(
  1663  	ctx *common.Context,
  1664  	_ singlePathSpec,
  1665  	intervalString string,
  1666  	alignToInterval bool,
  1667  ) (*unaryContextShifter, error) {
  1668  	interval, err := common.ParseInterval(intervalString)
  1669  	if err != nil {
  1670  		return nil, err
  1671  	}
  1672  
  1673  	intervalInSeconds := durationToSeconds(interval)
  1674  	if intervalInSeconds <= 0 {
  1675  		return nil, common.ErrInvalidIntervalFormat
  1676  	}
  1677  
  1678  	shiftDuration := time.Second * 0 // Default to no shift.
  1679  	if alignToInterval {
  1680  		// Follow graphite implementation: only handle minutes, hours and days.
  1681  		origStartTime := ctx.StartTime
  1682  		switch {
  1683  		case interval.Hours() >= 24:
  1684  			// Interval in days, truncate to days.
  1685  			newStartTime := time.Date(
  1686  				origStartTime.Year(),
  1687  				origStartTime.Month(),
  1688  				origStartTime.Day(), 0, 0, 0, 0, origStartTime.Location())
  1689  			shiftDuration = newStartTime.Sub(origStartTime)
  1690  		case interval.Hours() >= 1:
  1691  			// Interval is in hrs.
  1692  			newStartTime := time.Date(
  1693  				origStartTime.Year(),
  1694  				origStartTime.Month(),
  1695  				origStartTime.Day(),
  1696  				origStartTime.Hour(), 0, 0, 0, origStartTime.Location())
  1697  			shiftDuration = newStartTime.Sub(origStartTime)
  1698  		case interval.Minutes() >= 1:
  1699  			// Interval is in minutes.
  1700  			newStartTime := time.Date(
  1701  				origStartTime.Year(),
  1702  				origStartTime.Month(),
  1703  				origStartTime.Day(),
  1704  				origStartTime.Hour(),
  1705  				origStartTime.Minute(), 0, 0, origStartTime.Location())
  1706  			shiftDuration = newStartTime.Sub(origStartTime)
  1707  		}
  1708  	}
  1709  
  1710  	contextShiftingFn := func(c *common.Context) *common.Context {
  1711  		opts := common.NewChildContextOptions()
  1712  		opts.AdjustTimeRange(shiftDuration, 0, 0, 0)
  1713  		childCtx := c.NewChildContext(opts)
  1714  		return childCtx
  1715  	}
  1716  
  1717  	transformerFn := func(bootstrappedSeries ts.SeriesList) (ts.SeriesList, error) {
  1718  		r := hitCountImpl(ctx, bootstrappedSeries, intervalString, interval, intervalInSeconds)
  1719  		return r, nil
  1720  	}
  1721  
  1722  	return &unaryContextShifter{
  1723  		ContextShiftFunc: contextShiftingFn,
  1724  		UnaryTransformer: transformerFn,
  1725  	}, nil
  1726  }
  1727  
  1728  func hitCountImpl(
  1729  	ctx *common.Context,
  1730  	seriesList ts.SeriesList,
  1731  	intervalString string,
  1732  	interval time.Duration,
  1733  	intervalInSeconds int,
  1734  ) ts.SeriesList {
  1735  	resultSeries := make([]*ts.Series, 0, len(seriesList.Values))
  1736  	for _, series := range seriesList.Values {
  1737  		step := time.Duration(series.MillisPerStep()) * time.Millisecond
  1738  		bucketCount := int(math.Ceil(float64(series.EndTime().Sub(series.StartTime())) / float64(interval)))
  1739  		buckets := ts.NewValues(ctx, int(interval/time.Millisecond), bucketCount)
  1740  		newStart := series.EndTime().Add(-time.Duration(bucketCount) * interval)
  1741  
  1742  		for i := 0; i < series.Len(); i++ {
  1743  			value := series.ValueAt(i)
  1744  			if math.IsNaN(value) {
  1745  				continue
  1746  			}
  1747  			startTime := series.StartTime().Add(time.Duration(i) * step)
  1748  			startBucket, startMod := divMod(durationToSeconds(startTime.Sub(newStart)), intervalInSeconds)
  1749  			endTime := startTime.Add(step)
  1750  			endBucket, endMod := divMod(durationToSeconds(endTime.Sub(newStart)), intervalInSeconds)
  1751  
  1752  			if endBucket >= bucketCount {
  1753  				endBucket = bucketCount - 1
  1754  				endMod = intervalInSeconds
  1755  			}
  1756  
  1757  			if startBucket == endBucket {
  1758  				addToBucket(buckets, startBucket, value*float64(endMod-startMod))
  1759  			} else {
  1760  				addToBucket(buckets, startBucket, value*float64(intervalInSeconds-startMod))
  1761  				hitsPerBucket := value * float64(intervalInSeconds)
  1762  				for j := startBucket + 1; j < endBucket; j++ {
  1763  					addToBucket(buckets, j, hitsPerBucket)
  1764  				}
  1765  				if endMod > 0 {
  1766  					addToBucket(buckets, endBucket, value*float64(endMod))
  1767  				}
  1768  			}
  1769  		}
  1770  		newName := fmt.Sprintf("hitcount(%s, %q)", series.Name(), intervalString)
  1771  		newSeries := ts.NewSeries(ctx, newName, newStart, buckets)
  1772  		resultSeries = append(resultSeries, newSeries)
  1773  	}
  1774  
  1775  	r := ts.SeriesList(seriesList)
  1776  	r.Values = resultSeries
  1777  	return r
  1778  }
  1779  
  1780  func safeIndex(len, index int) int {
  1781  	return int(math.Min(float64(index), float64(len)))
  1782  }
  1783  
  1784  // substr takes one metric or a wildcard seriesList followed by 1 or 2 integers. Prints n - length elements
  1785  // of the array (if only one integer n is passed) or n - m elements of the array (if two integers n and m
  1786  // are passed).
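        // For example (illustrative), substr(foo.bar.baz.qux, 1, 3) renames the
        // series to "bar.baz".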
  1787  func substr(_ *common.Context, seriesList singlePathSpec, start, stop int) (ts.SeriesList, error) {
  1788  	origStart, origStop := start, stop
  1789  	results := make([]*ts.Series, len(seriesList.Values))
  1790  	re := regexp.MustCompile(",.*$")
  1791  	for i, series := range seriesList.Values {
  1792  		name := series.Name()
  1793  		left := strings.LastIndex(name, "(") + 1
  1794  		right := strings.Index(name, ")")
  1795  		length := len(name)
  1796  		if right < 0 {
  1797  			right = length
  1798  		}
  1799  		right = safeIndex(length, right)
  1800  		nameParts := strings.Split(name[left:right], ".")
  1801  		numParts := len(nameParts)
  1802  		currStart, currStop := start, stop
  1803  		// Graphite supports negative indexing, so we need to also.
  1804  		if numParts > 0 {
  1805  			for currStart < 0 {
  1806  				currStart += numParts
  1807  			}
  1808  
  1809  			for currStop < 0 {
  1810  				currStop += numParts
  1811  			}
  1812  		}
  1813  
  1814  		// If stop == 0, it's as if stop was unspecified
  1815  		if currStart < 0 || currStart >= numParts || (currStop != 0 && currStop < currStart) {
  1816  			err := xerrors.NewInvalidParamsError(fmt.Errorf(
  1817  				"invalid substr params: start=%d, stop=%d", origStart, origStop))
  1818  			return ts.NewSeriesList(), err
  1819  		}
  1820  		var newName string
  1821  		if currStop == 0 {
  1822  			newName = strings.Join(nameParts[currStart:], ".")
  1823  		} else {
  1824  			currStop = safeIndex(numParts, currStop)
  1825  			newName = strings.Join(nameParts[currStart:currStop], ".")
  1826  		}
  1827  		newName = re.ReplaceAllString(newName, "")
  1828  		results[i] = series.RenamedTo(newName)
  1829  	}
  1830  
  1831  	r := ts.SeriesList(seriesList)
  1832  	r.Values = results
  1833  	return r, nil
  1834  }
  1835  
  1836  // combineBootstrapWithOriginal combines the bootstrapped series with the original series.
  1837  func combineBootstrapWithOriginal(
  1838  	ctx *common.Context,
  1839  	bootstrapStartTime, bootstrapEndTime time.Time,
  1840  	_, originalEndTime time.Time,
  1841  	bootstrapped ts.SeriesList,
  1842  	seriesList singlePathSpec,
  1843  ) (ts.SeriesList, error) {
  1844  	nameToSeries := make(map[string]*ts.Series)
  1845  	for _, series := range bootstrapped.Values {
  1846  		nameToSeries[series.Name()] = series
  1847  	}
  1848  
  1849  	bootstrapList := make([]*ts.Series, 0, len(seriesList.Values))
  1850  	for _, series := range seriesList.Values {
  1851  		bs, found := nameToSeries[series.Name()]
  1852  		if !found {
  1853  			numSteps := ts.NumSteps(bootstrapStartTime, bootstrapEndTime, series.MillisPerStep())
  1854  			vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
  1855  			bs = ts.NewSeries(ctx, series.Name(), bootstrapStartTime, vals)
  1856  		} else {
  1857  			// Delete from the lookup so we can fill in
  1858  			// the bootstrapped time series with NaNs if
  1859  			// the original series list is missing the series.
  1860  			delete(nameToSeries, series.Name())
  1861  		}
  1862  		bootstrapList = append(bootstrapList, bs)
  1863  	}
  1864  
  1865  	var err error
  1866  	newSeriesList := make([]*ts.Series, 0, len(seriesList.Values)+len(nameToSeries))
  1867  	for i, bootstrap := range bootstrapList {
  1868  		original := seriesList.Values[i]
  1869  		if bootstrap.MillisPerStep() < original.MillisPerStep() {
  1870  			bootstrap, err = bootstrap.IntersectAndResize(bootstrap.StartTime(), bootstrap.EndTime(),
  1871  				original.MillisPerStep(), original.ConsolidationFunc())
  1872  			if err != nil {
  1873  				return ts.NewSeriesList(), err
  1874  			}
  1875  		}
  1876  		bootstrapEndStep := bootstrapEndTime.Truncate(original.Resolution())
  1877  		if bootstrapEndStep.Before(bootstrapEndTime) {
  1878  			bootstrapEndStep = bootstrapEndStep.Add(original.Resolution())
  1879  		}
  1880  		// NB(braskin): using bootstrap.Len() is incorrect as it will include all
  1881  		// of the steps in the original timeseries, not just the steps up to the new end time
  1882  		bootstrapLength := bootstrap.StepAtTime(bootstrapEndStep)
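        		// Each bootstrap step is repeated ratio times so the coarser
        		// bootstrap values line up with the original series' finer resolution.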
  1883  		ratio := bootstrap.MillisPerStep() / original.MillisPerStep()
  1884  		numBootstrapValues := bootstrapLength * ratio
  1885  		numCombinedValues := numBootstrapValues + original.Len()
  1886  		values := ts.NewValues(ctx, original.MillisPerStep(), numCombinedValues)
  1887  		for j := 0; j < min(bootstrap.Len(), bootstrapLength); j++ {
  1888  			for k := j * ratio; k < (j+1)*ratio; k++ {
  1889  				values.SetValueAt(k, bootstrap.ValueAt(j))
  1890  			}
  1891  		}
  1892  		for j := numBootstrapValues; j < numCombinedValues; j++ {
  1893  			values.SetValueAt(j, original.ValueAt(j-numBootstrapValues))
  1894  		}
  1895  		newSeries := ts.NewSeries(ctx, original.Name(), bootstrapStartTime, values)
  1896  		newSeries.Specification = original.Specification
  1897  		newSeriesList = append(newSeriesList, newSeries)
  1898  	}
  1899  	// Now add any series in bootstrap list but not original series,
  1900  	// need to iterate the bootstrapped.Values to retain order
  1901  	// but can check if they are in the nameToSeries map still and if so
  1902  	// then there was no matching original series.
  1903  	for _, series := range bootstrapped.Values {
  1904  		bs, found := nameToSeries[series.Name()]
  1905  		if !found {
  1906  			// Processed already.
  1907  			continue
  1908  		}
  1909  		// Extend the bootstrap series to include steps covered by original
  1910  		// time range since the original series is missing from fetch.
  1911  		needSteps := bs.StepAtTime(originalEndTime)
  1912  		if currSteps := bs.Len(); needSteps > currSteps {
  1913  			// Need to resize.
  1914  			vals := ts.NewValues(ctx, bs.MillisPerStep(), needSteps)
  1915  			for i := 0; i < currSteps; i++ {
  1916  				vals.SetValueAt(i, bs.ValueAt(i))
  1917  			}
  1918  			bs = bs.DerivedSeries(bs.StartTime(), vals)
  1919  		}
  1920  		newSeriesList = append(newSeriesList, bs)
  1921  	}
  1922  
  1923  	r := ts.SeriesList(seriesList)
  1924  	r.Values = newSeriesList
  1925  	return r, nil
  1926  }
  1927  
  1928  func min(a, b int) int {
  1929  	if a < b {
  1930  		return a
  1931  	}
  1932  	return b
  1933  }
  1934  
  1935  // trimBootstrap trims the bootstrap period off the front of this series so it matches the original.
  1936  func trimBootstrap(ctx *common.Context, bootstrap, original *ts.Series) *ts.Series {
  1937  	originalLen := original.Len()
  1938  	bootstrapLen := bootstrap.Len()
  1939  	lengthLimit := (originalLen * original.MillisPerStep()) / bootstrap.MillisPerStep()
  1940  	trimStart := bootstrap.EndTime().Add(-time.Duration(lengthLimit*bootstrap.MillisPerStep()) * time.Millisecond)
  1941  	vals := ts.NewValues(ctx, bootstrap.MillisPerStep(), lengthLimit)
  1942  	for i := 0; i < lengthLimit; i++ {
  1943  		vals.SetValueAt(i, bootstrap.ValueAt(i+bootstrapLen-lengthLimit))
  1944  	}
  1945  	return ts.NewSeries(ctx, bootstrap.Name(), trimStart, vals)
  1946  }
  1947  
  1948  // holtWintersForecast performs a Holt-Winters forecast using the series as input data.
  1949  // Data from one week previous to the series is used to bootstrap the initial forecast.
  1950  func holtWintersForecast(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  1951  	return holtWintersForecastInternal(ctx, seriesList, secondsPerWeek*time.Second)
  1952  }
  1953  
  1954  // nolint: unparam
  1955  func holtWintersForecastInternal(ctx *common.Context, seriesList singlePathSpec, d time.Duration) (ts.SeriesList, error) {
  1956  	results := make([]*ts.Series, len(seriesList.Values))
  1957  	bootstrapList, err := common.FetchWithBootstrap(ctx, ts.SeriesList(seriesList), d)
  1958  	if err != nil {
  1959  		return ts.NewSeriesList(), err
  1960  	}
  1961  	for i, bootstrap := range bootstrapList.Values {
  1962  		series := seriesList.Values[i]
  1963  		analysis := holtWintersAnalysis(ctx, bootstrap)
  1964  		results[i] = trimBootstrap(ctx, analysis.predictions, series)
  1965  	}
  1966  	r := ts.SeriesList(seriesList)
  1967  	r.Values = results
  1968  	return r, nil
  1969  }
  1970  
  1971  // holtWintersConfidenceBands performs a Holt-Winters forecast using the series as input data and
  1972  // plots upper and lower bands with the predicted forecast deviations.
  1973  func holtWintersConfidenceBands(ctx *common.Context, seriesList singlePathSpec, delta float64) (ts.SeriesList, error) {
  1974  	return holtWintersConfidenceBandsInternal(ctx, seriesList, delta, secondsPerWeek*time.Second)
  1975  }
  1976  
  1977  // nolint: unparam
  1978  func holtWintersConfidenceBandsInternal(ctx *common.Context, seriesList singlePathSpec, delta float64, d time.Duration) (ts.SeriesList, error) {
  1979  	results := make([]*ts.Series, 2*len(seriesList.Values))
  1980  	bootstrapList, err := common.FetchWithBootstrap(ctx, ts.SeriesList(seriesList), d)
  1981  	if err != nil {
  1982  		return ts.NewSeriesList(), err
  1983  	}
  1984  
  1985  	for index, bootstrap := range bootstrapList.Values {
  1986  		series := seriesList.Values[index]
  1987  		analysis := holtWintersAnalysis(ctx, bootstrap)
  1988  		forecast := trimBootstrap(ctx, analysis.predictions, series)
  1989  		deviation := trimBootstrap(ctx, analysis.deviations, series)
  1990  		seriesLength := forecast.Len()
  1991  		upperBand := ts.NewValues(ctx, forecast.MillisPerStep(), seriesLength)
  1992  		lowerBand := ts.NewValues(ctx, forecast.MillisPerStep(), seriesLength)
  1993  		for i := 0; i < seriesLength; i++ {
  1994  			forecastItem := forecast.ValueAt(i)
  1995  			deviationItem := deviation.ValueAt(i)
  1996  			if !math.IsNaN(forecastItem) && !math.IsNaN(deviationItem) {
  1997  				scaledDeviation := delta * deviationItem
  1998  				upperBand.SetValueAt(i, forecastItem+scaledDeviation)
  1999  				lowerBand.SetValueAt(i, forecastItem-scaledDeviation)
  2000  			}
  2001  		}
  2002  		upperName := fmt.Sprintf("holtWintersConfidenceUpper(%s)", series.Name())
  2003  		lowerName := fmt.Sprintf("holtWintersConfidenceLower(%s)", series.Name())
  2004  		upperSeries := ts.NewSeries(ctx, upperName, forecast.StartTime(), upperBand)
  2005  		lowerSeries := ts.NewSeries(ctx, lowerName, forecast.StartTime(), lowerBand)
  2006  		newIndex := index * 2
  2007  		results[newIndex] = lowerSeries
  2008  		results[newIndex+1] = upperSeries
  2009  	}
  2010  
  2011  	r := ts.SeriesList(seriesList)
  2012  	r.Values = results
  2013  	return r, nil
  2014  }
  2015  
  2016  // holtWintersAberration performs a Holt-Winters forecast using the series as input data and
  2017  // plots the positive or negative deviation of the series data from the forecast.
  2018  func holtWintersAberration(ctx *common.Context, seriesList singlePathSpec, delta float64) (ts.SeriesList, error) {
  2019  	// log if we are actually using this function
  2020  	var b bytes.Buffer
  2021  	b.WriteString("holtWintersAberration is used")
  2022  	if len(seriesList.Values) > 0 {
  2023  		b.WriteString(fmt.Sprintf(", series[0] name=%s", seriesList.Values[0].Name()))
  2024  	}
  2025  	return holtWintersAberrationInternal(ctx, seriesList, delta, secondsPerWeek*time.Second)
  2026  }
  2027  
  2028  // nolint: unparam
  2029  func holtWintersAberrationInternal(
  2030  	ctx *common.Context,
  2031  	seriesList singlePathSpec,
  2032  	delta float64,
  2033  	d time.Duration,
  2034  ) (ts.SeriesList, error) {
  2035  	results := make([]*ts.Series, len(seriesList.Values))
  2036  	for index, series := range seriesList.Values {
  2037  		confidenceBands, err := holtWintersConfidenceBandsInternal(ctx, singlePathSpec{
  2038  			Values: []*ts.Series{series},
  2039  		}, delta, d)
  2040  		if err != nil {
  2041  			return ts.NewSeriesList(), err
  2042  		}
  2043  
  2044  		lowerBand := confidenceBands.Values[0]
  2045  		upperBand := confidenceBands.Values[1]
  2046  		numPoints := series.Len()
  2047  		aberration := ts.NewValues(ctx, series.MillisPerStep(), numPoints)
  2048  		for i := 0; i < numPoints; i++ {
  2049  			actual := series.ValueAt(i)
  2050  			upperVal := upperBand.ValueAt(i)
  2051  			lowerVal := lowerBand.ValueAt(i)
  2052  			var newValue float64
  2053  			if math.IsNaN(actual) {
  2054  				newValue = 0
  2055  			} else if !math.IsNaN(upperVal) && actual > upperVal {
  2056  				newValue = actual - upperVal
  2057  			} else if !math.IsNaN(lowerVal) && actual < lowerVal {
  2058  				newValue = actual - lowerVal
  2059  			} else {
  2060  				newValue = 0
  2061  			}
  2062  			aberration.SetValueAt(i, newValue)
  2063  		}
  2064  		newName := fmt.Sprintf("holtWintersAberration(%s)", series.Name())
  2065  		newSeries := ts.NewSeries(ctx, newName, series.StartTime(), aberration)
  2066  		results[index] = newSeries
  2067  	}
  2068  
  2069  	r := ts.SeriesList(seriesList)
  2070  	r.Values = results
  2071  	return r, nil
  2072  }
  2073  
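        // The helpers below implement the Holt-Winters (triple exponential
        // smoothing) recurrences using the package-level smoothing constants
        // alpha (level), beta (trend) and gamma (seasonality/deviation), with m
        // equal to one season of points:
        //
        //	level_t  = alpha*(y_t - season_{t-m}) + (1-alpha)*(level_{t-1} + trend_{t-1})
        //	trend_t  = beta*(level_t - level_{t-1}) + (1-beta)*trend_{t-1}
        //	season_t = gamma*(y_t - level_t) + (1-gamma)*season_{t-m}
        //	dev_t    = gamma*|y_t - prediction_t| + (1-gamma)*dev_{t-m}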
  2074  func holtWintersIntercept(actual, lastSeason, lastIntercept, lastSlope float64) float64 {
  2075  	return alpha*(actual-lastSeason) + (1-alpha)*(lastIntercept+lastSlope)
  2076  }
  2077  
  2078  func holtWintersSlope(intercept, lastIntercept, lastSlope float64) float64 {
  2079  	return beta*(intercept-lastIntercept) + (1-beta)*lastSlope
  2080  }
  2081  
  2082  func holtWintersSeasonal(actual, intercept, lastSeason float64) float64 {
  2083  	return gamma*(actual-intercept) + (1-gamma)*lastSeason
  2084  }
  2085  
  2086  func holtWintersDeviation(actual, prediction, lastSeasonalDev float64) float64 {
  2087  	if math.IsNaN(prediction) {
  2088  		prediction = 0
  2089  	}
  2090  	return gamma*math.Abs(actual-prediction) + (1-gamma)*lastSeasonalDev
  2091  }
  2092  
  2093  type holtWintersAnalysisResult struct {
  2094  	predictions *ts.Series
  2095  	deviations  *ts.Series
  2096  	intercepts  []float64
  2097  	slopes      []float64
  2098  	seasonals   []float64
  2099  }
  2100  
  2101  // holtWintersAnalysis takes a series, and returns the analysis result.
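        // The season length is one day of datapoints, e.g. (illustrative) a
        // series with a 60 second step has a season of 1440 points.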
  2102  func holtWintersAnalysis(ctx *common.Context, series *ts.Series) *holtWintersAnalysisResult {
  2103  	seasonLength := secondsPerDay * millisPerSecond / series.MillisPerStep()
  2104  	numPoints := series.Len()
  2105  	intercepts := make([]float64, numPoints)
  2106  	slopes := make([]float64, numPoints)
  2107  	seasonals := make([]float64, numPoints)
  2108  	predictions := ts.NewValues(ctx, series.MillisPerStep(), numPoints)
  2109  	deviations := ts.NewValues(ctx, series.MillisPerStep(), numPoints)
  2110  
  2111  	getLastSeasonal := func(i int) float64 {
  2112  		j := i - seasonLength
  2113  		if j >= 0 {
  2114  			return seasonals[j]
  2115  		}
  2116  		return 0.0
  2117  	}
  2118  
  2119  	getLastDeviation := func(i int) float64 {
  2120  		j := i - seasonLength
  2121  		if j >= 0 {
  2122  			return deviations.ValueAt(j)
  2123  		}
  2124  		return 0.0
  2125  	}
  2126  
  2127  	nextPred := math.NaN()
  2128  	for i := 0; i < numPoints; i++ {
  2129  		actual := series.ValueAt(i)
  2130  		if math.IsNaN(actual) {
  2131  			intercepts[i] = math.NaN()
  2132  			predictions.SetValueAt(i, nextPred)
  2133  			deviations.SetValueAt(i, 0.0)
  2134  			nextPred = math.NaN()
  2135  			continue
  2136  		}
  2137  
  2138  		var lastIntercept, lastSlope, prediction float64
  2139  		if i == 0 {
  2140  			lastIntercept = actual
  2141  			lastSlope = 0
  2142  			prediction = actual
  2143  		} else {
  2144  			lastIntercept = intercepts[i-1]
  2145  			lastSlope = slopes[i-1]
  2146  			if math.IsNaN(lastIntercept) {
  2147  				lastIntercept = actual
  2148  			}
  2149  			prediction = nextPred
  2150  		}
  2151  
  2152  		lastSeasonal := getLastSeasonal(i)
  2153  		nextLastSeasonal := getLastSeasonal(i + 1)
  2154  		lastSeasonalDev := getLastDeviation(i)
  2155  
  2156  		intercept := holtWintersIntercept(actual, lastSeasonal, lastIntercept, lastSlope)
  2157  		slope := holtWintersSlope(intercept, lastIntercept, lastSlope)
  2158  		seasonal := holtWintersSeasonal(actual, intercept, lastSeasonal)
  2159  		nextPred = intercept + slope + nextLastSeasonal
  2160  		deviation := holtWintersDeviation(actual, prediction, lastSeasonalDev)
  2161  
  2162  		intercepts[i] = intercept
  2163  		slopes[i] = slope
  2164  		seasonals[i] = seasonal
  2165  		predictions.SetValueAt(i, prediction)
  2166  		deviations.SetValueAt(i, deviation)
  2167  	}
  2168  
  2169  	// make the new forecast series
  2170  	forecastName := fmt.Sprintf("holtWintersForecast(%s)", series.Name())
  2171  	forecastSeries := ts.NewSeries(ctx, forecastName, series.StartTime(), predictions)
  2172  
  2173  	// make the new deviation series
  2174  	deviationName := fmt.Sprintf("holtWintersDeviation(%s)", series.Name())
  2175  	deviationSeries := ts.NewSeries(ctx, deviationName, series.StartTime(), deviations)
  2176  
  2177  	return &holtWintersAnalysisResult{
  2178  		predictions: forecastSeries,
  2179  		deviations:  deviationSeries,
  2180  		intercepts:  intercepts,
  2181  		slopes:      slopes,
  2182  		seasonals:   seasonals,
  2183  	}
  2184  }
  2185  
  2186  // squareRoot takes one metric or a wildcard seriesList, and computes the square root of each datapoint.
  2187  func squareRoot(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2188  	return transform(
  2189  		ctx,
  2190  		seriesList,
  2191  		func(fname string) string { return fmt.Sprintf(wrappingFmt, "squareRoot", fname) },
  2192  		math.Sqrt,
  2193  	)
  2194  }
  2195  
  2196  // stdev takes one metric or a wildcard seriesList followed by an integer N. Draw the standard deviation
  2197  // of all metrics passed for the past N datapoints. If the ratio of null points in the window is greater than
  2198  // windowTolerance, skip the calculation.
  2199  func stdev(ctx *common.Context, seriesList singlePathSpec, points int, windowTolerance float64) (ts.SeriesList, error) {
  2200  	return common.Stdev(ctx, ts.SeriesList(seriesList), points, windowTolerance, func(series *ts.Series, points int) string {
  2201  		return fmt.Sprintf("stddev(%s,%d)", series.Name(), points)
  2202  	})
  2203  }
  2204  
  2205  // rangeOfSeries distills down a set of inputs into the range of the series.
  2206  func rangeOfSeries(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2207  	series, err := common.Range(ctx, ts.SeriesList(seriesList), func(series ts.SeriesList) string {
  2208  		return wrapPathExpr("rangeOfSeries", series)
  2209  	})
  2210  	if err != nil {
  2211  		return ts.NewSeriesList(), err
  2212  	}
  2213  	return ts.SeriesList{
  2214  		Values:   []*ts.Series{series},
  2215  		Metadata: seriesList.Metadata,
  2216  	}, nil
  2217  }
  2218  
  2219  // removeAbovePercentile removes data above the specified percentile from the series
  2220  // or list of series provided. Values above this percentile are assigned a value
  2221  // of None.
  2222  func removeAbovePercentile(ctx *common.Context, seriesList singlePathSpec, percentile float64) (ts.SeriesList, error) {
  2223  	return common.RemoveByPercentile(ctx,
  2224  		ts.SeriesList(seriesList),
  2225  		percentile,
  2226  		func(name string, percentile float64) string {
  2227  			return fmt.Sprintf("removeAbovePercentile(%s, "+common.FloatingPointFormat+")", name, percentile)
  2228  		},
  2229  		common.GreaterThan)
  2230  }
  2231  
  2232  // removeBelowPercentile removes data below the specified percentile from the series
  2233  // or list of series provided. Values below this percentile are assigned a value of None.
  2234  func removeBelowPercentile(ctx *common.Context, seriesList singlePathSpec, percentile float64) (ts.SeriesList, error) {
  2235  	return common.RemoveByPercentile(
  2236  		ctx,
  2237  		ts.SeriesList(seriesList),
  2238  		percentile,
  2239  		func(name string, percentile float64) string {
  2240  			return fmt.Sprintf("removeBelowPercentile(%s, "+common.FloatingPointFormat+")", name, percentile)
  2241  		},
  2242  		common.LessThan)
  2243  }
  2244  
  2245  // randomWalkFunction returns a random walk starting at 0.
  2246  // Note: step has a unit of seconds.
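        // Each datapoint is drawn independently and uniformly from [-0.5, 0.5).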
  2247  func randomWalkFunction(ctx *common.Context, name string, step int) (ts.SeriesList, error) {
  2248  	if step <= 0 {
  2249  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("non-positive step size %d", step))
  2250  	}
  2251  	if !ctx.StartTime.Before(ctx.EndTime) {
  2252  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(
  2253  			fmt.Errorf("startTime %v is no earlier than endTime %v", ctx.StartTime, ctx.EndTime))
  2254  	}
  2255  	r := rand.New(rand.NewSource(time.Now().UnixNano()))
  2256  	millisPerStep := step * millisPerSecond
  2257  	numSteps := ts.NumSteps(ctx.StartTime, ctx.EndTime, millisPerStep)
  2258  	vals := ts.NewValues(ctx, millisPerStep, numSteps)
  2259  	for i := 0; i < numSteps; i++ {
  2260  		vals.SetValueAt(i, r.Float64()-0.5)
  2261  	}
  2262  	newSeries := ts.NewSeries(ctx, name, ctx.StartTime, vals)
  2263  	return ts.NewSeriesListWithSeries(newSeries), nil
  2264  }
  2265  
  2266  // aggregateLine draws a horizontal line based on the function applied to the series.
  2267  func aggregateLine(ctx *common.Context, seriesList singlePathSpec, f string) (ts.SeriesList, error) {
  2268  	if len(seriesList.Values) == 0 {
  2269  		return ts.NewSeriesList(), common.ErrEmptySeriesList
  2270  	}
  2271  
  2272  	sa := ts.SeriesReducerApproach(f)
  2273  	r, ok := sa.SafeReducer()
  2274  	if !ok {
  2275  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("invalid function %s", f))
  2276  	}
  2277  
  2278  	value := r(seriesList.Values[0])
  2279  	name := fmt.Sprintf("aggregateLine(%s,"+common.FloatingPointFormat+")",
  2280  		seriesList.Values[0].Specification, value)
  2281  	series, err := constantLine(ctx, value)
  2282  	if err != nil {
  2283  		return ts.NewSeriesList(), err
  2284  	}
  2285  
  2286  	renamed := series.Values[0].RenamedTo(name)
  2287  	return ts.SeriesList{
  2288  		Values:   []*ts.Series{renamed},
  2289  		Metadata: seriesList.Metadata,
  2290  	}, nil
  2291  }
  2292  
  2293  // changed takes one metric or a wildcard seriesList.
  2294  // Output 1 when the value changed, 0 when null or the same.
  2295  func changed(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2296  	return common.Changed(ctx, ts.SeriesList(seriesList), func(series *ts.Series) string {
  2297  		return fmt.Sprintf("changed(%s)", series.Name())
  2298  	})
  2299  }
  2300  
  2301  // windowPointsLength calculates the number of window points in an interval
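        // For example (illustrative), a 10 minute interval over a series with a
        // 60 second step yields 10 window points.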
  2302  func windowPointsLength(series *ts.Series, interval time.Duration) int {
  2303  	return int(interval / (time.Duration(series.MillisPerStep()) * time.Millisecond))
  2304  }
  2305  
  2306  type movingImpl interface {
  2307  	Reset(
  2308  		opts movingImplResetOptions,
  2309  	) error
  2310  
  2311  	Evaluate(
  2312  		window []float64,
  2313  		values ts.MutableValues,
  2314  		windowPoints int,
  2315  		i int,
  2316  		xFilesFactor float64,
  2317  	)
  2318  }
  2319  
  2320  type movingImplResetOptions struct {
  2321  	Series       *ts.Series
  2322  	WindowPoints int
  2323  	WindowSize   windowSizeParsed
  2324  }
  2325  
  2326  // Ensure movingImplementationFn implements movingImpl.
  2327  var _ movingImpl = movingImplementationFn(nil)
  2328  
  2329  type movingImplementationFn func(
  2330  	window []float64,
  2331  	values ts.MutableValues,
  2332  	windowPoints int,
  2333  	i int,
  2334  	xFilesFactor float64,
  2335  )
  2336  
  2337  func (f movingImplementationFn) Reset(opts movingImplResetOptions) error {
  2338  	return nil
  2339  }
  2340  
  2341  func (f movingImplementationFn) Evaluate(
  2342  	window []float64,
  2343  	values ts.MutableValues,
  2344  	windowPoints int,
  2345  	i int,
  2346  	xFilesFactor float64,
  2347  ) {
  2348  	f(window, values, windowPoints, i, xFilesFactor)
  2349  }
  2350  
  2351  // movingMedianHelper given a slice of floats, calculates the median and assigns it into vals at index i
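        // The indexing below relies on common.SafeSort grouping NaNs at the
        // front of the window; e.g. (illustrative) a window of 5, NaN, 1, 3
        // sorts to NaN, 1, 3, 5 and yields the upper median 3.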
  2352  func movingMedianHelper(window []float64, vals ts.MutableValues, windowPoints int, i int, xFilesFactor float64) {
  2353  	nans := common.SafeSort(window)
  2354  
  2355  	if nans < windowPoints && effectiveXFF(windowPoints, nans, xFilesFactor) {
  2356  		index := (windowPoints - nans) / 2
  2357  		median := window[nans+index]
  2358  		vals.SetValueAt(i, median)
  2359  	}
  2360  }
  2361  
  2362  // movingSumHelper given a slice of floats, calculates the sum and assigns it into vals at index i
  2363  func movingSumHelper(window []float64, vals ts.MutableValues, windowPoints int, i int, xFilesFactor float64) {
  2364  	sum, nans, ok := common.SafeSum(window)
  2365  	if !ok {
  2366  		return
  2367  	}
  2368  
  2369  	if nans < windowPoints && effectiveXFF(windowPoints, nans, xFilesFactor) {
  2370  		vals.SetValueAt(i, sum)
  2371  	}
  2372  }
  2373  
  2374  // movingAverageHelper given a slice of floats, calculates the average and assigns it into vals at index i
  2375  func movingAverageHelper(window []float64, vals ts.MutableValues, windowPoints int, i int, xFilesFactor float64) {
  2376  	avg, nans, ok := common.SafeAverage(window)
  2377  	if !ok {
  2378  		return
  2379  	}
  2380  
  2381  	if nans < windowPoints && effectiveXFF(windowPoints, nans, xFilesFactor) {
  2382  		vals.SetValueAt(i, avg)
  2383  	}
  2384  }
  2385  
  2386  // movingMaxHelper given a slice of floats, finds the max and assigns it into vals at index i
  2387  func movingMaxHelper(window []float64, vals ts.MutableValues, windowPoints int, i int, xFilesFactor float64) {
  2388  	max, nans, ok := common.SafeMax(window)
  2389  	if !ok {
  2390  		return
  2391  	}
  2392  
  2393  	if nans < windowPoints && effectiveXFF(windowPoints, nans, xFilesFactor) {
  2394  		vals.SetValueAt(i, max)
  2395  	}
  2396  }
  2397  
  2398  // movingMinHelper given a slice of floats, finds the min and assigns it into vals at index i
  2399  func movingMinHelper(window []float64, vals ts.MutableValues, windowPoints int, i int, xFilesFactor float64) {
  2400  	min, nans, ok := common.SafeMin(window)
  2401  	if !ok {
  2402  		return
  2403  	}
  2404  
  2405  	if nans < windowPoints && effectiveXFF(windowPoints, nans, xFilesFactor) {
  2406  		vals.SetValueAt(i, min)
  2407  	}
  2408  }
  2409  
  2410  // newMovingBinaryTransform requires that functions are registered with
  2411  // WithoutUnaryContextShifterSkipFetchOptimization() during function
  2412  // registration.
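        // The context shift below fetches an extra window of history ahead of the
        // requested range so that impl can be evaluated over a full sliding window
        // at every output step.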
  2413  func newMovingBinaryTransform(
  2414  	ctx *common.Context,
  2415  	input singlePathSpec,
  2416  	windowSizeValue genericInterface,
  2417  	movingFunctionName string,
  2418  	xFilesFactor float64,
  2419  	impl movingImpl,
  2420  ) (*unaryContextShifter, error) {
  2421  	windowSize, err := parseWindowSize(windowSizeValue, input)
  2422  	if err != nil {
  2423  		return nil, err
  2424  	}
  2425  
  2426  	interval := windowSize.deltaValue
  2427  	if interval <= 0 {
  2428  		return nil, common.ErrInvalidIntervalFormat
  2429  	}
  2430  
  2431  	contextShiftingFn := func(c *common.Context) *common.Context {
  2432  		opts := common.NewChildContextOptions()
  2433  		opts.AdjustTimeRange(0, 0, interval, 0)
  2434  		childCtx := c.NewChildContext(opts)
  2435  		return childCtx
  2436  	}
  2437  
  2438  	// Save the original context in case we need to reference it
  2439  	// during a context adjust step for ContextShiftAdjustFunc.
  2440  	originalCtx := ctx
  2441  	contextShiftingAdjustFn := func(
  2442  		shiftedContext *common.Context,
  2443  		bootstrappedSeries ts.SeriesList,
  2444  	) (*common.Context, bool, error) {
  2445  		newWindowSize, err := parseWindowSize(windowSizeValue,
  2446  			singlePathSpec(bootstrappedSeries))
  2447  		if err != nil {
  2448  			return nil, false, err
  2449  		}
  2450  		if newWindowSize.deltaValue == windowSize.deltaValue {
  2451  			// No adjustment necessary.
  2452  			return nil, false, nil
  2453  		}
  2454  
  2455  		// Adjustment necessary, update variables and validate.
  2456  		windowSize = newWindowSize
  2457  		interval = windowSize.deltaValue
  2458  		if interval <= 0 {
  2459  			return nil, false, common.ErrInvalidIntervalFormat
  2460  		}
  2461  
  2462  		// Create new child context.
  2463  		opts := common.NewChildContextOptions()
  2464  		opts.AdjustTimeRange(0, 0, interval, 0)
  2465  		// Be sure to use the original ctx to shift from (since
  2466  		// we recalculated the window size and need to shift from the
  2467  		// original start time).
  2468  		childCtx := originalCtx.NewChildContext(opts)
  2469  		return childCtx, true, nil
  2470  	}
  2471  
  2472  	return &unaryContextShifter{
  2473  		ContextShiftFunc:       contextShiftingFn,
  2474  		ContextShiftAdjustFunc: contextShiftingAdjustFn,
  2475  		UnaryTransformer: func(bootstrapped ts.SeriesList) (ts.SeriesList, error) {
  2476  			results := make([]*ts.Series, 0, bootstrapped.Len())
  2477  			maxWindowPoints := 0
  2478  			for _, series := range bootstrapped.Values {
  2479  				windowPoints := windowPointsLength(series, interval)
  2480  				if windowPoints <= 0 {
  2481  					err := xerrors.NewInvalidParamsError(fmt.Errorf(
  2482  						"non positive window points, windowSize=%s, stepSize=%d",
  2483  						windowSize.stringValue, series.MillisPerStep()))
  2484  					return ts.NewSeriesList(), err
  2485  				}
  2486  				if windowPoints > maxWindowPoints {
  2487  					maxWindowPoints = windowPoints
  2488  				}
  2489  			}
  2490  
  2491  			windowPoints := make([]float64, maxWindowPoints)
  2492  			for _, series := range bootstrapped.Values {
  2493  				currWindowPoints := windowPointsLength(series, interval)
  2494  				window := windowPoints[:currWindowPoints]
  2495  				util.Memset(window, math.NaN())
  2496  
  2497  				step := time.Duration(series.MillisPerStep()) * time.Millisecond
  2498  				newStartTime := series.StartTime().Add(step * time.Duration(currWindowPoints))
  2499  				numSteps := series.Len() - currWindowPoints
  2500  				if numSteps < 1 {
  2501  					// No values in range.
  2502  					continue
  2503  				}
  2504  
  2505  				vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
  2506  
  2507  				if err := impl.Reset(movingImplResetOptions{
  2508  					Series:       series,
  2509  					WindowPoints: currWindowPoints,
  2510  					WindowSize:   windowSize,
  2511  				}); err != nil {
  2512  					return ts.SeriesList{}, err
  2513  				}
  2514  
  2515  				for i := 0; i < numSteps; i++ {
  2516  					for j := i; j < i+currWindowPoints; j++ {
  2517  						if j >= series.Len() {
  2518  							continue
  2519  						}
  2520  
  2521  						idx := j - i
  2522  						if idx < 0 || idx > len(window)-1 {
  2523  							continue
  2524  						}
  2525  
  2526  						window[idx] = series.ValueAt(j)
  2527  					}
  2528  					impl.Evaluate(window, vals, currWindowPoints, i, xFilesFactor)
  2529  				}
  2530  
  2531  				name := fmt.Sprintf("%s(%s,%s)", movingFunctionName, series.Name(), windowSize.stringValue)
  2532  
  2533  				newSeries := ts.NewSeries(ctx, name, newStartTime, vals)
  2534  				results = append(results, newSeries)
  2535  			}
  2536  
  2537  			bootstrapped.Values = results
  2538  			return bootstrapped, nil
  2539  		},
  2540  	}, nil
  2541  }
  2542  
  2543  // movingMedian calculates the moving median of a metric (or metrics) over a time interval.
  2544  func movingMedian(
  2545  	ctx *common.Context,
  2546  	input singlePathSpec,
  2547  	windowSize genericInterface,
  2548  	xFilesFactor float64,
  2549  ) (*unaryContextShifter, error) {
  2550  	return newMovingBinaryTransform(ctx, input, windowSize, "movingMedian", xFilesFactor,
  2551  		movingImplementationFn(movingMedianHelper))
  2552  }
  2553  
  2554  // movingSum calculates the moving sum of a metric (or metrics) over a time interval.
  2555  func movingSum(
  2556  	ctx *common.Context,
  2557  	input singlePathSpec,
  2558  	windowSize genericInterface,
  2559  	xFilesFactor float64,
  2560  ) (*unaryContextShifter, error) {
  2561  	return newMovingBinaryTransform(ctx, input, windowSize, "movingSum", xFilesFactor,
  2562  		movingImplementationFn(movingSumHelper))
  2563  }
  2564  
  2565  // movingAverage calculates the moving average of a metric (or metrics) over a time interval.
  2566  func movingAverage(
  2567  	ctx *common.Context,
  2568  	input singlePathSpec,
  2569  	windowSize genericInterface,
  2570  	xFilesFactor float64,
  2571  ) (*unaryContextShifter, error) {
  2572  	return newMovingBinaryTransform(ctx, input, windowSize, "movingAverage", xFilesFactor,
  2573  		movingImplementationFn(movingAverageHelper))
  2574  }
  2575  
  2576  // movingMax calculates the moving maximum of a metric (or metrics) over a time interval.
  2577  func movingMax(
  2578  	ctx *common.Context,
  2579  	input singlePathSpec,
  2580  	windowSize genericInterface,
  2581  	xFilesFactor float64,
  2582  ) (*unaryContextShifter, error) {
  2583  	return newMovingBinaryTransform(ctx, input, windowSize, "movingMax", xFilesFactor,
  2584  		movingImplementationFn(movingMaxHelper))
  2585  }
  2586  
  2587  // movingMin calculates the moving minimum of a metric (or metrics) over a time interval.
  2588  func movingMin(
  2589  	ctx *common.Context,
  2590  	input singlePathSpec,
  2591  	windowSize genericInterface,
  2592  	xFilesFactor float64,
  2593  ) (*unaryContextShifter, error) {
  2594  	return newMovingBinaryTransform(ctx, input, windowSize, "movingMin", xFilesFactor,
  2595  		movingImplementationFn(movingMinHelper))
  2596  }
  2597  
  2598  func movingWindow(
  2599  	ctx *common.Context,
  2600  	input singlePathSpec,
  2601  	windowSize genericInterface,
  2602  	fname string,
  2603  	xFilesFactor float64,
  2604  ) (*unaryContextShifter, error) {
  2605  	switch fname {
  2606  	case avgFnName, averageFnName:
  2607  		return movingAverage(ctx, input, windowSize, xFilesFactor)
  2608  	case maxFnName:
  2609  		return movingMax(ctx, input, windowSize, xFilesFactor)
  2610  	case medianFnName:
  2611  		return movingMedian(ctx, input, windowSize, xFilesFactor)
  2612  	case minFnName:
  2613  		return movingMin(ctx, input, windowSize, xFilesFactor)
  2614  	case sumFnName:
  2615  		return movingSum(ctx, input, windowSize, xFilesFactor)
  2616  	default:
  2617  		err := xerrors.NewInvalidParamsError(fmt.Errorf("movingWindow doesn't support %v function", fname))
  2618  		return nil, err
  2619  	}
  2620  }
  2621  
  2622  // legendValue takes one metric or a wildcard seriesList and a string in quotes.
  2623  // Appends a value to the metric name in the legend.  Currently one or several of:
  2624  // "last", "avg", "total", "min", "max".
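        // For example (illustrative), legendValue(servers.*.load, "avg") renames
        // each series to something like "servers.host1.load (avg: 1.25)".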
  2625  func legendValue(_ *common.Context, seriesList singlePathSpec, valueType string) (ts.SeriesList, error) {
  2626  	sa := ts.SeriesReducerApproach(valueType)
  2627  	reducer, ok := sa.SafeReducer()
  2628  	if !ok {
  2629  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("invalid function %s", valueType))
  2630  	}
  2631  
  2632  	results := make([]*ts.Series, 0, len(seriesList.Values))
  2633  	for _, series := range seriesList.Values {
  2634  		value := reducer(series)
  2635  		newName := fmt.Sprintf("%s (%s: "+common.FloatingPointFormat+")", series.Name(), valueType, value)
  2636  		renamed := series.RenamedTo(newName)
  2637  		results = append(results, renamed)
  2638  	}
  2639  
  2640  	r := ts.SeriesList(seriesList)
  2641  	r.Values = results
  2642  	return r, nil
  2643  }
  2644  
  2645  func getStatLen(v float64) int {
  2646  	if math.IsNaN(v) {
  2647  		return 4
  2648  	}
  2649  	return len(fmt.Sprintf("%d", int(v))) + 3
  2650  }
  2651  
  2652  func toCactiStyle(v float64) string {
  2653  	if math.IsNaN(v) {
  2654  		return "nan"
  2655  	}
  2656  	return fmt.Sprintf(cactiStyleFormat, v)
  2657  }
  2658  
  2659  func findAllLens(seriesList ts.SeriesList) (int, int, int, int) {
  2660  	var nameLen, lastLen, maxLen, minLen float64
  2661  	for _, series := range seriesList.Values {
  2662  		name, min, max, last := series.Name(), series.SafeMin(), series.SafeMax(), series.SafeLastValue()
  2663  		nameLen = math.Max(nameLen, float64(len(name)))
  2664  		lastLen = math.Max(lastLen, float64(getStatLen(last)))
  2665  		maxLen = math.Max(maxLen, float64(getStatLen(max)))
  2666  		minLen = math.Max(minLen, float64(getStatLen(min)))
  2667  	}
  2668  	return int(nameLen), int(lastLen) + 3, int(maxLen) + 3, int(minLen) + 3
  2669  }
  2670  
  2671  // cactiStyle takes a series list and modifies the aliases to provide column aligned
  2672  // output with Current, Max, and Min values in the style of cacti.
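        // For example (illustrative), a series may be renamed to
        // "web01.cpu  Current:1.20  Max:9.80  Min:0.10 ", with each column
        // padded to the widest entry across the list.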
  2673  func cactiStyle(_ *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2674  	if len(seriesList.Values) == 0 {
  2675  		return ts.NewSeriesList(), common.ErrEmptySeriesList
  2676  	}
  2677  
  2678  	nameLen, lastLen, maxLen, minLen := findAllLens(ts.SeriesList(seriesList))
  2679  	results := make([]*ts.Series, 0, len(seriesList.Values))
  2680  	for _, series := range seriesList.Values {
  2681  		name := series.Name()
  2682  		last := toCactiStyle(series.SafeLastValue())
  2683  		max := toCactiStyle(series.SafeMax())
  2684  		min := toCactiStyle(series.SafeMin())
  2685  
  2686  		newName := fmt.Sprintf(
  2687  			"%*s Current:%*s Max:%*s Min:%*s ",
  2688  			-nameLen, name,
  2689  			-lastLen, last,
  2690  			-maxLen, max,
  2691  			-minLen, min,
  2692  		)
  2693  		renamed := series.RenamedTo(newName)
  2694  		results = append(results, renamed)
  2695  	}
  2696  
  2697  	r := ts.SeriesList(seriesList)
  2698  	r.Values = results
  2699  	return r, nil
  2700  }
  2701  
  2702  // consolidateBy takes one metric or a wildcard seriesList and a consolidation
  2703  // function name. Valid function names are "sum", "average", "min", and "max".
  2704  // When a graph is drawn where the width of the graph in pixels is smaller than
  2705  // the number of data points to be graphed, m3 consolidates the values
  2706  // to prevent line overlap. The consolidateBy() function changes the consolidation
  2707  // function from the default of "average" to one of "sum", "max", or "min".
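        // For example (illustrative), consolidateBy(requests.count, "max") keeps
        // peaks visible when more datapoints exist than the rendered graph can
        // display.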
  2708  func consolidateBy(
  2709  	ctx *common.Context,
  2710  	seriesList singlePathSpec,
  2711  	consolidationApproach string,
  2712  ) (ts.SeriesList, error) {
  2713  	ca := ts.ConsolidationApproach(consolidationApproach)
  2714  	cf, ok := ca.SafeFunc()
  2715  	if !ok {
  2716  		err := xerrors.NewInvalidParamsError(fmt.Errorf("invalid consolidation approach %s", consolidationApproach))
  2717  		return ts.NewSeriesList(), err
  2718  	}
  2719  
  2720  	results := make([]*ts.Series, 0, len(seriesList.Values))
  2721  	for _, series := range seriesList.Values {
  2722  		newName := fmt.Sprintf("consolidateBy(%s,%q)", series.Name(), consolidationApproach)
  2723  
  2724  		newSeries := series.RenamedTo(newName)
  2725  		newSeries.SetConsolidationFunc(cf)
  2726  
  2727  		// Check if the datapoints need to be resized to fit max data points so
  2728  		// that default LTTB downsampling is not applied if downsampling needs to
  2729  		// occur when the series is rendered.
  2730  		resizedSeries, resized := newSeries.ResizeToMaxDataPoints(ctx.MaxDataPoints, cf)
  2731  		if resized {
  2732  			newSeries = resizedSeries
  2733  		}
  2734  
  2735  		results = append(results, newSeries)
  2736  	}
  2737  
  2738  	r := ts.SeriesList(seriesList)
  2739  	r.Values = results
  2740  	return r, nil
  2741  }
  2742  
  2743  // cumulative is an alias for consolidateBy(series, 'sum')
  2744  func cumulative(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2745  	return consolidateBy(ctx, seriesList, "sum")
  2746  }
  2747  
  2748  // offsetToZero offsets a metric or wildcard seriesList by subtracting the minimum
  2749  // value in the series from each data point.
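//
// This makes it easier to compare the shape of series whose absolute values
// differ widely; for example (hypothetical metric name):
//	offsetToZero(service.latency.p99)
// shifts the series so that its minimum value becomes zero.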
  2750  func offsetToZero(ctx *common.Context, seriesList singlePathSpec) (ts.SeriesList, error) {
  2751  	results := make([]*ts.Series, len(seriesList.Values))
  2752  	for idx, series := range seriesList.Values {
  2753  		minimum := series.SafeMin()
  2754  		numSteps := series.Len()
  2755  		vals := ts.NewValues(ctx, series.MillisPerStep(), numSteps)
  2756  		if !math.IsNaN(minimum) {
  2757  			for i := 0; i < numSteps; i++ {
  2758  				v := series.ValueAt(i)
  2759  				if !math.IsNaN(v) {
  2760  					vals.SetValueAt(i, v-minimum)
  2761  				}
  2762  			}
  2763  		}
  2764  		name := fmt.Sprintf("offsetToZero(%s)", series.Name())
  2765  		series := ts.NewSeries(ctx, name, series.StartTime(), vals)
  2766  		results[idx] = series
  2767  	}
  2768  
  2769  	r := ts.SeriesList(seriesList)
  2770  	r.Values = results
  2771  	return r, nil
  2772  }
  2773  
  2774  // timeFunction returns the timestamp for each X value.
  2775  // Note: step is measured in seconds.
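//
// For example (hypothetical label):
//	time("now", 60)
// produces a single series named "now" whose value at each 60-second step is
// that step's Unix timestamp in seconds.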
  2776  func timeFunction(ctx *common.Context, name string, step int) (ts.SeriesList, error) {
  2777  	if step <= 0 {
  2778  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(
  2779  			fmt.Errorf("step must be a positive int but instead is %d", step))
  2780  	}
  2781  
  2782  	stepSizeInMilli := step * millisPerSecond
  2783  	numSteps := ts.NumSteps(ctx.StartTime, ctx.EndTime, stepSizeInMilli)
  2784  	vals := ts.NewValues(ctx, stepSizeInMilli, numSteps)
  2785  	start := ctx.StartTime.Truncate(time.Second)
  2786  	for current, index := start.Unix(), 0; index < numSteps; index++ {
  2787  		vals.SetValueAt(index, float64(current))
  2788  		current += int64(step)
  2789  	}
  2790  
  2791  	series := ts.NewSeries(ctx, name, start, vals)
  2792  	return ts.NewSeriesListWithSeries(series), nil
  2793  }
  2794  
  2795  // dashed draws the selected metrics with a dashed line whose segments have length dashLength.
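//
// For example (hypothetical metric name):
//	dashed(server.cpu.load, 2.5)
// only changes how the series is named and rendered; the underlying datapoints
// are untouched.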
  2796  func dashed(_ *common.Context, seriesList singlePathSpec, dashLength float64) (ts.SeriesList, error) {
  2797  	if dashLength <= 0 {
  2798  		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(
  2799  			fmt.Errorf("expected a positive dashLength but got %f", dashLength))
  2800  	}
  2801  
  2802  	results := make([]*ts.Series, len(seriesList.Values))
  2803  	for idx, s := range seriesList.Values {
  2804  		name := fmt.Sprintf("dashed(%s, "+common.FloatingPointFormat+")", s.Name(), dashLength)
  2805  		renamed := s.RenamedTo(name)
  2806  		results[idx] = renamed
  2807  	}
  2808  
  2809  	r := ts.SeriesList(seriesList)
  2810  	r.Values = results
  2811  	return r, nil
  2812  }
  2813  
  2814  // threshold draws a horizontal line at the given value across the graph, optionally renaming it to label.
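//
// For example (hypothetical usage):
//	threshold(120.0, "max connections", "red")
// returns a constant series at 120.0 renamed to "max connections"; note that
// in this implementation the color argument does not affect the returned
// series.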
  2815  func threshold(ctx *common.Context, value float64, label string, color string) (ts.SeriesList, error) {
  2816  	seriesList, err := constantLine(ctx, value)
  2817  	if err != nil {
  2818  		err := xerrors.NewInvalidParamsError(fmt.Errorf(
  2819  			"error applying threshold function, error=%v", err))
  2820  		return ts.NewSeriesList(), err
  2821  	}
  2822  
  2823  	series := seriesList.Values[0]
  2824  	if label != "" {
  2825  		series = series.RenamedTo(label)
  2826  	}
  2827  
  2828  	return ts.NewSeriesListWithSeries(series), nil
  2829  }
  2830  
  2831  func init() {
  2832  	// functions - in alpha ordering
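	// Note: the keys in the WithDefaultParams maps below are the 1-based
	// positions of each function's arguments excluding the context (e.g. for
	// sortBy, fn is position 2 and reverse is position 3).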
  2833  	MustRegisterFunction(absolute)
  2834  	MustRegisterFunction(aggregate)
  2835  	MustRegisterFunction(aggregateLine).WithDefaultParams(map[uint8]interface{}{
  2836  		2: "avg", // f
  2837  	})
  2838  	MustRegisterFunction(aggregateWithWildcards).WithDefaultParams(map[uint8]interface{}{
  2839  		3: -1, // positions
  2840  	})
  2841  	MustRegisterFunction(alias)
  2842  	MustRegisterFunction(aliasByMetric)
  2843  	MustRegisterFunction(aliasByNode)
  2844  	MustRegisterFunction(aliasSub)
  2845  	MustRegisterFunction(applyByNode).WithDefaultParams(map[uint8]interface{}{
  2846  		4: "", // newName
  2847  	})
  2848  	MustRegisterFunction(asPercent).WithDefaultParams(map[uint8]interface{}{
  2849  		2: nil, // total
  2850  	})
  2851  	MustRegisterFunction(averageAbove)
  2852  	MustRegisterFunction(averageBelow)
  2853  	MustRegisterFunction(averageSeries)
  2854  	MustRegisterFunction(averageSeriesWithWildcards).WithDefaultParams(map[uint8]interface{}{
  2855  		2: -1, // positions
  2856  	})
  2857  	MustRegisterFunction(cactiStyle)
  2858  	MustRegisterFunction(changed)
  2859  	MustRegisterFunction(consolidateBy)
  2860  	MustRegisterFunction(constantLine)
  2861  	MustRegisterFunction(countSeries)
  2862  	MustRegisterFunction(cumulative)
  2863  	MustRegisterFunction(currentAbove)
  2864  	MustRegisterFunction(currentBelow)
  2865  	MustRegisterFunction(dashed).WithDefaultParams(map[uint8]interface{}{
  2866  		2: 5.0, // dashLength
  2867  	})
  2868  	MustRegisterFunction(delay)
  2869  	MustRegisterFunction(derivative)
  2870  	MustRegisterFunction(diffSeries)
  2871  	MustRegisterFunction(divideSeries)
  2872  	MustRegisterFunction(divideSeriesLists)
  2873  	MustRegisterFunction(exclude)
  2874  	MustRegisterFunction(exponentialMovingAverage).
  2875  		WithoutUnaryContextShifterSkipFetchOptimization()
  2876  	MustRegisterFunction(fallbackSeries)
  2877  	MustRegisterFunction(filterSeries)
  2878  	MustRegisterFunction(grep)
  2879  	MustRegisterFunction(group)
  2880  	MustRegisterFunction(groupByNode).WithDefaultParams(map[uint8]interface{}{
  2881  		3: "average", // fname
  2882  	})
  2883  	MustRegisterFunction(groupByNodes)
  2884  	MustRegisterFunction(highest).WithDefaultParams(map[uint8]interface{}{
  2885  		2: 1,         // n,
  2886  		3: "average", // f
  2887  	})
  2888  	MustRegisterFunction(highestAverage)
  2889  	MustRegisterFunction(highestCurrent)
  2890  	MustRegisterFunction(highestMax)
  2891  	MustRegisterFunction(hitcount).WithDefaultParams(map[uint8]interface{}{
  2892  		3: false, // alignToInterval
  2893  	})
  2894  	MustRegisterFunction(holtWintersAberration)
  2895  	MustRegisterFunction(holtWintersConfidenceBands)
  2896  	MustRegisterFunction(holtWintersForecast)
  2897  	MustRegisterFunction(identity)
  2898  	MustRegisterFunction(integral)
  2899  	MustRegisterFunction(integralByInterval)
  2900  	MustRegisterFunction(interpolate).WithDefaultParams(map[uint8]interface{}{
  2901  		2: -1, // limit
  2902  	})
  2903  	MustRegisterFunction(invert)
  2904  	MustRegisterFunction(isNonNull)
  2905  	MustRegisterFunction(keepLastValue).WithDefaultParams(map[uint8]interface{}{
  2906  		2: -1, // limit
  2907  	})
  2908  	MustRegisterFunction(legendValue)
  2909  	MustRegisterFunction(limit)
  2910  	MustRegisterFunction(logarithm).WithDefaultParams(map[uint8]interface{}{
  2911  		2: 10.0, // base
  2912  	})
  2913  	MustRegisterFunction(lowest).WithDefaultParams(map[uint8]interface{}{
  2914  		2: 1,         // n,
  2915  		3: "average", // f
  2916  	})
  2917  	MustRegisterFunction(lowestAverage)
  2918  	MustRegisterFunction(lowestCurrent)
  2919  	MustRegisterFunction(maxSeries)
  2920  	MustRegisterFunction(maximumAbove)
  2921  	MustRegisterFunction(minSeries)
  2922  	MustRegisterFunction(minimumAbove)
  2923  	MustRegisterFunction(mostDeviant)
  2924  	MustRegisterFunction(movingAverage).
  2925  		WithDefaultParams(map[uint8]interface{}{
  2926  			3: defaultXFilesFactor, // XFilesFactor
  2927  		}).
  2928  		WithoutUnaryContextShifterSkipFetchOptimization()
  2929  	MustRegisterFunction(movingMedian).
  2930  		WithDefaultParams(map[uint8]interface{}{
  2931  			3: defaultXFilesFactor, // XFilesFactor
  2932  		}).
  2933  		WithoutUnaryContextShifterSkipFetchOptimization()
  2934  	MustRegisterFunction(movingSum).
  2935  		WithDefaultParams(map[uint8]interface{}{
  2936  			3: defaultXFilesFactor, // XFilesFactor
  2937  		}).
  2938  		WithoutUnaryContextShifterSkipFetchOptimization()
  2939  	MustRegisterFunction(movingMax).
  2940  		WithDefaultParams(map[uint8]interface{}{
  2941  			3: defaultXFilesFactor, // XFilesFactor
  2942  		}).
  2943  		WithoutUnaryContextShifterSkipFetchOptimization()
  2944  	MustRegisterFunction(movingMin).
  2945  		WithDefaultParams(map[uint8]interface{}{
  2946  			3: defaultXFilesFactor, // XFilesFactor
  2947  		}).
  2948  		WithoutUnaryContextShifterSkipFetchOptimization()
  2949  	MustRegisterFunction(movingWindow).
  2950  		WithDefaultParams(map[uint8]interface{}{
  2951  			3: "avg",
  2952  			4: defaultXFilesFactor, // XFilesFactor
  2953  		}).
  2954  		WithoutUnaryContextShifterSkipFetchOptimization()
  2955  	MustRegisterFunction(multiplySeries)
  2956  	MustRegisterFunction(multiplySeriesWithWildcards).WithDefaultParams(map[uint8]interface{}{
  2957  		2: -1, // positions
  2958  	})
  2959  	MustRegisterFunction(nonNegativeDerivative).WithDefaultParams(map[uint8]interface{}{
  2960  		2: math.NaN(), // maxValue
  2961  	})
  2962  	MustRegisterFunction(nPercentile)
  2963  	MustRegisterFunction(offset)
  2964  	MustRegisterFunction(offsetToZero)
  2965  	MustRegisterFunction(percentileOfSeries).WithDefaultParams(map[uint8]interface{}{
  2966  		3: false, // interpolate
  2967  	})
  2968  	MustRegisterFunction(perSecond).WithDefaultParams(map[uint8]interface{}{
  2969  		2: math.NaN(), // maxValue
  2970  	})
  2971  	MustRegisterFunction(pow)
  2972  	MustRegisterFunction(powSeries)
  2973  	MustRegisterFunction(rangeOfSeries)
  2974  	MustRegisterFunction(randomWalkFunction).WithDefaultParams(map[uint8]interface{}{
  2975  		2: 60, // step
  2976  	})
  2977  	MustRegisterFunction(removeAbovePercentile)
  2978  	MustRegisterFunction(removeAboveValue)
  2979  	MustRegisterFunction(removeBelowPercentile)
  2980  	MustRegisterFunction(removeBelowValue)
  2981  	MustRegisterFunction(removeEmptySeries).WithDefaultParams(map[uint8]interface{}{
  2982  		2: 0.0, // xFilesFactor
  2983  	})
  2984  	MustRegisterFunction(roundFunction).WithDefaultParams(map[uint8]interface{}{
  2985  		2: 0, // precision
  2986  	})
  2987  	MustRegisterFunction(scale)
  2988  	MustRegisterFunction(scaleToSeconds)
  2989  	MustRegisterFunction(sortBy).WithDefaultParams(map[uint8]interface{}{
  2990  		2: "average", // fn
  2991  		3: false,     // reverse
  2992  	})
  2993  	MustRegisterFunction(sortByMaxima)
  2994  	MustRegisterFunction(sortByMinima)
  2995  	MustRegisterFunction(sortByName).WithDefaultParams(map[uint8]interface{}{
  2996  		2: false, // natural
  2997  		3: false, // reverse
  2998  	})
  2999  	MustRegisterFunction(sortByTotal)
  3000  	MustRegisterFunction(squareRoot)
  3001  	MustRegisterFunction(stdev).WithDefaultParams(map[uint8]interface{}{
  3002  		3: 0.1, // windowTolerance
  3003  	})
  3004  	MustRegisterFunction(stddevSeries)
  3005  	MustRegisterFunction(substr).WithDefaultParams(map[uint8]interface{}{
  3006  		2: 0, // start
  3007  		3: 0, // stop
  3008  	})
  3009  	MustRegisterFunction(summarize).WithDefaultParams(map[uint8]interface{}{
  3010  		3: "",    // fname
  3011  		4: false, // alignToFrom
  3012  	})
  3013  	MustRegisterFunction(smartSummarize).WithDefaultParams(map[uint8]interface{}{
  3014  		3: "", // fname
  3015  	})
  3016  	MustRegisterFunction(sumSeries)
  3017  	MustRegisterFunction(sumSeriesWithWildcards).WithDefaultParams(map[uint8]interface{}{
  3018  		2: -1, // positions
  3019  	})
  3020  	MustRegisterFunction(sustainedAbove)
  3021  	MustRegisterFunction(sustainedBelow)
  3022  	MustRegisterFunction(threshold).WithDefaultParams(map[uint8]interface{}{
  3023  		2: "", // label
  3024  		3: "", // color
  3025  	})
  3026  	MustRegisterFunction(timeFunction).WithDefaultParams(map[uint8]interface{}{
  3027  		2: 60, // step
  3028  	})
  3029  	MustRegisterFunction(timeShift).WithDefaultParams(map[uint8]interface{}{
  3030  		3: true,  // resetEnd
  3031  		4: false, // alignDst
  3032  	})
  3033  	MustRegisterFunction(timeSlice).WithDefaultParams(map[uint8]interface{}{
  3034  		3: "now", // endTime
  3035  	})
  3036  	MustRegisterFunction(transformNull).WithDefaultParams(map[uint8]interface{}{
  3037  		2: 0.0, // defaultValue
  3038  	})
  3039  	MustRegisterFunction(useSeriesAbove)
  3040  	MustRegisterFunction(weightedAverage)
  3041  
  3042  	// alias functions - in alpha ordering
  3043  	MustRegisterAliasedFunction("abs", absolute)
  3044  	MustRegisterAliasedFunction("aliasByTags", aliasByNode)
  3045  	MustRegisterAliasedFunction("avg", averageSeries)
  3046  	MustRegisterAliasedFunction("log", logarithm)
  3047  	MustRegisterAliasedFunction("max", maxSeries)
  3048  	MustRegisterAliasedFunction("min", minSeries)
  3049  	MustRegisterAliasedFunction("randomWalk", randomWalkFunction)
  3050  	MustRegisterAliasedFunction("round", roundFunction)
  3051  	MustRegisterAliasedFunction("sum", sumSeries)
  3052  	MustRegisterAliasedFunction("time", timeFunction)
  3053  }