github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/query/graphite/native/aggregation_functions.go

// Copyright (c) 2019 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package native

import (
	"fmt"
	"math"
	"runtime"
	"sort"
	"strings"
	"sync"

	"github.com/m3db/m3/src/query/block"
	"github.com/m3db/m3/src/query/graphite/common"
	"github.com/m3db/m3/src/query/graphite/ts"
	xerrors "github.com/m3db/m3/src/x/errors"
)

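// wrapPathExpr formats the name of a combined series by wrapping the joined
// path expressions of the input series in the given function name,
// e.g. "sumSeries(foo.bar,foo.baz)".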
func wrapPathExpr(wrapper string, series ts.SeriesList) string {
	return fmt.Sprintf("%s(%s)", wrapper, joinPathExpr(series))
}

// sumSeries adds metrics together and returns the sum at each datapoint.
// If the time series have different intervals, the coarsest interval will be used.
func sumSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(sumSeriesFnName, ts.SeriesList(series)), ts.Sum)
}

// diffSeries subtracts all but the first series from the first series.
// If the time series have different intervals, the coarsest interval will be used.
func diffSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	transformedSeries := series
	numSeries := len(series.Values)
	if numSeries > 1 {
		transformedSeries.Values = make([]*ts.Series, numSeries)
		transformedSeries.Values[0] = series.Values[0]
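		// Negate every series after the first so that the summation below
		// computes first - second - third - ...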
		for i := 1; i < len(series.Values); i++ {
			res, err := transform(
				ctx,
				singlePathSpec{Values: []*ts.Series{series.Values[i]}},
				func(n string) string { return n },
				common.MaintainNaNTransformer(func(v float64) float64 { return -v }),
			)
			if err != nil {
				return ts.NewSeriesList(), err
			}
			transformedSeries.Values[i] = res.Values[0]
		}
	}

	return combineSeries(ctx, transformedSeries, wrapPathExpr(diffSeriesFnName, ts.SeriesList(series)), ts.Sum)
}

// multiplySeries multiplies metrics together and returns the product at each datapoint.
// If the time series have different intervals, the coarsest interval will be used.
func multiplySeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(multiplySeriesFnName, ts.SeriesList(series)), ts.Mul)
}

// averageSeries takes a list of series and returns a new series containing the
// average of all values at each datapoint.
func averageSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(averageSeriesFnName, ts.SeriesList(series)), ts.Avg)
}

// minSeries takes a list of series and returns a new series containing the
// minimum value across the series at each datapoint.
func minSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(minSeriesFnName, ts.SeriesList(series)), ts.Min)
}

// powSeries takes a list of series and returns a new series containing the
// pow value across the series at each datapoint.
// nolint: gocritic
func powSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(powSeriesFnName, ts.SeriesList(series)), ts.Pow)
}

// maxSeries takes a list of series and returns a new series containing the
// maximum value across the series at each datapoint.
func maxSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, wrapPathExpr(maxSeriesFnName, ts.SeriesList(series)), ts.Max)
}

// medianSeries takes a list of series and returns a new series containing the
// median value across the series at each datapoint.
func medianSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	if len(series.Values) == 0 {
		return ts.NewSeriesList(), nil
	}
	normalized, start, end, millisPerStep, err := common.Normalize(ctx, ts.SeriesList(series))
	if err != nil {
		return ts.NewSeriesList(), err
	}
	numSteps := ts.NumSteps(start, end, millisPerStep)
	values := ts.NewValues(ctx, millisPerStep, numSteps)

	valuesAtTime := make([]float64, len(normalized.Values))
	for i := 0; i < numSteps; i++ {
		for j, series := range normalized.Values {
			valuesAtTime[j] = series.ValueAt(i)
		}
		values.SetValueAt(i, ts.Median(valuesAtTime, len(valuesAtTime)))
	}

	name := wrapPathExpr(medianSeriesFnName, ts.SeriesList(series))
	output := ts.NewSeries(ctx, name, start, values)
	return ts.SeriesList{
		Values:   []*ts.Series{output},
		Metadata: series.Metadata,
	}, nil
}

// lastSeries takes a list of series and returns a new series containing the
// last value at each datapoint.
func lastSeries(ctx *common.Context, series multiplePathSpecs) (ts.SeriesList, error) {
	return combineSeries(ctx, series, joinPathExpr(ts.SeriesList(series)), ts.Last)
}

// standardDeviationHelper returns the population standard deviation of the
// non-NaN values in the given slice.
func standardDeviationHelper(values []float64) float64 {
	var count, sum float64

	for _, value := range values {
		if !math.IsNaN(value) {
			sum += value
			count++
		}
	}
	if count == 0 {
		return math.NaN()
	}
	avg := sum / count

	m2 := float64(0)
	for _, value := range values {
		if !math.IsNaN(value) {
			diff := value - avg
			m2 += diff * diff
		}
	}

	variance := m2 / count

	return math.Sqrt(variance)
}

// stddevSeries takes a list of series and returns a new series containing the
// standard deviation at each datapoint.
// At step n, stddevSeries will make a list of every series' nth value,
// and calculate the standard deviation of that list.
// The output is a series list containing a single series.
func stddevSeries(ctx *common.Context, seriesList multiplePathSpecs) (ts.SeriesList, error) {
	if len(seriesList.Values) == 0 {
		return ts.NewSeriesList(), nil
	}

	firstSeries := seriesList.Values[0]
	numSteps := firstSeries.Len()
	values := ts.NewValues(ctx, firstSeries.MillisPerStep(), numSteps)
	valuesAtTime := make([]float64, 0, numSteps)
	for i := 0; i < numSteps; i++ {
		valuesAtTime = valuesAtTime[:0]
		for _, series := range seriesList.Values {
			if l := series.Len(); l != numSteps {
				return ts.NewSeriesList(), fmt.Errorf("mismatched series length, expected %d, got %d", numSteps, l)
			}
			valuesAtTime = append(valuesAtTime, series.ValueAt(i))
		}
		values.SetValueAt(i, standardDeviationHelper(valuesAtTime))
	}

	name := wrapPathExpr(stddevSeriesFnName, ts.SeriesList(seriesList))
	output := ts.NewSeries(ctx, name, firstSeries.StartTime(), values)
	return ts.SeriesList{
		Values:   []*ts.Series{output},
		Metadata: seriesList.Metadata,
	}, nil
}

func divideSeriesHelper(ctx *common.Context, dividendSeries, divisorSeries *ts.Series, metadata block.ResultMetadata) (*ts.Series, error) {
	normalized, minBegin, _, lcmMillisPerStep, err := common.Normalize(ctx, ts.SeriesList{
		Values:   []*ts.Series{dividendSeries, divisorSeries},
		Metadata: metadata,
	})
	if err != nil {
		return nil, err
	}

	// NB(bl): Normalized must give back exactly two series of the same length.
	dividend, divisor := normalized.Values[0], normalized.Values[1]
	numSteps := dividend.Len()
	vals := ts.NewValues(ctx, lcmMillisPerStep, numSteps)
	for i := 0; i < numSteps; i++ {
		dividendVal := dividend.ValueAt(i)
		divisorVal := divisor.ValueAt(i)
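		// Only compute a quotient when both datapoints are defined and the
		// divisor is non-zero; otherwise the datapoint is left unset.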
		if !math.IsNaN(dividendVal) && !math.IsNaN(divisorVal) && divisorVal != 0 {
			value := dividendVal / divisorVal
			vals.SetValueAt(i, value)
		}
	}

	// Each output series is named divideSeries(<dividend>,<divisor>), even when it is
	// generated by divideSeriesLists, to match the Graphite source (link below):
	// https://github.com/graphite-project/graphite-web/blob/17a34e7966f7a46eded30c2362765c74eea899cb/webapp/graphite/render/functions.py#L901
	name := fmt.Sprintf("divideSeries(%s,%s)", dividend.Name(), divisor.Name())
	quotientSeries := ts.NewSeries(ctx, name, minBegin, vals)
	return quotientSeries, nil
}

// divideSeries divides each series in the dividend series list by a single divisor series.
func divideSeries(ctx *common.Context, dividendSeriesList, divisorSeriesList singlePathSpec) (ts.SeriesList, error) {
	if len(dividendSeriesList.Values) == 0 || len(divisorSeriesList.Values) == 0 {
		return ts.NewSeriesList(), nil
	}
	if len(divisorSeriesList.Values) != 1 {
		err := xerrors.NewInvalidParamsError(fmt.Errorf(
			"divideSeries second argument must reference exactly one series but instead has %d",
			len(divisorSeriesList.Values)))
		return ts.NewSeriesList(), err
	}

	divisorSeries := divisorSeriesList.Values[0]
	results := make([]*ts.Series, len(dividendSeriesList.Values))
	for idx, dividendSeries := range dividendSeriesList.Values {
		metadata := divisorSeriesList.Metadata.CombineMetadata(dividendSeriesList.Metadata)
		quotientSeries, err := divideSeriesHelper(ctx, dividendSeries, divisorSeries, metadata)
		if err != nil {
			return ts.NewSeriesList(), err
		}
		results[idx] = quotientSeries
	}

	r := ts.SeriesList(dividendSeriesList)
	r.Values = results
	return r, nil
}

// divideSeriesLists divides each series in one series list by the series at the
// same index in another series list of equal length.
func divideSeriesLists(ctx *common.Context, dividendSeriesList, divisorSeriesList singlePathSpec) (ts.SeriesList, error) {
	if len(dividendSeriesList.Values) != len(divisorSeriesList.Values) {
		err := xerrors.NewInvalidParamsError(fmt.Errorf(
			"divideSeriesLists both SeriesLists must have exactly the same length"))
		return ts.NewSeriesList(), err
	}

	// If either list is not sorted yet then apply a default sort for deterministic results.
	if !dividendSeriesList.SortApplied {
		// Use sort.Stable for deterministic output.
		sort.Stable(ts.SeriesByName(dividendSeriesList.Values))
		dividendSeriesList.SortApplied = true
	}
	if !divisorSeriesList.SortApplied {
		// Use sort.Stable for deterministic output.
		sort.Stable(ts.SeriesByName(divisorSeriesList.Values))
		divisorSeriesList.SortApplied = true
	}

	results := make([]*ts.Series, len(dividendSeriesList.Values))
	for idx, dividendSeries := range dividendSeriesList.Values {
		divisorSeries := divisorSeriesList.Values[idx]
		metadata := divisorSeriesList.Metadata.CombineMetadata(dividendSeriesList.Metadata)
		quotientSeries, err := divideSeriesHelper(ctx, dividendSeries, divisorSeries, metadata)
		if err != nil {
			return ts.NewSeriesList(), err
		}
		results[idx] = quotientSeries
	}

	r := ts.SeriesList(dividendSeriesList)
	// Set sorted as we sorted any input that wasn't already sorted.
	r.SortApplied = true
	r.Values = results
	return r, nil
}

// aggregate takes a list of series and returns a new series containing the
// value aggregated across the series at each datapoint using the specified function.
// Supported aggregation functions are average (or avg), median, sum (or total),
// min, max, diff, stddev (or stdev), count, range (or rangeOf), multiply and
// last (or current). avg_zero is not currently supported.
func aggregate(ctx *common.Context, series singlePathSpec, fname string) (ts.SeriesList, error) {
	switch fname {
	case emptyFnName, sumFnName, sumSeriesFnName, totalFnName:
		return sumSeries(ctx, multiplePathSpecs(series))
	case minFnName, minSeriesFnName:
		return minSeries(ctx, multiplePathSpecs(series))
	case maxFnName, maxSeriesFnName:
		return maxSeries(ctx, multiplePathSpecs(series))
	case medianFnName, medianSeriesFnName:
		return medianSeries(ctx, multiplePathSpecs(series))
	case avgFnName, averageFnName, averageSeriesFnName:
		return averageSeries(ctx, multiplePathSpecs(series))
	case multiplyFnName, multiplySeriesFnName:
		return multiplySeries(ctx, multiplePathSpecs(series))
	case diffFnName, diffSeriesFnName:
		return diffSeries(ctx, multiplePathSpecs(series))
	case countFnName, countSeriesFnName:
		return countSeries(ctx, multiplePathSpecs(series))
	case rangeFnName, rangeOfFnName, rangeOfSeriesFnName:
		return rangeOfSeries(ctx, series)
	case lastFnName, currentFnName:
		return lastSeries(ctx, multiplePathSpecs(series))
	case stddevFnName, stdevFnName, stddevSeriesFnName:
		return stddevSeries(ctx, multiplePathSpecs(series))
	default:
		// Median: the existing movingMedian() implementation returns a series result
		// that is not compatible here; skip support for now.
		// avg_zero is not implemented; skip support until actual use cases are identified.
		return ts.NewSeriesList(), xerrors.NewInvalidParamsError(fmt.Errorf("invalid func %s", fname))
	}
}

// averageSeriesWithWildcards splits the given set of series into sub-groupings
// based on wildcard matches in the hierarchy, then averages the values in each
// grouping.
func averageSeriesWithWildcards(
	ctx *common.Context,
	series singlePathSpec,
	positions ...int,
) (ts.SeriesList, error) {
	return combineSeriesWithWildcards(ctx, series, positions, averageSpecificationFunc, ts.Avg)
}

// sumSeriesWithWildcards splits the given set of series into sub-groupings
// based on wildcard matches in the hierarchy, then sums the values in each
// grouping.
func sumSeriesWithWildcards(
	ctx *common.Context,
	series singlePathSpec,
	positions ...int,
) (ts.SeriesList, error) {
	return combineSeriesWithWildcards(ctx, series, positions, sumSpecificationFunc, ts.Sum)
}

// multiplySeriesWithWildcards splits the given set of series into sub-groupings
// based on wildcard matches in the hierarchy, then multiplies the values in each
// grouping.
func multiplySeriesWithWildcards(
	ctx *common.Context,
	series singlePathSpec,
	positions ...int,
) (ts.SeriesList, error) {
	return combineSeriesWithWildcards(ctx, series, positions, multiplyWithWildcardsSpecificationFunc, ts.Mul)
}

// aggregateWithWildcards splits the given set of series into sub-groupings
// based on wildcard matches in the hierarchy, then aggregates the values in
// each grouping with the given function.
// It is similar to combineSeriesWithWildcards but more general: it supports any
// aggregation function, while combineSeriesWithWildcards only supports
// aggregation with an existing ts.ConsolidationFunc.
func aggregateWithWildcards(
	ctx *common.Context,
	series singlePathSpec,
	fname string,
	positions ...int,
) (ts.SeriesList, error) {
	if len(series.Values) == 0 {
		return ts.SeriesList(series), nil
	}

	toAggregate := splitSeriesIntoSubgroups(series, positions)

	newSeries := make([]*ts.Series, 0, len(toAggregate))
	for name, toAggregateSeries := range toAggregate {
		seriesList := ts.SeriesList{
			Values:   toAggregateSeries,
			Metadata: series.Metadata,
		}
		aggregated, err := aggregate(ctx, singlePathSpec(seriesList), fname)
		if err != nil {
			return ts.NewSeriesList(), err
		}
		renamedSeries := aggregated.Values[0].RenamedTo(name)
		newSeries = append(newSeries, renamedSeries)
	}

	r := ts.SeriesList(series)

	r.Values = newSeries

	// Ranging over hash map to create results destroys
	// any sort order on the incoming series list
	r.SortApplied = false

	return r, nil
}

// combineSeriesWithWildcards splits the given set of series into sub-groupings
// based on wildcard matches in the hierarchy, then combines the values in each
// sub-grouping according to the provided consolidation function.
func combineSeriesWithWildcards(
	ctx *common.Context,
	series singlePathSpec,
	positions []int,
	sf specificationFunc,
	f ts.ConsolidationFunc,
) (ts.SeriesList, error) {
	if len(series.Values) == 0 {
		return ts.SeriesList(series), nil
	}

	toCombine := splitSeriesIntoSubgroups(series, positions)

	newSeries := make([]*ts.Series, 0, len(toCombine))
	for name, toCombineSeries := range toCombine {
		seriesList := ts.SeriesList{
			Values:   toCombineSeries,
			Metadata: series.Metadata,
		}
		combined, err := combineSeries(ctx, multiplePathSpecs(seriesList), name, f)
		if err != nil {
			return ts.NewSeriesList(), err
		}
		combined.Values[0].Specification = sf(seriesList)
		newSeries = append(newSeries, combined.Values...)
	}

	r := ts.SeriesList(series)

	r.Values = newSeries

	// Ranging over hash map to create results destroys
	// any sort order on the incoming series list
	r.SortApplied = false

	return r, nil
}

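// splitSeriesIntoSubgroups groups series by the name that remains after dropping
// the path parts at the given wildcard positions.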
func splitSeriesIntoSubgroups(series singlePathSpec, positions []int) map[string][]*ts.Series {
	var (
		toCombine = make(map[string][]*ts.Series)
		wildcards = make(map[int]struct{})
	)

	for _, position := range positions {
		wildcards[position] = struct{}{}
	}

	for _, series := range series.Values {
		var (
			parts    = strings.Split(series.Name(), ".")
			newParts = make([]string, 0, len(parts))
		)
		for i, part := range parts {
			if _, wildcard := wildcards[i]; !wildcard {
				newParts = append(newParts, part)
			}
		}

		newName := strings.Join(newParts, ".")
		toCombine[newName] = append(toCombine[newName], series)
	}

	return toCombine
}

// chunkArrayHelper splits a slice into at most numChunks chunks of roughly equal size.
func chunkArrayHelper(slice []string, numChunks int) [][]string {
	// Guard against a non-positive chunk count to avoid dividing by zero below.
	if numChunks < 1 {
		numChunks = 1
	}

	divided := make([][]string, 0, numChunks)

	chunkSize := (len(slice) + numChunks - 1) / numChunks

	for i := 0; i < len(slice); i += chunkSize {
		end := i + chunkSize

		if end > len(slice) {
			end = len(slice)
		}

		divided = append(divided, slice[i:end])
	}

	return divided
}

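// evaluateTarget compiles the given target expression with the native engine and
// executes it, returning the resulting series list. It requires ctx.Engine to be
// the native *Engine.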
func evaluateTarget(ctx *common.Context, target string) (ts.SeriesList, error) {
	eng, ok := ctx.Engine.(*Engine)
	if !ok {
		return ts.NewSeriesList(), fmt.Errorf("engine not native engine")
	}
	expression, err := eng.Compile(target)
	if err != nil {
		return ts.NewSeriesList(), err
	}
	return expression.Execute(ctx)
}

/*
applyByNode takes a seriesList and applies some complicated function (described by a string), replacing templates with unique
prefixes of keys from the seriesList (the key is all nodes up to the index given as `nodeNum`).

If the `newName` parameter is provided, the name of the resulting series will be given by that parameter, with any
"%" characters replaced by the unique prefix.

Example:

`applyByNode(servers.*.disk.bytes_free,1,"divideSeries(%.disk.bytes_free,sumSeries(%.disk.bytes_*))")`

Would find all series which match `servers.*.disk.bytes_free`, then trim them down to unique series up to the node
given by nodeNum, then fill them into the template function provided (replacing % by the prefixes).

Additional Examples:

Given keys of

- `stats.counts.haproxy.web.2XX`
- `stats.counts.haproxy.web.3XX`
- `stats.counts.haproxy.web.5XX`
- `stats.counts.haproxy.microservice.2XX`
- `stats.counts.haproxy.microservice.3XX`
- `stats.counts.haproxy.microservice.5XX`

The following will return the rate of 5XX's per service:

`applyByNode(stats.counts.haproxy.*.*XX, 3, "asPercent(%.5XX, sumSeries(%.*XX))", "%.pct_5XX")`

The output series would have keys `stats.counts.haproxy.web.pct_5XX` and `stats.counts.haproxy.microservice.pct_5XX`.
*/
func applyByNode(ctx *common.Context, seriesList singlePathSpec, nodeNum int, templateFunction string, newName string) (ts.SeriesList, error) {
	// using this as a set
	prefixMap := map[string]struct{}{}
	for _, series := range seriesList.Values {
		var (
			name = series.Name()

			partsSeen int
			prefix    string
		)

		for i, c := range name {
			if c == '.' {
				partsSeen++
				if partsSeen == nodeNum+1 {
					prefix = name[:i]
					break
				}
			}
		}

		if len(prefix) == 0 {
			continue
		}

		prefixMap[prefix] = struct{}{}
	}

	// transform to slice
	var prefixes []string
	for p := range prefixMap {
		prefixes = append(prefixes, p)
	}
	sort.Strings(prefixes)

	var (
		mu       sync.Mutex
		wg       sync.WaitGroup
		multiErr xerrors.MultiError

		output         = make([]*ts.Series, 0, len(prefixes))
		maxConcurrency = runtime.GOMAXPROCS(0) / 2
	)
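	// Evaluate the prefixes in chunks: each chunk's targets run concurrently and
	// the loop stops early if a previous chunk recorded an error.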
	for _, prefixChunk := range chunkArrayHelper(prefixes, maxConcurrency) {
		if multiErr.LastError() != nil {
			return ts.NewSeriesList(), multiErr.LastError()
		}

		for _, prefix := range prefixChunk {
			prefix := prefix // Capture for lambda.
			newTarget := strings.ReplaceAll(templateFunction, "%", prefix)
			wg.Add(1)
			go func() {
				defer wg.Done()
				resultSeriesList, err := evaluateTarget(ctx, newTarget)
				if err != nil {
					mu.Lock()
					multiErr = multiErr.Add(err)
					mu.Unlock()
					return
				}

				mu.Lock()
				for _, resultSeries := range resultSeriesList.Values {
					if newName != "" {
						resultSeries = resultSeries.RenamedTo(strings.ReplaceAll(newName, "%", prefix))
					}
					resultSeries.Specification = prefix
					output = append(output, resultSeries)
				}
				mu.Unlock()
			}()
		}
		wg.Wait()
	}

	// Retain metadata but we definitely did not retain sort order.
	r := ts.SeriesList(seriesList)
	r.Values = output
	r.SortApplied = false
	return r, nil
}

// groupByNode takes a serieslist and maps a callback to subgroups within as defined by a common node
//
//    &target=groupByNode(foo.by-function.*.*.cpu.load5,2,"sumSeries")
//
//  Would return multiple series which are each the result of applying the "sumSeries" function
//  to groups joined on the second node (0 indexed) resulting in a list of targets like
//
//    sumSeries(foo.by-function.server1.*.cpu.load5),sumSeries(foo.by-function.server2.*.cpu.load5),...
func groupByNode(ctx *common.Context, series singlePathSpec, node int, fname string) (ts.SeriesList, error) {
	return groupByNodes(ctx, series, fname, []int{node}...)
}

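// getAggregationKey builds the grouping key for a series by splitting its first
// path expression on '.' and joining the parts selected by nodes; negative nodes
// count from the end, and out-of-range nodes contribute an empty part.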
func getAggregationKey(series *ts.Series, nodes []int) (string, error) {
	seriesName := series.Name()
	metricsPath, err := getFirstPathExpression(seriesName)
	if err != nil {
		return "", err
	}
	seriesName = metricsPath

	parts := strings.Split(seriesName, ".")

	keys := make([]string, 0, len(nodes))
	for _, n := range nodes {
		if n < 0 {
			n = len(parts) + n
		}

		// Guard against indices that are still out of range (too far negative or past the end).
		if n >= 0 && n < len(parts) {
			keys = append(keys, parts[n])
		} else {
			keys = append(keys, "")
		}
	}
	return strings.Join(keys, "."), nil
}

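// getMetaSeriesGrouping groups the series in seriesList by their aggregation key.
// When no nodes are given it returns an empty grouping.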
func getMetaSeriesGrouping(seriesList singlePathSpec, nodes []int) (map[string][]*ts.Series, error) {
	metaSeries := make(map[string][]*ts.Series)

	if len(nodes) > 0 {
		for _, s := range seriesList.Values {
			key, err := getAggregationKey(s, nodes)
			if err != nil {
				return metaSeries, err
			}
			metaSeries[key] = append(metaSeries[key], s)
		}
	}

	return metaSeries, nil
}

// groupByNodes takes a serieslist and maps a callback to subgroups within as defined by multiple nodes
//
//      &target=groupByNodes(ganglia.server*.*.cpu.load*,"sum",1,4)
//
// Would return multiple series which are each the result of applying the "sum" aggregation to groups joined on the
// nodes' list (0 indexed) resulting in a list of targets like
//
// 		sumSeries(ganglia.server1.*.cpu.load5),sumSeries(ganglia.server1.*.cpu.load10),sumSeries(ganglia.server1.*.cpu.load15),
// 		sumSeries(ganglia.server2.*.cpu.load5),sumSeries(ganglia.server2.*.cpu.load10),sumSeries(ganglia.server2.*.cpu.load15),...
//
// NOTE: if len(nodes) == 0, all series are aggregated into a single series.
func groupByNodes(ctx *common.Context, seriesList singlePathSpec, fname string, nodes ...int) (ts.SeriesList, error) {
	metaSeries, err := getMetaSeriesGrouping(seriesList, nodes)
	if err != nil {
		return ts.NewSeriesList(), err
	}

	if len(metaSeries) == 0 {
		// if nodes is an empty slice or every node in nodes exceeds the number
		// of parts in each series, just treat it like aggregate
		return aggregate(ctx, seriesList, fname)
	}

	return applyFnToMetaSeries(ctx, seriesList, metaSeries, fname)
}

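// applyFnToMetaSeries aggregates each keyed group of series with the named
// aggregation function and renames the resulting series to the group key.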
func applyFnToMetaSeries(ctx *common.Context, series singlePathSpec, metaSeries map[string][]*ts.Series, fname string) (ts.SeriesList, error) {
	newSeries := make([]*ts.Series, 0, len(metaSeries))
	for key, metaSeries := range metaSeries {
		seriesList := ts.SeriesList{
			Values:   metaSeries,
			Metadata: series.Metadata,
		}
		output, err := aggregate(ctx, singlePathSpec(seriesList), fname)
		if err != nil {
			return ts.NewSeriesList(), err
		}

		if len(output.Values) > 0 {
			series := output.Values[0].RenamedTo(key)
			newSeries = append(newSeries, series)
		}
	}

	r := ts.SeriesList(series)

	r.Values = newSeries

	// Ranging over hash map to create results destroys
	// any sort order on the incoming series list
	r.SortApplied = false

	return r, nil
}

// combineSeries combines multiple series into a single series using a
// consolidation func. If the series use different time intervals, the
// coarsest interval will apply.
func combineSeries(ctx *common.Context,
	series multiplePathSpecs,
	fname string,
	f ts.ConsolidationFunc,
) (ts.SeriesList, error) {
	if len(series.Values) == 0 { // no data; no work
		return ts.SeriesList(series), nil
	}

	normalized, start, end, millisPerStep, err := common.Normalize(ctx, ts.SeriesList(series))
	if err != nil {
		err := xerrors.NewInvalidParamsError(fmt.Errorf("combine series error: %w", err))
		return ts.NewSeriesList(), err
	}

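	// Combine all normalized series into a single consolidated series using f.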
	consolidation := ts.NewConsolidation(ctx, start, end, millisPerStep, f)
	for _, s := range normalized.Values {
		consolidation.AddSeries(s, ts.Avg)
	}

	result := consolidation.BuildSeries(fname, ts.Finalize)
	return ts.SeriesList{
		Values:   []*ts.Series{result},
		Metadata: series.Metadata,
	}, nil
}

// weightedAverage takes a series of values and a series of weights and produces a weighted
// average for all values. The corresponding values should share a node as defined by the
// node parameter, 0-indexed.
func weightedAverage(
	ctx *common.Context,
	input singlePathSpec,
	weights singlePathSpec,
	node int,
) (ts.SeriesList, error) {
	step := math.MaxInt32
	if len(input.Values) > 0 {
		step = input.Values[0].MillisPerStep()
	} else if len(weights.Values) > 0 {
		step = weights.Values[0].MillisPerStep()
	} else {
		return ts.SeriesList(input), nil
	}

	for _, series := range input.Values {
		if step != series.MillisPerStep() {
			err := xerrors.NewInvalidParamsError(fmt.Errorf("different step sizes in input series not supported"))
			return ts.NewSeriesList(), err
		}
	}

	for _, series := range weights.Values {
		if step != series.MillisPerStep() {
			err := xerrors.NewInvalidParamsError(fmt.Errorf("different step sizes in input series not supported"))
			return ts.NewSeriesList(), err
		}
	}

	valuesByKey, err := aliasByNode(ctx, input, node)
	if err != nil {
		return ts.NewSeriesList(), err
	}
	weightsByKey, err := aliasByNode(ctx, weights, node)
	if err != nil {
		return ts.NewSeriesList(), err
	}

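	// Pair each value series with the weight series that shares its aliased name.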
	type pairedSeries struct {
		values  *ts.Series
		weights *ts.Series
	}

	keys := make(map[string]*pairedSeries, len(valuesByKey.Values))

	for _, series := range valuesByKey.Values {
		keys[series.Name()] = &pairedSeries{values: series}
	}

	for _, series := range weightsByKey.Values {
		if tuple, ok := keys[series.Name()]; ok {
			tuple.weights = series
		}
	}

	productSeries := make([]*ts.Series, 0, len(keys))
	consideredWeights := make([]*ts.Series, 0, len(keys))

	for key, pair := range keys {

		if pair.weights == nil {
			continue // skip - no associated weight series
		}

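		// Multiply each value by its weight; dividing the summed products by the
		// summed weights below yields the weighted average.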
		vals := ts.NewValues(ctx, pair.values.MillisPerStep(), pair.values.Len())
		for i := 0; i < pair.values.Len(); i++ {
			v := pair.values.ValueAt(i)
			w := pair.weights.ValueAt(i)
			vals.SetValueAt(i, v*w)
		}
		series := ts.NewSeries(ctx, key, pair.values.StartTime(), vals)
		productSeries = append(productSeries, series)
		consideredWeights = append(consideredWeights, pair.weights)
	}

	meta := input.Metadata.CombineMetadata(weights.Metadata)
	top, err := sumSeries(ctx, multiplePathSpecs(ts.SeriesList{
		Values:   productSeries,
		Metadata: meta,
	}))
	if err != nil {
		return ts.NewSeriesList(), err
	}

	bottom, err := sumSeries(ctx, multiplePathSpecs(ts.SeriesList{
		Values:   consideredWeights,
		Metadata: meta,
	}))
	if err != nil {
		return ts.NewSeriesList(), err
	}

	results, err := divideSeries(ctx, singlePathSpec(top), singlePathSpec(bottom))
	if err != nil {
		return ts.NewSeriesList(), err
	}

	return alias(ctx, singlePathSpec(results), "weightedAverage")
}

// countSeries draws a horizontal line representing the number of nodes found in the seriesList.
func countSeries(ctx *common.Context, seriesList multiplePathSpecs) (ts.SeriesList, error) {
	count, err := common.Count(ctx, ts.SeriesList(seriesList), func(series ts.SeriesList) string {
		return wrapPathExpr(countSeriesFnName, series)
	})
	if err != nil {
		return ts.NewSeriesList(), err
	}

	r := ts.SeriesList(seriesList)
	r.Values = count.Values
	return r, nil
}