package expr

import (
	"errors"
	"fmt"
	"math"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"time"

	"bosun.org/cmd/bosun/expr/parse"
	"bosun.org/models"
	"bosun.org/opentsdb"
	"github.com/GaryBoone/GoStats/stats"
	"github.com/jinzhu/now"
)

func tagQuery(args []parse.Node) (parse.Tags, error) {
	n := args[0].(*parse.StringNode)
	// Since all 2.1 queries are valid 2.2 queries, at this time
	// we can just use 2.2 to parse to identify group by tags.
	q, err := opentsdb.ParseQuery(n.Text, opentsdb.Version2_2)
	if q == nil && err != nil {
		return nil, err
	}
	t := make(parse.Tags)
	for k := range q.GroupByTags {
		t[k] = struct{}{}
	}
	return t, nil
}

func tagFirst(args []parse.Node) (parse.Tags, error) {
	return args[0].Tags()
}

func tagRemove(args []parse.Node) (parse.Tags, error) {
	tags, err := tagFirst(args)
	if err != nil {
		return nil, err
	}
	key := args[1].(*parse.StringNode).Text
	delete(tags, key)
	return tags, nil
}

func seriesFuncTags(args []parse.Node) (parse.Tags, error) {
	s := args[0].(*parse.StringNode).Text
	return tagsFromString(s)
}

func aggrFuncTags(args []parse.Node) (parse.Tags, error) {
	if len(args) < 3 {
		return nil, errors.New("aggr: expects 3 arguments")
	}
	if _, ok := args[1].(*parse.StringNode); !ok {
		return nil, errors.New("aggr: expected group to be a string")
	}
	s := args[1].(*parse.StringNode).Text
	if s == "" {
		return tagsFromString(s)
	}
	tags := strings.Split(s, ",")
	for i := range tags {
		tags[i] += "=*"
	}
	return tagsFromString(strings.Join(tags, ","))
}

func tagsFromString(text string) (parse.Tags, error) {
	t := make(parse.Tags)
	if text == "" {
		return t, nil
	}
	ts, err := opentsdb.ParseTags(text)
	if err != nil {
		return nil, err
	}

	for k := range ts {
		t[k] = struct{}{}
	}
	return t, nil
}

func tagTranspose(args []parse.Node) (parse.Tags, error) {
	tags := make(parse.Tags)
	sp := strings.Split(args[1].(*parse.StringNode).Text, ",")
	if sp[0] != "" {
		for _, t := range sp {
			tags[t] = struct{}{}
		}
	}
	if atags, err := args[0].Tags(); err != nil {
		return nil, err
	} else if !tags.Subset(atags) {
		return nil, fmt.Errorf("transpose tags (%v) must be a subset of first argument's tags (%v)", tags, atags)
	}
	return tags, nil
}

func tagRename(args []parse.Node) (parse.Tags, error) {
	tags, err := tagFirst(args)
	if err != nil {
		return nil, err
	}
	for _, section := range strings.Split(args[1].(*parse.StringNode).Text, ",") {
		kv := strings.Split(section, "=")
		if len(kv) != 2 {
			return nil, fmt.Errorf("error parsing groups")
		}
		for oldTagKey := range tags {
			if kv[0] == oldTagKey {
				if _, ok := tags[kv[1]]; ok {
					return nil, fmt.Errorf("%s already in group", kv[1])
				}
				delete(tags, kv[0])
				tags[kv[1]] = struct{}{}
			}
		}
	}
	return tags, nil
}
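
// builtins maps the name of each function available in Bosun's expression
// language to its parse.Func definition: the argument and return types, the
// Tags function that derives the result's tag keys, an optional Check for
// extra parse-time validation, and the Go implementation in F.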
var builtins = map[string]parse.Func{
	// Reduction functions

	"avg": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Avg,
	},
	"cCount": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      CCount,
	},
	"dev": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Dev,
	},
	"diff": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Diff,
	},
	"first": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      First,
	},
	"forecastlr": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Forecast_lr,
	},
	"linelr": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeString},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Line_lr,
	},
	"last": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Last,
	},
	"len": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Length,
	},
	"max": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Max,
	},
	"median": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Median,
	},
	"min": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Min,
	},
	"percentile": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Percentile,
	},
	"since": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Since,
	},
	"sum": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Sum,
	},
	"streak": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Streak,
	},

	// Aggregation functions
	"aggr": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeString, models.TypeString},
		Return: models.TypeSeriesSet,
		Tags:   aggrFuncTags,
		F:      Aggr,
		Check:  aggrCheck,
	},

	// Group functions
	"addtags": {
		Args:          []models.FuncType{models.TypeVariantSet, models.TypeString},
		VariantReturn: true,
		Tags:          tagRename,
		F:             AddTags,
	},
	"rename": {
		Args:          []models.FuncType{models.TypeVariantSet, models.TypeString},
		VariantReturn: true,
		Tags:          tagRename,
		F:             Rename,
	},
	"remove": {
		Args:          []models.FuncType{models.TypeVariantSet, models.TypeString},
		VariantReturn: true,
		Tags:          tagRemove,
		F:             Remove,
	},
	"t": {
		Args:   []models.FuncType{models.TypeNumberSet, models.TypeString},
		Return: models.TypeSeriesSet,
		Tags:   tagTranspose,
		F:      Transpose,
	},
	"ungroup": {
		Args:   []models.FuncType{models.TypeNumberSet},
		Return: models.TypeScalar,
		F:      Ungroup,
	},

	// Other functions

	"abs": {
		Args:          []models.FuncType{models.TypeVariantSet},
		VariantReturn: true,
		Tags:          tagFirst,
		F:             Abs,
	},
	"crop": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Crop,
	},
	"d": {
		Args:   []models.FuncType{models.TypeString},
		Return: models.TypeScalar,
		F:      Duration,
	},
	"tod": {
		Args:   []models.FuncType{models.TypeScalar},
		Return: models.TypeString,
		F:      ToDuration,
	},
	"des": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeScalar, models.TypeScalar},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Des,
	},
	"dropge": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropGe,
	},
	"dropg": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropG,
	},
	"drople": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropLe,
	},
	"dropl": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropL,
	},
	"dropna": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropNA,
	},
	"dropbool": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeSeriesSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      DropBool,
	},
	"epoch": {
		Args:   []models.FuncType{},
		Return: models.TypeScalar,
		F:      Epoch,
	},
	"filter": {
		Args:          []models.FuncType{models.TypeVariantSet, models.TypeNumberSet},
		VariantReturn: true,
		Tags:          tagFirst,
		F:             Filter,
	},
	"limit": {
		Args:          []models.FuncType{models.TypeVariantSet, models.TypeScalar},
		VariantReturn: true,
		Tags:          tagFirst,
		F:             Limit,
	},
	"isnan": {
		Args:   []models.FuncType{models.TypeNumberSet},
		F:      IsNaN,
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
	},
	"nv": {
		Args:   []models.FuncType{models.TypeNumberSet, models.TypeScalar},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      NV,
	},
	"series": {
		Args:      []models.FuncType{models.TypeString, models.TypeScalar},
		VArgs:     true,
		VArgsPos:  1,
		VArgsOmit: true,
		Return:    models.TypeSeriesSet,
		Tags:      seriesFuncTags,
		F:         SeriesFunc,
	},
	"sort": {
		Args:   []models.FuncType{models.TypeNumberSet, models.TypeString},
		Return: models.TypeNumberSet,
		Tags:   tagFirst,
		F:      Sort,
	},
	"shift": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeString},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Shift,
	},
	"leftjoin": {
		Args:     []models.FuncType{models.TypeString, models.TypeString, models.TypeNumberSet},
		VArgs:    true,
		VArgsPos: 2,
		Return:   models.TypeTable,
		Tags:     nil, // TODO
		F:        LeftJoin,
	},
	"merge": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		VArgs:  true,
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Merge,
	},
	"month": {
		Args:   []models.FuncType{models.TypeScalar, models.TypeString},
		Return: models.TypeScalar,
		F:      Month,
	},
	"timedelta": {
		Args:   []models.FuncType{models.TypeSeriesSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      TimeDelta,
	},
	"tail": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberSet},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Tail,
	},
	"map": {
		Args:   []models.FuncType{models.TypeSeriesSet, models.TypeNumberExpr},
		Return: models.TypeSeriesSet,
		Tags:   tagFirst,
		F:      Map,
	},
	"v": {
		Return:  models.TypeScalar,
		F:       V,
		MapFunc: true,
	},
}

// Aggr combines multiple series matching the specified groups using an
// aggregator function. If group is empty, all given series are combined,
// regardless of existing groups. Available aggregator functions include:
// avg, min, max, sum, and pN, where N is a float between 0 and 1 inclusive,
// e.g. p.50 represents the 50th percentile. p0 and p1 are equal to min and
// max, respectively, but min and max are preferred for readability.
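// For example (illustrative only; assumes an OpenTSDB-backed q() query with
// hypothetical metric and tag names):
//
//	aggr(q("avg:os.cpu{host=*,core=*}", "5m", ""), "host", "max")
//
// combines the per-core series into a single max series per host.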
func Aggr(e *State, series *Results, groups string, aggregator string) (*Results, error) {
	results := Results{}

	grps := splitGroups(groups)
	if len(grps) == 0 {
		// no groups specified, so we merge all group values
		res, err := aggr(e, series, aggregator)
		if err != nil {
			return &results, err
		}
		res.Group = opentsdb.TagSet{}
		results.Results = append(results.Results, res)
		return &results, nil
	}

	// at least one group specified, so we work out what
	// the new group values will be
	newGroups := map[string]*Results{}
	for _, result := range series.Results {
		var vals []string
		for _, grp := range grps {
			if val, ok := result.Group[grp]; ok {
				vals = append(vals, val)
				continue
			}
			return nil, fmt.Errorf("unmatched group in at least one series: %v", grp)
		}
		groupName := strings.Join(vals, ",")
		if _, ok := newGroups[groupName]; !ok {
			newGroups[groupName] = &Results{}
		}
		newGroups[groupName].Results = append(newGroups[groupName].Results, result)
	}

	for groupName, series := range newGroups {
		res, err := aggr(e, series, aggregator)
		if err != nil {
			return &results, err
		}
		vs := strings.Split(groupName, ",")
		res.Group = opentsdb.TagSet{}
		for i := 0; i < len(grps); i++ {
			res.Group.Merge(opentsdb.TagSet{grps[i]: vs[i]})
		}
		results.Results = append(results.Results, res)
	}

	return &results, nil
}

// splitGroups splits a string of groups on commas, trims any surrounding
// whitespace, and returns an empty slice if the string is empty.
func splitGroups(groups string) []string {
	if len(groups) == 0 {
		return []string{}
	}
	grps := strings.Split(groups, ",")
	for i, grp := range grps {
		grps[i] = strings.Trim(grp, " ")
	}
	return grps
}

func aggr(e *State, series *Results, aggfunc string) (*Result, error) {
	res := Result{}
	newSeries := make(Series)
	var isPerc bool
	var percValue float64
	if len(aggfunc) > 0 && aggfunc[0] == 'p' {
		var err error
		percValue, err = strconv.ParseFloat(aggfunc[1:], 64)
		isPerc = err == nil
	}
	if isPerc {
		if percValue < 0 || percValue > 1 {
			return nil, fmt.Errorf("expr: aggr: percentile must be greater than or equal to 0 and less than or equal to 1")
		}
		aggfunc = "percentile"
	}

	switch aggfunc {
	case "percentile":
		newSeries = aggrPercentile(series.Results, percValue)
	case "min":
		newSeries = aggrPercentile(series.Results, 0.0)
	case "max":
		newSeries = aggrPercentile(series.Results, 1.0)
	case "avg":
		newSeries = aggrAverage(series.Results)
	case "sum":
		newSeries = aggrSum(series.Results)
	default:
		return &res, fmt.Errorf("unknown aggfunc: %v. Options are avg, sum, min, max, and pN where N is between 0 and 1", aggfunc)
	}

	res.Value = newSeries
	return &res, nil
}
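
// aggrPercentile merges the points of all input series by timestamp and
// reduces each timestamp's values to the requested percentile, yielding a
// single combined series.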
func aggrPercentile(series ResultSlice, percValue float64) Series {
	newSeries := make(Series)
	merged := map[time.Time][]float64{}
	for _, result := range series {
		for t, v := range result.Value.Value().(Series) {
			merged[t] = append(merged[t], v)
		}
	}
	for t := range merged {
		// transform points from merged series into a made-up
		// single timeseries, so that we can use the existing
		// percentile reduction function here
		dps := Series{}
		for i := range merged[t] {
			dps[time.Unix(int64(i), 0)] = merged[t][i]
		}
		newSeries[t] = percentile(dps, percValue)
	}
	return newSeries
}

func aggrAverage(series ResultSlice) Series {
	newSeries := make(Series)
	counts := map[time.Time]int64{}
	for _, result := range series {
		for t, v := range result.Value.Value().(Series) {
			newSeries[t] += v
			counts[t]++
		}
	}
	for t := range newSeries {
		newSeries[t] /= float64(counts[t])
	}
	return newSeries
}

func aggrSum(series ResultSlice) Series {
	newSeries := make(Series)
	for _, result := range series {
		for t, v := range result.Value.Value().(Series) {
			newSeries[t] += v
		}
	}
	return newSeries
}

func aggrCheck(t *parse.Tree, f *parse.FuncNode) error {
	if len(f.Args) < 3 {
		return errors.New("aggr: expects 3 arguments")
	}
	if _, ok := f.Args[2].(*parse.StringNode); !ok {
		return errors.New("aggr: expected a string as the aggregator function name")
	}
	name := f.Args[2].(*parse.StringNode).Text
	var isPerc bool
	var percValue float64
	if len(name) > 0 && name[0] == 'p' {
		var err error
		percValue, err = strconv.ParseFloat(name[1:], 64)
		isPerc = err == nil
	}
	if isPerc {
		if percValue < 0 || percValue > 1 {
			return errors.New("aggr: percentile must be greater than or equal to 0 and less than or equal to 1")
		}
		return nil
	}
	switch name {
	case "avg", "min", "max", "sum":
		return nil
	}
	return fmt.Errorf("aggr: unrecognized aggregation function %s", name)
}

// V returns the current value bound by Map for a v() subexpression.
func V(e *State) (*Results, error) {
	return fromScalar(e.vValue), nil
}

// Map evaluates expr once per point of each series, with v() bound to the
// point's value, and replaces each series with the resulting points.
func Map(e *State, series *Results, expr *Results) (*Results, error) {
	newExpr := Expr{expr.Results[0].Value.Value().(NumberExpr).Tree}
	for _, result := range series.Results {
		newSeries := make(Series)
		for t, v := range result.Value.Value().(Series) {
			e.vValue = v
			subResults, _, err := newExpr.ExecuteState(e)
			if err != nil {
				return series, err
			}
			for _, res := range subResults.Results {
				var v float64
				switch res.Value.Value().(type) {
				case Number:
					v = float64(res.Value.Value().(Number))
				case Scalar:
					v = float64(res.Value.Value().(Scalar))
				default:
					return series, fmt.Errorf("wrong return type for map expr: %v", res.Type())
				}
				newSeries[t] = v
			}
		}
		result.Value = newSeries
	}
	return series, nil
}

// SeriesFunc constructs a literal series from alternating timestamp/value
// pairs, tagged with the given tag string.
func SeriesFunc(e *State, tags string, pairs ...float64) (*Results, error) {
	if len(pairs)%2 != 0 {
		return nil, fmt.Errorf("uneven number of timestamps and values")
	}
	group := opentsdb.TagSet{}
	if tags != "" {
		var err error
		group, err = opentsdb.ParseTags(tags)
		if err != nil {
			return nil, fmt.Errorf("unable to parse tags: %v", err)
		}
	}

	series := make(Series)
	for i := 0; i < len(pairs); i += 2 {
		series[time.Unix(int64(pairs[i]), 0)] = pairs[i+1]
	}
	return &Results{
		Results: []*Result{
			{
				Value: series,
				Group: group,
			},
		},
	}, nil
}
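
// Crop removes points from each series that fall outside the window between
// startSet and endSet, both given as seconds before the evaluation time.
// Start and end numbers are matched to each series by group overlap; numbers
// with an empty group match every series.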
func Crop(e *State, sSet *Results, startSet *Results, endSet *Results) (*Results, error) {
	results := Results{}
INNER:
	for _, seriesResult := range sSet.Results {
		for _, startResult := range startSet.Results {
			for _, endResult := range endSet.Results {
				startHasNoGroup := len(startResult.Group) == 0
				endHasNoGroup := len(endResult.Group) == 0
				startOverlapsSeries := seriesResult.Group.Overlaps(startResult.Group)
				endOverlapsSeries := seriesResult.Group.Overlaps(endResult.Group)
				if (startHasNoGroup || startOverlapsSeries) && (endHasNoGroup || endOverlapsSeries) {
					res := crop(e, seriesResult, startResult, endResult)
					results.Results = append(results.Results, res)
					continue INNER
				}
			}
		}
	}
	return &results, nil
}

func crop(e *State, seriesResult *Result, startResult *Result, endResult *Result) *Result {
	startNumber := startResult.Value.(Number)
	endNumber := endResult.Value.(Number)
	start := e.now.Add(-(time.Duration(startNumber) * time.Second))
	end := e.now.Add(-(time.Duration(endNumber) * time.Second))
	series := seriesResult.Value.(Series)
	for timeStamp := range series {
		if timeStamp.Before(start) || timeStamp.After(end) {
			delete(series, timeStamp)
		}
	}
	return seriesResult
}

// DropBool drops points from the target series wherever the matching filter
// series is zero (false) at the same timestamp.
func DropBool(e *State, target *Results, filter *Results) (*Results, error) {
	res := Results{}
	unions := e.union(target, filter, "dropbool union")
	for _, union := range unions {
		aSeries := union.A.Value().(Series)
		bSeries := union.B.Value().(Series)
		newSeries := make(Series)
		for k, v := range aSeries {
			if bv, ok := bSeries[k]; ok {
				if bv != float64(0) {
					newSeries[k] = v
				}
			}
		}
		if len(newSeries) > 0 {
			res.Results = append(res.Results, &Result{Group: union.Group, Value: newSeries})
		}
	}
	return &res, nil
}

// Epoch returns the evaluation time as a Unix timestamp.
func Epoch(e *State) (*Results, error) {
	return &Results{
		Results: []*Result{
			{Value: Scalar(float64(e.now.Unix()))},
		},
	}, nil
}

// IsNaN maps each number in the set to 1 if it is NaN, else 0.
func IsNaN(e *State, nSet *Results) (*Results, error) {
	for _, res := range nSet.Results {
		if math.IsNaN(float64(res.Value.Value().(Number))) {
			res.Value = Number(1)
			continue
		}
		res.Value = Number(0)
	}
	return nSet, nil
}

// Month returns the Unix timestamp of the start or end of the current month,
// in the fixed time zone given by offset (hours from UTC).
func Month(e *State, offset float64, startEnd string) (*Results, error) {
	if startEnd != "start" && startEnd != "end" {
		return nil, fmt.Errorf("last parameter for month() must be 'start' or 'end'")
	}
	offsetInt := int(offset)
	location := time.FixedZone(fmt.Sprintf("%v", offsetInt), offsetInt*60*60)
	timeZoned := e.now.In(location)
	var mtod float64
	if startEnd == "start" {
		mtod = float64(now.New(timeZoned).BeginningOfMonth().Unix())
	} else {
		mtod = float64(now.New(timeZoned).EndOfMonth().Unix())
	}
	return &Results{
		Results: []*Result{
			{Value: Scalar(float64(mtod))},
		},
	}, nil
}

func NV(e *State, series *Results, v float64) (results *Results, err error) {
	// If there are no results in the set, promote it to a number with the empty group ({}).
	if len(series.Results) == 0 {
		series.Results = append(series.Results, &Result{Value: Number(v), Group: make(opentsdb.TagSet)})
		return series, nil
	}
	series.NaNValue = &v
	return series, nil
}
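
// Sort orders a number set by value, ascending ("asc") or descending ("desc").
// Results are pre-sorted by group name so that ties are broken deterministically.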
func Sort(e *State, series *Results, order string) (*Results, error) {
	// Sort by group name first to make the sort deterministic.
	sort.Sort(ResultSliceByGroup(series.Results))
	switch order {
	case "desc":
		sort.Stable(sort.Reverse(ResultSliceByValue(series.Results)))
	case "asc":
		sort.Stable(ResultSliceByValue(series.Results))
	default:
		return nil, fmt.Errorf("second argument of sort() must be asc or desc")
	}
	return series, nil
}

// Limit truncates the set to at most v results.
func Limit(e *State, set *Results, v float64) (*Results, error) {
	if v < 0 {
		return nil, fmt.Errorf("limit can't be negative, got: %f", v)
	}
	i := int(v)
	if len(set.Results) > i {
		set.Results = set.Results[:i]
	}
	return set, nil
}

// Filter keeps only the results of set whose group-matched number in
// numberSet is non-zero.
func Filter(e *State, set *Results, numberSet *Results) (*Results, error) {
	var ns ResultSlice
	for _, sr := range set.Results {
		for _, nr := range numberSet.Results {
			if sr.Group.Subset(nr.Group) || nr.Group.Subset(sr.Group) {
				if nr.Value.Value().(Number) != 0 {
					ns = append(ns, sr)
				}
			}
		}
	}
	set.Results = ns
	return set, nil
}

// Tail reduces each series to its last n points, where n is the matched number.
func Tail(e *State, series *Results, number *Results) (*Results, error) {
	f := func(res *Results, s *Result, floats []float64) error {
		tailLength := int(floats[0])

		// if there are fewer points than the requested tail,
		// short circuit and just return the current series
		if len(s.Value.Value().(Series)) <= tailLength {
			res.Results = append(res.Results, s)
			return nil
		}

		// create a new sorted series;
		// not going to do quickselect, see
		// https://github.com/bosun-monitor/bosun/pull/1802
		// for details
		oldSr := s.Value.Value().(Series)
		sorted := NewSortedSeries(oldSr)

		// create a new series, keep a reference,
		// and point s.Value at it, as we don't
		// need the old series any more
		newSeries := make(Series)
		s.Value = newSeries

		// load up the new series with the desired
		// number of points; we already checked len
		// above, so this slice is safe
		for _, item := range sorted[len(sorted)-tailLength:] {
			newSeries[item.T] = item.V
		}
		res.Results = append(res.Results, s)
		return nil
	}

	return match(f, series, number)
}

// Merge concatenates multiple series sets into one, erroring on duplicate groups.
func Merge(e *State, series ...*Results) (*Results, error) {
	res := &Results{}
	if len(series) == 0 {
		return res, fmt.Errorf("merge requires at least one result")
	}
	if len(series) == 1 {
		return series[0], nil
	}
	seen := make(map[string]bool)
	for _, r := range series {
		for _, entry := range r.Results {
			if _, ok := seen[entry.Group.String()]; ok {
				return res, fmt.Errorf("duplicate group in merge: %s", entry.Group.String())
			}
			seen[entry.Group.String()] = true
		}
		res.Results = append(res.Results, r.Results...)
	}
	return res, nil
}
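
// Remove deletes tagKey from the group of every result in the set. It is an
// error if the key is missing from any result or if removing it would leave
// two results with the same group.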
func Remove(e *State, set *Results, tagKey string) (*Results, error) {
	seen := make(map[string]bool)
	for _, r := range set.Results {
		if _, ok := r.Group[tagKey]; ok {
			delete(r.Group, tagKey)
			if _, ok := seen[r.Group.String()]; ok {
				return set, fmt.Errorf("duplicate group would result from removing tag key: %v", tagKey)
			}
			seen[r.Group.String()] = true
		} else {
			return set, fmt.Errorf("tag key %v not found in result", tagKey)
		}
	}
	return set, nil
}

// LeftJoin builds a table whose rows come from the first result set (the tag
// keys named in keysCSV become the key columns) and left-joins the values of
// each subsequent result set as the data columns named in columnsCSV.
func LeftJoin(e *State, keysCSV, columnsCSV string, rowData ...*Results) (*Results, error) {
	res := &Results{}
	dataWidth := len(rowData)
	if dataWidth == 0 {
		return res, fmt.Errorf("leftjoin requires at least one item to populate rows")
	}
	keyColumns := strings.Split(keysCSV, ",")
	dataColumns := strings.Split(columnsCSV, ",")
	if len(dataColumns) != dataWidth {
		return res, fmt.Errorf("mismatch in length of data rows and data labels")
	}
	keyWidth := len(keyColumns)
	keyIndex := make(map[string]int, keyWidth)
	for i, v := range keyColumns {
		keyIndex[v] = i
	}
	t := Table{}
	t.Columns = append(keyColumns, dataColumns...)
	rowWidth := len(dataColumns) + len(keyColumns)
	rowGroups := []opentsdb.TagSet{}
	for i, r := range rowData {
		if i == 0 {
			// the first result set determines the rows and key columns
			for _, val := range r.Results {
				row := make([]interface{}, rowWidth)
				for k, v := range val.Group {
					if ki, ok := keyIndex[k]; ok {
						row[ki] = v
					}
				}
				row[keyWidth+i] = val.Value.Value()
				rowGroups = append(rowGroups, val.Group)
				t.Rows = append(t.Rows, row)
			}
			continue
		}
		// subsequent result sets are joined onto existing rows by group subset
		for rowIndex, group := range rowGroups {
			for _, val := range r.Results {
				if group.Subset(val.Group) {
					t.Rows[rowIndex][keyWidth+i] = val.Value.Value()
				}
			}
		}
	}
	return &Results{
		Results: []*Result{
			{Value: t},
		},
	}, nil
}

// Shift moves every point in each series forward by duration d and records
// the shift in a "shift" tag on the group.
func Shift(e *State, series *Results, d string) (*Results, error) {
	dur, err := opentsdb.ParseDuration(d)
	if err != nil {
		return series, err
	}
	for _, result := range series.Results {
		newSeries := make(Series)
		for t, v := range result.Value.Value().(Series) {
			newSeries[t.Add(time.Duration(dur))] = v
		}
		result.Group["shift"] = d
		result.Value = newSeries
	}
	return series, nil
}

// Duration parses an OpenTSDB duration string (e.g. "1h") and returns its
// length in seconds.
func Duration(e *State, d string) (*Results, error) {
	duration, err := opentsdb.ParseDuration(d)
	if err != nil {
		return nil, err
	}
	return &Results{
		Results: []*Result{
			{Value: Scalar(duration.Seconds())},
		},
	}, nil
}

// ToDuration converts a number of seconds into a human-readable OpenTSDB
// duration string.
func ToDuration(e *State, sec float64) (*Results, error) {
	d := opentsdb.Duration(time.Duration(int64(sec)) * time.Second)
	return &Results{
		Results: []*Result{
			{Value: String(d.HumanString())},
		},
	}, nil
}

// DropValues removes points from each series for which dropFunction reports
// true against the matched threshold; a series that would be left empty is
// an error.
func DropValues(e *State, series *Results, threshold *Results, dropFunction func(float64, float64) bool) (*Results, error) {
	f := func(res *Results, s *Result, floats []float64) error {
		nv := make(Series)
		for k, v := range s.Value.Value().(Series) {
			if !dropFunction(float64(v), floats[0]) {
				// preserve values which should not be discarded
				nv[k] = v
			}
		}
		if len(nv) == 0 {
			return fmt.Errorf("series %s is empty", s.Group)
		}
		s.Value = nv
		res.Results = append(res.Results, s)
		return nil
	}
	return match(f, series, threshold)
}
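
// DropGe, DropG, DropLe, and DropL drop points that are >=, >, <=, or < the
// joined threshold, respectively. For example (illustrative), dropge(s, 100)
// would drop every point of s whose value is at least 100.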
func DropGe(e *State, series *Results, threshold *Results) (*Results, error) {
	dropFunction := func(value float64, threshold float64) bool { return value >= threshold }
	return DropValues(e, series, threshold, dropFunction)
}

func DropG(e *State, series *Results, threshold *Results) (*Results, error) {
	dropFunction := func(value float64, threshold float64) bool { return value > threshold }
	return DropValues(e, series, threshold, dropFunction)
}

func DropLe(e *State, series *Results, threshold *Results) (*Results, error) {
	dropFunction := func(value float64, threshold float64) bool { return value <= threshold }
	return DropValues(e, series, threshold, dropFunction)
}

func DropL(e *State, series *Results, threshold *Results) (*Results, error) {
	dropFunction := func(value float64, threshold float64) bool { return value < threshold }
	return DropValues(e, series, threshold, dropFunction)
}

// DropNA drops any NaN or Inf points from each series.
func DropNA(e *State, series *Results) (*Results, error) {
	dropFunction := func(value float64, threshold float64) bool {
		return math.IsNaN(float64(value)) || math.IsInf(float64(value), 0)
	}
	return DropValues(e, series, fromScalar(0), dropFunction)
}

func fromScalar(f float64) *Results {
	return &Results{
		Results: ResultSlice{
			&Result{
				Value: Number(f),
			},
		},
	}
}

// match joins each series result with one number from every number set (by
// empty or overlapping group) and calls f with the series and the joined
// floats. Unjoined series are an error unless IgnoreUnjoined is set.
func match(f func(res *Results, series *Result, floats []float64) error, series *Results, numberSets ...*Results) (*Results, error) {
	res := *series
	res.Results = nil
	for _, s := range series.Results {
		var floats []float64
		for _, num := range numberSets {
			for _, n := range num.Results {
				if len(n.Group) == 0 || s.Group.Overlaps(n.Group) {
					floats = append(floats, float64(n.Value.(Number)))
					break
				}
			}
		}
		if len(floats) != len(numberSets) {
			if !series.IgnoreUnjoined {
				return nil, fmt.Errorf("unjoined groups for %s", s.Group)
			}
			continue
		}
		if err := f(&res, s, floats); err != nil {
			return nil, err
		}
	}
	return &res, nil
}

// reduce applies the reduction function F to each series in the set, turning
// each series result into a number result; empty series are skipped.
func reduce(e *State, series *Results, F func(Series, ...float64) float64, args ...*Results) (*Results, error) {
	f := func(res *Results, s *Result, floats []float64) error {
		switch t := s.Value.(type) {
		case Series:
			if len(t) == 0 {
				return nil
			}
			s.Value = Number(F(t, floats...))
			res.Results = append(res.Results, s)
			return nil
		default:
			return fmt.Errorf(
				"unsupported type passed to reduce for alarm [%s]. Want: Series, got: %s. "+
					"This can happen when values can't be unjoined. Set IgnoreUnjoined and/or "+
					"IgnoreOtherUnjoined to distinguish this error.", e.Origin, reflect.TypeOf(t).String(),
			)
		}
	}
	return match(f, series, args...)
}
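
// Abs replaces each number, or each point of each series, with its absolute
// value, in place.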
func Abs(e *State, set *Results) *Results {
	for _, s := range set.Results {
		switch s.Type() {
		case models.TypeNumberSet:
			s.Value = Number(math.Abs(float64(s.Value.Value().(Number))))
		case models.TypeSeriesSet:
			for k, v := range s.Value.Value().(Series) {
				s.Value.Value().(Series)[k] = math.Abs(v)
			}
		}
	}
	return set
}

func Diff(e *State, series *Results) (r *Results, err error) {
	return reduce(e, series, diff)
}

// diff returns the difference between the last and first points of the series.
func diff(dps Series, args ...float64) float64 {
	return last(dps) - first(dps)
}

func Avg(e *State, series *Results) (*Results, error) {
	return reduce(e, series, avg)
}

// avg returns the mean of the series values.
func avg(dps Series, args ...float64) (a float64) {
	for _, v := range dps {
		a += float64(v)
	}
	a /= float64(len(dps))
	return
}

func CCount(e *State, series *Results) (*Results, error) {
	return reduce(e, series, cCount)
}

// cCount counts how many times consecutive points (in time order) change value.
func cCount(dps Series, args ...float64) (a float64) {
	if len(dps) < 2 {
		return float64(0)
	}
	series := NewSortedSeries(dps)
	count := 0
	last := series[0].V
	for _, p := range series[1:] {
		if p.V != last {
			count++
		}
		last = p.V
	}
	return float64(count)
}

// TimeDelta replaces each point's value with the number of seconds elapsed
// since the previous point.
func TimeDelta(e *State, series *Results) (*Results, error) {
	for _, res := range series.Results {
		sorted := NewSortedSeries(res.Value.Value().(Series))
		newSeries := make(Series)
		if len(sorted) == 0 {
			continue
		}
		if len(sorted) < 2 {
			newSeries[sorted[0].T] = 0
			res.Value = newSeries
			continue
		}
		lastTime := sorted[0].T.Unix()
		for _, dp := range sorted[1:] {
			unixTime := dp.T.Unix()
			diff := unixTime - lastTime
			newSeries[dp.T] = float64(diff)
			lastTime = unixTime
		}
		res.Value = newSeries
	}
	return series, nil
}

// Count returns the number of results a query produces.
func Count(e *State, query, sduration, eduration string) (r *Results, err error) {
	r, err = Query(e, query, sduration, eduration)
	if err != nil {
		return
	}
	return &Results{
		Results: []*Result{
			{Value: Scalar(len(r.Results))},
		},
	}, nil
}

func Sum(e *State, series *Results) (*Results, error) {
	return reduce(e, series, sum)
}

// sum returns the sum of the series values.
func sum(dps Series, args ...float64) (a float64) {
	for _, v := range dps {
		a += float64(v)
	}
	return
}

// Des applies Holt's double exponential smoothing to each series, with
// smoothing factor alpha and trend factor beta; series with fewer than two
// points are left unchanged.
func Des(e *State, series *Results, alpha float64, beta float64) *Results {
	for _, res := range series.Results {
		sorted := NewSortedSeries(res.Value.Value().(Series))
		if len(sorted) < 2 {
			continue
		}
		des := make(Series)
		s := make([]float64, len(sorted))
		b := make([]float64, len(sorted))
		s[0] = sorted[0].V
		for i := 1; i < len(sorted); i++ {
			s[i] = alpha*sorted[i].V + (1-alpha)*(s[i-1]+b[i-1])
			b[i] = beta*(s[i]-s[i-1]) + (1-beta)*b[i-1]
			des[sorted[i].T] = s[i]
		}
		res.Value = des
	}
	return series
}

func Streak(e *State, series *Results) (*Results, error) {
	return reduce(e, series, streak)
}
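
// streak returns the length of the longest run of consecutive non-zero
// points, in time order.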
func streak(dps Series, args ...float64) (a float64) {
	max := func(a, b int) int {
		if a > b {
			return a
		}
		return b
	}

	series := NewSortedSeries(dps)

	current := 0
	longest := 0
	for _, p := range series {
		if p.V != 0 {
			current++
		} else {
			longest = max(current, longest)
			current = 0
		}
	}
	longest = max(current, longest)
	return float64(longest)
}

func Dev(e *State, series *Results) (*Results, error) {
	return reduce(e, series, dev)
}

// dev returns the sample standard deviation of the series values.
func dev(dps Series, args ...float64) (d float64) {
	if len(dps) == 1 {
		return 0
	}
	a := avg(dps)
	for _, v := range dps {
		d += math.Pow(float64(v)-a, 2)
	}
	d /= float64(len(dps) - 1)
	return math.Sqrt(d)
}

func Length(e *State, series *Results) (*Results, error) {
	return reduce(e, series, length)
}

func length(dps Series, args ...float64) (a float64) {
	return float64(len(dps))
}

func Last(e *State, series *Results) (*Results, error) {
	return reduce(e, series, last)
}

// last returns the value of the most recent point in the series.
func last(dps Series, args ...float64) (a float64) {
	var last time.Time
	for k, v := range dps {
		if k.After(last) {
			a = v
			last = k
		}
	}
	return
}

func First(e *State, series *Results) (*Results, error) {
	return reduce(e, series, first)
}

// first returns the value of the earliest point in the series.
func first(dps Series, args ...float64) (a float64) {
	var first time.Time
	for k, v := range dps {
		if k.Before(first) || first.IsZero() {
			a = v
			first = k
		}
	}
	return
}

func Since(e *State, series *Results) (*Results, error) {
	return reduce(e, series, e.since)
}

// since returns the number of seconds between the evaluation time and the
// most recent point in the series.
func (e *State) since(dps Series, args ...float64) (a float64) {
	var last time.Time
	for k, v := range dps {
		if k.After(last) {
			a = v
			last = k
		}
	}
	s := e.now.Sub(last)
	return s.Seconds()
}

func Forecast_lr(e *State, series *Results, y *Results) (r *Results, err error) {
	return reduce(e, series, e.forecast_lr, y)
}

// forecast_lr returns the number of seconds a linear regression predicts the
// series will take to reach y_val. The result is clamped to plus or minus
// ten years.
func (e *State) forecast_lr(dps Series, args ...float64) float64 {
	const tenYears = time.Hour * 24 * 365 * 10
	yVal := args[0]
	var x []float64
	var y []float64
	for k, v := range dps {
		x = append(x, float64(k.Unix()))
		y = append(y, v)
	}
	var slope, intercept, _, _, _, _ = stats.LinearRegression(x, y)
	it := (yVal - intercept) / slope
	var i64 int64
	if it < math.MinInt64 {
		i64 = math.MinInt64
	} else if it > math.MaxInt64 {
		i64 = math.MaxInt64
	} else if math.IsNaN(it) {
		i64 = e.now.Unix()
	} else {
		i64 = int64(it)
	}
	t := time.Unix(i64, 0)
	s := -e.now.Sub(t)
	if s < -tenYears {
		s = -tenYears
	} else if s > tenYears {
		s = tenYears
	}
	return s.Seconds()
}

func Line_lr(e *State, series *Results, d string) (*Results, error) {
	dur, err := opentsdb.ParseDuration(d)
	if err != nil {
		return series, err
	}
	for _, res := range series.Results {
		res.Value = line_lr(res.Value.(Series), time.Duration(dur))
		res.Group.Merge(opentsdb.TagSet{"regression": "line"})
	}
	return series, nil
}

// line_lr generates a series representing the regression line from the last
// existing point up to duration d in the future.
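// The fit is y = slope*t + intercept over the existing points (t in Unix
// seconds); only the two endpoints of the line are emitted, which is enough
// to draw it.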
func line_lr(dps Series, d time.Duration) Series {
	var x []float64
	var y []float64
	sortedDPS := NewSortedSeries(dps)
	var maxT time.Time
	if len(sortedDPS) > 0 {
		maxT = sortedDPS[len(sortedDPS)-1].T
	}
	for _, v := range sortedDPS {
		xv := float64(v.T.Unix())
		x = append(x, xv)
		y = append(y, v.V)
	}
	var slope, intercept, _, _, _, _ = stats.LinearRegression(x, y)
	s := make(Series)
	// First point in the regression line
	s[maxT] = float64(maxT.Unix())*slope + intercept
	// Last point
	last := maxT.Add(d)
	s[last] = float64(last.Unix())*slope + intercept
	return s
}

func Percentile(e *State, series *Results, p *Results) (r *Results, err error) {
	return reduce(e, series, percentile, p)
}

func Min(e *State, series *Results) (r *Results, err error) {
	return reduce(e, series, percentile, fromScalar(0))
}

func Median(e *State, series *Results) (r *Results, err error) {
	return reduce(e, series, percentile, fromScalar(.5))
}

func Max(e *State, series *Results) (r *Results, err error) {
	return reduce(e, series, percentile, fromScalar(1))
}

// percentile returns the value at the corresponding percentile between 0 and 1.
// Min and Max can be simulated using p <= 0 and p >= 1, respectively.
func percentile(dps Series, args ...float64) (a float64) {
	p := args[0]
	var x []float64
	for _, v := range dps {
		x = append(x, float64(v))
	}
	sort.Float64s(x)
	if p <= 0 {
		return x[0]
	}
	if p >= 1 {
		return x[len(x)-1]
	}
	i := p * float64(len(x)-1)
	i = math.Ceil(i)
	return x[int(i)]
}

// Rename renames tag keys in each result's group according to a
// comma-separated list of oldKey=newKey pairs.
func Rename(e *State, set *Results, s string) (*Results, error) {
	for _, section := range strings.Split(s, ",") {
		kv := strings.Split(section, "=")
		if len(kv) != 2 {
			return nil, fmt.Errorf("error parsing groups")
		}
		oldKey, newKey := kv[0], kv[1]
		for _, res := range set.Results {
			for tag, v := range res.Group {
				if oldKey == tag {
					if _, ok := res.Group[newKey]; ok {
						return nil, fmt.Errorf("%s already in group", newKey)
					}
					delete(res.Group, oldKey)
					res.Group[newKey] = v
				}

			}
		}
	}
	return set, nil
}

// AddTags adds the tags given as a comma-separated list of key=value pairs
// to every result's group; it is an error if a key is already present.
func AddTags(e *State, set *Results, s string) (*Results, error) {
	if s == "" {
		return set, nil
	}
	tagSetToAdd, err := opentsdb.ParseTags(s)
	if err != nil {
		return nil, err
	}
	for tagKey, tagValue := range tagSetToAdd {
		for _, res := range set.Results {
			if res.Group == nil {
				res.Group = make(opentsdb.TagSet)
			}
			if _, ok := res.Group[tagKey]; ok {
				return nil, fmt.Errorf("%s key already in group", tagKey)
			}
			res.Group[tagKey] = tagValue
		}
	}
	return set, nil
}

// Ungroup converts a number set containing exactly one result into a scalar.
func Ungroup(e *State, d *Results) (*Results, error) {
	if len(d.Results) != 1 {
		return nil, fmt.Errorf("ungroup: requires exactly one group")
	}
	return &Results{
		Results: ResultSlice{
			&Result{
				Value: Scalar(d.Results[0].Value.Value().(Number)),
			},
		},
	}, nil
}
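
// Transpose converts a number set into a series set, grouping results by the
// tag keys listed in gp. Within each new group the input numbers become the
// points of a series, keyed by synthetic Unix timestamps 0, 1, 2, and so on.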
func Transpose(e *State, d *Results, gp string) (*Results, error) {
	gps := strings.Split(gp, ",")
	m := make(map[string]*Result)
	for _, v := range d.Results {
		ts := make(opentsdb.TagSet)
		for k, v := range v.Group {
			for _, b := range gps {
				if k == b {
					ts[k] = v
				}
			}
		}
		if _, ok := m[ts.String()]; !ok {
			m[ts.String()] = &Result{
				Group: ts,
				Value: make(Series),
			}
		}
		switch t := v.Value.(type) {
		case Number:
			r := m[ts.String()]
			i := int64(len(r.Value.(Series)))
			r.Value.(Series)[time.Unix(i, 0).UTC()] = float64(t)
			r.Computations = append(r.Computations, v.Computations...)
		default:
			panic(fmt.Errorf("expr: expected a number"))
		}
	}
	var r Results
	for _, res := range m {
		r.Results = append(r.Results, res)
	}
	return &r, nil
}

// parseDurationPair is a helper to parse Bosun/OpenTSDB style duration strings
// that are often the last two arguments of tsdb query functions. It uses the
// State object's now property and returns absolute start and end times.
func parseDurationPair(e *State, startDuration, endDuration string) (start, end time.Time, err error) {
	sd, err := opentsdb.ParseDuration(startDuration)
	if err != nil {
		return
	}
	var ed opentsdb.Duration
	if endDuration != "" {
		ed, err = opentsdb.ParseDuration(endDuration)
		if err != nil {
			return
		}
	}
	return e.now.Add(time.Duration(-sd)), e.now.Add(time.Duration(-ed)), nil
}