// github.com/siglens/siglens@v0.0.0-20240328180423-f7ce9ae441ed/pkg/integrations/prometheus/promql/metricsSearchHandler.go

/*
Copyright 2023.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package promql

import (
	"bytes"
	"errors"
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
	"unicode"

	"github.com/cespare/xxhash"
	pql "github.com/influxdata/promql/v2"
	jsoniter "github.com/json-iterator/go"
	"github.com/prometheus/common/model"

	"github.com/influxdata/promql/v2/pkg/labels"
	dtu "github.com/siglens/siglens/pkg/common/dtypeutils"
	rutils "github.com/siglens/siglens/pkg/readerUtils"
	"github.com/siglens/siglens/pkg/segment"
	"github.com/siglens/siglens/pkg/segment/reader/metrics/tagstree"
	"github.com/siglens/siglens/pkg/segment/structs"
	segutils "github.com/siglens/siglens/pkg/segment/utils"
	"github.com/siglens/siglens/pkg/usageStats"
	"github.com/siglens/siglens/pkg/utils"
	. "github.com/siglens/siglens/pkg/utils"
	log "github.com/sirupsen/logrus"
	"github.com/valyala/fasthttp"
)

const MIN_IN_MS = 60_000
const HOUR_IN_MS = 3600_000
const DAY_IN_MS = 86400_000

func parseSearchBody(jsonSource map[string]interface{}) (string, uint32, uint32, time.Duration, usageStats.UsageStatsGranularity, error) {
	searchText := ""
	var err error
	var startTime, endTime uint32
	var step time.Duration
	var granularity usageStats.UsageStatsGranularity
	var pastXhours uint64
	for key, value := range jsonSource {
		switch key {
		case "query":
			switch valtype := value.(type) {
			case string:
				searchText = valtype
			default:
				log.Errorf("promql/parseSearchBody query is not a string! Val %+v", valtype)
				return searchText, 0, 0, 0, usageStats.Hourly, errors.New("promql/parseSearchBody query is not a string")
			}
		case "step":
			switch valtype := value.(type) {
			case string:
				step, err = parseDuration(valtype)
				if err != nil {
					log.Error("Query bad request: " + err.Error())
					return searchText, 0, 0, 0, usageStats.Hourly, err
				}
			default:
				log.Errorf("promql/parseSearchBody step is not a string! Val %+v", valtype)
				return searchText, 0, 0, 0, usageStats.Hourly, errors.New("promql/parseSearchBody step is not a string")
			}
			if step <= 0 {
				err := errors.New("zero or negative query resolution step widths are not accepted. Try a positive integer")
				log.Infof("Query bad request: %s", err.Error())
				return searchText, 0, 0, 0, usageStats.Hourly, err
			}

		case "start":
			var startTimeStr string
			switch valtype := value.(type) {
			case int:
				startTimeStr = fmt.Sprintf("%d", valtype)
			case float64:
				startTimeStr = fmt.Sprintf("%d", int64(valtype))
			case string:
				if strings.Contains(value.(string), "now") {
					nowTs := utils.GetCurrentTimeInMs()
					defValue := nowTs - (1 * 60 * 1000)
					pastXhours, granularity = parseAlphaNumTime(nowTs, value.(string), defValue)
					startTimeStr = fmt.Sprintf("%d", pastXhours)
				} else {
					startTimeStr = valtype
				}
			default:
				log.Errorf("promql/parseSearchBody start is not a string! Val %+v", valtype)
				return searchText, 0, 0, 0, usageStats.Hourly, errors.New("promql/parseSearchBody start is not a string")
			}
			startTime, err = parseTimeFromString(startTimeStr)
			if err != nil {
				log.Errorf("Unable to parse start time: %v. Error: %+v", value, err)
				return searchText, 0, 0, 0, usageStats.Hourly, err
			}
		case "end":
			var endTimeStr string
			switch valtype := value.(type) {
			case int:
				endTimeStr = fmt.Sprintf("%d", valtype)
			case float64:
				endTimeStr = fmt.Sprintf("%d", int64(valtype))
			case string:
				if strings.Contains(value.(string), "now") {
					nowTs := utils.GetCurrentTimeInMs()
					defValue := nowTs
					pastXhours, _ = parseAlphaNumTime(nowTs, value.(string), defValue)
					endTimeStr = fmt.Sprintf("%d", pastXhours)
				} else {
					endTimeStr = valtype
				}
			default:
				log.Errorf("promql/parseSearchBody end time is not a string! Val %+v", valtype)
				return searchText, 0, 0, 0, usageStats.Hourly, errors.New("promql/parseSearchBody: end time is not a string")
			}
			endTime, err = parseTimeFromString(endTimeStr)
			if err != nil {
				log.Errorf("Unable to parse end time: %v. Error: %+v", value, err)
				return searchText, 0, 0, 0, usageStats.Hourly, err
			}
		default:
			log.Errorf("promql/parseSearchBody invalid key %+v", key)
			return "", 0, 0, 0, usageStats.Hourly, errors.New("promql/parseSearchBody error invalid key")
		}
	}
	if endTime < startTime {
		err := errors.New("end timestamp must not be before start time")
		log.Infof("Query bad request: %s", err.Error())
		return searchText, 0, 0, 0, usageStats.Hourly, err
	}
	return searchText, startTime, endTime, step, granularity, nil
}
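// For illustration only (field values are hypothetical), a request body that
// parseSearchBody accepts could look like the following. "start" and "end" may
// also be unix timestamps in seconds or milliseconds, or absolute times such
// as "2006-01-02 15:04:05"; "step" may be a float number of seconds or a
// Prometheus-style duration string.
//
//	{
//	  "query": "avg(cpu_usage)",
//	  "start": "now-1h",
//	  "end":   "now",
//	  "step":  "15s"
//	}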
func ProcessMetricsSearchRequest(ctx *fasthttp.RequestCtx, myid uint64) {
	rawJSON := ctx.PostBody()
	if rawJSON == nil {
		log.Errorf("ProcessMetricsSearchRequest: received empty search request body")
		utils.SetBadMsg(ctx, "")
		return
	}
	qid := rutils.GetNextQid()

	readJSON := make(map[string]interface{})
	var jsonc = jsoniter.ConfigCompatibleWithStandardLibrary
	decoder := jsonc.NewDecoder(bytes.NewReader(rawJSON))
	decoder.UseNumber()
	err := decoder.Decode(&readJSON)
	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		WriteJsonResponse(ctx, nil)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessMetricsSearchRequest: could not write error message err=%v", qid, err)
		}
		log.Errorf("qid=%v, ProcessMetricsSearchRequest: failed to decode search request body! Err=%+v", qid, err)
		return
	}

	searchText, startTime, endTime, step, _, err := parseSearchBody(readJSON)
	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessMetricsSearchRequest: could not write error message err=%v", qid, err)
		}

		log.Errorf("qid=%v, ProcessMetricsSearchRequest: parseSearchBody failed, err=%v", qid, err)
		return
	}
	if endTime == 0 {
		endTime = uint32(time.Now().Unix())
	}
	if startTime == 0 {
		startTime = uint32(time.Now().Add(time.Duration(-5) * time.Minute).Unix())
	}

	log.Infof("qid=%v, ProcessMetricsSearchRequest: searchString=[%v] startEpochMs=[%v] endEpochMs=[%v] step=[%v]", qid, searchText, startTime, endTime, step)

	metricQueryRequest, pqlQuerytype, _, err := convertPqlToMetricsQuery(searchText, startTime, endTime, myid)
	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		WriteJsonResponse(ctx, nil)
		log.Errorf("qid=%v, ProcessMetricsSearchRequest: Error parsing query err=%+v", qid, err)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessMetricsSearchRequest: could not write error message err=%v", qid, err)
		}
		return
	}
	segment.LogMetricsQuery("PromQL metrics query parser", &metricQueryRequest[0], qid)
	res := segment.ExecuteMetricsQuery(&metricQueryRequest[0].MetricsQuery, &metricQueryRequest[0].TimeRange, qid)

	mQResponse, err := res.GetResultsPromQl(&metricQueryRequest[0].MetricsQuery, pqlQuerytype)
	if err != nil {
		log.Errorf("ProcessMetricsSearchRequest: Error getting results! %+v", err)
	}
	WriteJsonResponse(ctx, &mQResponse)
	ctx.SetContentType(ContentJson)
	ctx.SetStatusCode(fasthttp.StatusOK)
}
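// As a minimal sketch (the real route names and router wiring live elsewhere
// in siglens and may differ; the port and org id below are placeholders), the
// handler above could be mounted on a fasthttp server like this:
//
//	requestHandler := func(ctx *fasthttp.RequestCtx) {
//	    ProcessMetricsSearchRequest(ctx, 0) // 0 stands in for the org id
//	}
//	_ = fasthttp.ListenAndServe(":8081", requestHandler)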
func ProcessUiMetricsSearchRequest(ctx *fasthttp.RequestCtx, myid uint64) {
	rawJSON := ctx.PostBody()
	if rawJSON == nil {
		log.Errorf("ProcessUiMetricsSearchRequest: received empty search request body")
		utils.SetBadMsg(ctx, "")
		return
	}
	qid := rutils.GetNextQid()

	readJSON := make(map[string]interface{})
	var jsonc = jsoniter.ConfigCompatibleWithStandardLibrary
	decoder := jsonc.NewDecoder(bytes.NewReader(rawJSON))
	decoder.UseNumber()
	err := decoder.Decode(&readJSON)
	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		WriteJsonResponse(ctx, nil)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: could not write error message err=%v", qid, err)
		}
		log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: failed to decode search request body! Err=%+v", qid, err)
		return
	}

	searchText, startTime, endTime, step, _, err := parseSearchBody(readJSON)
	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: could not write error message err=%v", qid, err)
		}

		log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: parseSearchBody failed, err=%v", qid, err)
		return
	}
	if endTime == 0 {
		endTime = uint32(time.Now().Unix())
	}
	if startTime == 0 {
		startTime = uint32(time.Now().Add(time.Duration(-60) * time.Minute).Unix())
	}

	log.Infof("qid=%v, ProcessUiMetricsSearchRequest: searchString=[%v] startEpochMs=[%v] endEpochMs=[%v] step=[%v]", qid, searchText, startTime, endTime, step)

	searchText, startTime, endTime, interval := parseSearchTextForRangeSelection(searchText, startTime, endTime)
	metricQueryRequest, pqlQuerytype, queryArithmetic, err := convertPqlToMetricsQuery(searchText, startTime, endTime, myid)

	if err != nil {
		ctx.SetContentType(ContentJson)
		ctx.SetStatusCode(fasthttp.StatusBadRequest)
		WriteJsonResponse(ctx, nil)
		log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: Error parsing query err=%+v", qid, err)
		_, err = ctx.WriteString(err.Error())
		if err != nil {
			log.Errorf("qid=%v, ProcessUiMetricsSearchRequest: could not write error message err=%v", qid, err)
		}
		return
	}

	metricQueriesList := make([]*structs.MetricsQuery, 0)
	var timeRange *dtu.MetricsTimeRange
	hashList := make([]uint64, 0)
	// index into the slice so the appended pointers reference distinct requests
	for i := range metricQueryRequest {
		hashList = append(hashList, metricQueryRequest[i].MetricsQuery.HashedMName)
		metricQueriesList = append(metricQueriesList, &metricQueryRequest[i].MetricsQuery)
		segment.LogMetricsQuery("PromQL metrics query parser", &metricQueryRequest[i], qid)
		timeRange = &metricQueryRequest[i].TimeRange
	}
	res := segment.ExecuteMultipleMetricsQuery(hashList, metricQueriesList, queryArithmetic, timeRange, qid)
	mQResponse, err := res.GetResultsPromQlForUi(metricQueriesList[0], pqlQuerytype, startTime, endTime, interval)
	if err != nil {
		log.Errorf("ProcessUiMetricsSearchRequest: Error getting results! %+v", err)
	}
	WriteJsonResponse(ctx, &mQResponse)
	ctx.SetContentType(ContentJson)
	ctx.SetStatusCode(fasthttp.StatusOK)
}
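// For illustration (metric and label names are made up), a UI request such as
//
//	{"query": "rate(http_requests_total{job=\"api\"}[5m])", "start": "now-1h", "end": "now"}
//
// first has its [5m] range selector read by parseSearchTextForRangeSelection
// (interval = 300 seconds, with the start time pinned to end-300s), and the
// query text is then converted by convertPqlToMetricsQuery and executed via
// ExecuteMultipleMetricsQuery.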
func convertPqlToMetricsQuery(searchText string, startTime, endTime uint32, myid uint64) ([]structs.MetricsQueryRequest, pql.ValueType, []structs.QueryArithmetic, error) {
	// call prometheus promql parser
	expr, err := pql.ParseExpr(searchText)
	if err != nil {
		return []structs.MetricsQueryRequest{}, "", []structs.QueryArithmetic{}, err
	}
	pqlQuerytype := expr.Type()
	var mquery structs.MetricsQuery
	mquery.Aggregator = structs.Aggreation{}
	selectors := extractSelectors(expr)
	// go through labels
	for _, lblEntry := range selectors {
		for _, entry := range lblEntry {
			if entry.Name != "__name__" {
				tagFilter := &structs.TagsFilter{
					TagKey:          entry.Name,
					RawTagValue:     entry.Value,
					HashTagValue:    xxhash.Sum64String(entry.Value),
					TagOperator:     segutils.TagOperator(entry.Type),
					LogicalOperator: segutils.And,
				}
				mquery.TagsFilters = append(mquery.TagsFilters, tagFilter)
			} else {
				mquery.MetricName = entry.Value
			}
		}
	}

	var groupby bool
	switch expr := expr.(type) {
	case *pql.AggregateExpr:
		es := &pql.EvalStmt{
			Expr:     expr,
			Start:    time.Now().Add(time.Duration(-5) * time.Minute),
			End:      time.Now(),
			Interval: time.Duration(1 * time.Minute),
			// LookbackDelta: 0,
		}

		mquery.Aggregator = structs.Aggreation{}
		pql.Inspect(es.Expr, func(node pql.Node, path []pql.Node) error {
			switch expr := node.(type) {
			case *pql.AggregateExpr:
				aggFunc := extractFuncFromPath(path)
				switch aggFunc {
				case "avg":
					mquery.Aggregator.AggregatorFunction = segutils.Avg
				case "count":
					mquery.Aggregator.AggregatorFunction = segutils.Count
				case "sum":
					mquery.Aggregator.AggregatorFunction = segutils.Sum
				case "max":
					mquery.Aggregator.AggregatorFunction = segutils.Max
				case "min":
					mquery.Aggregator.AggregatorFunction = segutils.Min
				case "quantile":
					mquery.Aggregator.AggregatorFunction = segutils.Quantile
				default:
					log.Infof("convertPqlToMetricsQuery: using sum aggregator by default for AggregateExpr (got %v)", aggFunc)
					mquery.Aggregator = structs.Aggreation{AggregatorFunction: segutils.Sum}
				}
			case *pql.VectorSelector:
				_, grouping := extractGroupsFromPath(path)
				aggFunc := extractFuncFromPath(path)
				for _, grp := range grouping {
					groupby = true
					tagFilter := structs.TagsFilter{
						TagKey:          grp,
						RawTagValue:     "*",
						HashTagValue:    xxhash.Sum64String(tagstree.STAR),
						TagOperator:     segutils.TagOperator(segutils.Equal),
						LogicalOperator: segutils.And,
					}
					mquery.TagsFilters = append(mquery.TagsFilters, &tagFilter)
				}
				switch aggFunc {
				case "avg":
					mquery.Aggregator.AggregatorFunction = segutils.Avg
				case "count":
					mquery.Aggregator.AggregatorFunction = segutils.Count
				case "sum":
					mquery.Aggregator.AggregatorFunction = segutils.Sum
				case "max":
					mquery.Aggregator.AggregatorFunction = segutils.Max
				case "min":
					mquery.Aggregator.AggregatorFunction = segutils.Min
				case "quantile":
					mquery.Aggregator.AggregatorFunction = segutils.Quantile
				default:
					log.Infof("convertPqlToMetricsQuery: using sum aggregator by default for VectorSelector (got %v)", aggFunc)
					mquery.Aggregator = structs.Aggreation{AggregatorFunction: segutils.Sum}
				}
			case *pql.NumberLiteral:
				mquery.Aggregator.FuncConstant = expr.Val
			default:
				err := fmt.Errorf("pql.Inspect: Unsupported node type %T", node)
				log.Errorf("%v", err)
				return err
			}
			return nil
		})
	case *pql.Call:
		pql.Inspect(expr, func(node pql.Node, path []pql.Node) error {
			switch node.(type) {
			case *pql.MatrixSelector:
				function := extractFuncFromPath(path)

				if mquery.TagsFilters != nil {
					groupby = true
				}
				switch function {
				case "deriv":
					mquery.Aggregator = structs.Aggreation{RangeFunction: segutils.Derivative}
				case "rate":
					mquery.Aggregator = structs.Aggreation{RangeFunction: segutils.Rate}
				default:
					return fmt.Errorf("pql.Inspect: unsupported function type %v", function)
				}
			}
			return nil
		})
	case *pql.VectorSelector:
		mquery.HashedMName = xxhash.Sum64String(mquery.MetricName)
		mquery.OrgId = myid
		mquery.SelectAllSeries = true
		agg := structs.Aggreation{AggregatorFunction: segutils.Sum}
		mquery.Downsampler = structs.Downsampler{Interval: 1, Unit: "m", Aggregator: agg}
		metricQueryRequest := &structs.MetricsQueryRequest{
			MetricsQuery: mquery,
			TimeRange: dtu.MetricsTimeRange{
				StartEpochSec: startTime,
				EndEpochSec:   endTime,
			},
		}
		return []structs.MetricsQueryRequest{*metricQueryRequest}, pqlQuerytype, []structs.QueryArithmetic{}, nil
	case *pql.BinaryExpr:
		arithmeticOperation := structs.QueryArithmetic{}
		var lhsValType, rhsValType pql.ValueType
		var lhsRequest, rhsRequest []structs.MetricsQueryRequest
		if constant, ok := expr.LHS.(*pql.NumberLiteral); ok {
			arithmeticOperation.ConstantOp = true
			arithmeticOperation.Constant = constant.Val
		} else {
			lhsRequest, lhsValType, _, err = convertPqlToMetricsQuery(expr.LHS.String(), startTime, endTime, myid)
			if err != nil {
				return []structs.MetricsQueryRequest{}, "", []structs.QueryArithmetic{}, err
			}
			arithmeticOperation.LHS = lhsRequest[0].MetricsQuery.HashedMName
		}

		if constant, ok := expr.RHS.(*pql.NumberLiteral); ok {
			arithmeticOperation.ConstantOp = true
			arithmeticOperation.Constant = constant.Val
		} else {
			rhsRequest, rhsValType, _, err = convertPqlToMetricsQuery(expr.RHS.String(), startTime, endTime, myid)
			if err != nil {
				return []structs.MetricsQueryRequest{}, "", []structs.QueryArithmetic{}, err
			}
			arithmeticOperation.RHS = rhsRequest[0].MetricsQuery.HashedMName
		}
		arithmeticOperation.Operation = getArithmeticOperation(expr.Op)
		if rhsValType == pql.ValueTypeVector {
			lhsValType = pql.ValueTypeVector
		}
		return append(lhsRequest, rhsRequest...), lhsValType, []structs.QueryArithmetic{arithmeticOperation}, nil
	case *pql.ParenExpr:
		return convertPqlToMetricsQuery(expr.Expr.String(), startTime, endTime, myid)
	default:
		return []structs.MetricsQueryRequest{}, "", []structs.QueryArithmetic{}, fmt.Errorf("convertPqlToMetricsQuery: Unsupported query type %T", expr)
	}

	tags := mquery.TagsFilters
	for idx, tag := range tags {
		var hashedTagVal uint64
		switch v := tag.RawTagValue.(type) {
		case string:
			hashedTagVal = xxhash.Sum64String(v)
		case int64:
			hashedTagVal = uint64(v)
		case float64:
			hashedTagVal = uint64(v)
		case uint64:
			hashedTagVal = v
		default:
			log.Errorf("convertPqlToMetricsQuery: invalid tag value type %T", v)
		}
		tags[idx].HashTagValue = hashedTagVal
	}

	metricName := mquery.MetricName
	mquery.HashedMName = xxhash.Sum64String(metricName)

	if mquery.Aggregator.AggregatorFunction == 0 && !groupby {
		mquery.Aggregator = structs.Aggreation{AggregatorFunction: segutils.Sum}
	}
	mquery.Downsampler = structs.Downsampler{Interval: 1, Unit: "m", Aggregator: mquery.Aggregator}
	mquery.SelectAllSeries = !groupby // if group by is not present, then we need to select all series
	mquery.OrgId = myid
	metricQueryRequest := &structs.MetricsQueryRequest{
		MetricsQuery: mquery,
		TimeRange: dtu.MetricsTimeRange{
			StartEpochSec: startTime,
			EndEpochSec:   endTime,
		},
	}
	return []structs.MetricsQueryRequest{*metricQueryRequest}, pqlQuerytype, []structs.QueryArithmetic{}, nil
}
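// For illustration (hypothetical metric and label names), a query such as
//
//	sum(cpu_usage{host="abc"}) by (region)
//
// is translated roughly as follows: the vector selector contributes
// MetricName="cpu_usage" and a TagsFilter on host="abc"; the outer aggregation
// sets Aggregator.AggregatorFunction to segutils.Sum; and the "by (region)"
// grouping adds a wildcard TagsFilter for "region" and turns SelectAllSeries
// off. The result is wrapped in a single MetricsQueryRequest together with the
// [startTime, endTime] range.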
func getArithmeticOperation(op pql.ItemType) segutils.ArithmeticOperator {
	switch op {
	case pql.ItemADD:
		return segutils.Add
	case pql.ItemSUB:
		return segutils.Subtract
	case pql.ItemMUL:
		return segutils.Multiply
	case pql.ItemDIV:
		return segutils.Divide
	default:
		log.Errorf("getArithmeticOperation: unexpected op: %v", op)
		return 0
	}
}

func parseTimeFromString(timeStr string) (uint32, error) {
	// if it is not a relative time, parse as absolute time
	var t time.Time
	var err error
	// unixtime
	if unixTime, err := strconv.ParseInt(timeStr, 10, 64); err == nil {
		if IsTimeInMilli(uint64(unixTime)) {
			return uint32(unixTime / 1e3), nil
		}
		return uint32(unixTime), nil
	}

	// absolute time formats
	timeStr = absoluteTimeFormat(timeStr)
	formats := []string{
		"2006-01-02 15:04:05",
		"2006-01-02",
	}
	for _, format := range formats {
		t, err = time.Parse(format, timeStr)
		if err == nil {
			break
		}
	}
	if err != nil {
		log.Errorf("parseTimeFromString: invalid time format: %s. Error: %v", timeStr, err)
		return 0, err
	}
	return uint32(t.Unix()), nil
}
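// For illustration (assuming UTC), all of the following strings resolve to the
// same epoch second, 1700000000: the first two as unix seconds/milliseconds,
// the last two via absoluteTimeFormat further below.
//
//	"1700000000"            // unix seconds
//	"1700000000000"         // unix milliseconds, divided down to seconds
//	"2023-11-14 22:13:20"   // already in "2006-01-02 15:04:05" form
//	"2023/11/14-22:13:20"   // normalized by absoluteTimeFormat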
func extractSelectors(expr pql.Expr) [][]*labels.Matcher {
	var selectors [][]*labels.Matcher
	pql.Inspect(expr, func(node pql.Node, _ []pql.Node) error {
		var vs interface{}
		vs, ok := node.(*pql.VectorSelector)
		if ok {
			selectors = append(selectors, vs.(*pql.VectorSelector).LabelMatchers)
		}
		vs, ok = node.(pql.Expressions)
		if ok {
			for _, entry := range vs.(pql.Expressions) {
				expr, ok := entry.(*pql.MatrixSelector)
				if ok {
					selectors = append(selectors, expr.LabelMatchers)
				}
			}
		}
		return nil
	})
	return selectors
}

// extractGroupsFromPath parses the vector's outer function and extracts grouping information if by or without was used.
func extractGroupsFromPath(p []pql.Node) (bool, []string) {
	if len(p) == 0 {
		return false, nil
	}
	switch n := p[len(p)-1].(type) {
	case *pql.AggregateExpr:
		return !n.Without, n.Grouping
	case pql.Expressions:
		groupByVals := make([]string, 0)
		for _, entry := range n {
			expr, ok := entry.(*pql.MatrixSelector)
			if ok {
				for _, matcher := range expr.LabelMatchers {
					if matcher.Name != "__name__" {
						groupByVals = append(groupByVals, matcher.Name)
					}
				}
			}
		}
		return false, groupByVals
	default:
		return false, nil
	}
}

// extractFuncFromPath walks up the path and searches for the first instance of
// a function or aggregation.
func extractFuncFromPath(p []pql.Node) string {
	if len(p) == 0 {
		return ""
	}
	switch n := p[len(p)-1].(type) {
	case *pql.AggregateExpr:
		return n.Op.String()
	case *pql.Call:
		return n.Func.Name
	case *pql.BinaryExpr:
		// If we hit a binary expression we terminate since we only care about functions
		// or aggregations over a single metric.
		return ""
	default:
		return extractFuncFromPath(p[:len(p)-1])
	}
}
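// For illustration (the query is made up; `up` is just a conventional metric
// name): while inspecting the VectorSelector inside `sum by (job) (up)`, the
// last node on the path is the enclosing AggregateExpr, so
// extractGroupsFromPath reports (true, []string{"job"}) and
// extractFuncFromPath returns "sum".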
func parseDuration(s string) (time.Duration, error) {
	if d, err := strconv.ParseFloat(s, 64); err == nil {
		ts := d * float64(time.Second)
		if ts > float64(math.MaxInt64) || ts < float64(math.MinInt64) {
			return 0, fmt.Errorf("cannot parse %q to a valid duration. It overflows int64", s)
		}
		return time.Duration(ts), nil
	}
	if d, err := model.ParseDuration(s); err == nil {
		return time.Duration(d), nil
	}
	return 0, fmt.Errorf("cannot parse %q to a valid duration", s)
}

func absoluteTimeFormat(timeStr string) string {
	if strings.Contains(timeStr, "-") && strings.Count(timeStr, "-") == 1 {
		timeStr = strings.Replace(timeStr, "-", " ", 1)
	}
	timeStr = strings.Replace(timeStr, "/", "-", 2)
	if strings.Contains(timeStr, ":") {
		if strings.Count(timeStr, ":") < 2 {
			timeStr += ":00"
		}
	}
	return timeStr
}

/*
Supports "now-[Num][Unit]"
Num  ==> any positive integer
Unit ==> m (minutes), h (hours), d (days)
*/
func parseAlphaNumTime(nowTs uint64, inp string, defValue uint64) (uint64, usageStats.UsageStatsGranularity) {
	granularity := usageStats.Daily
	sanTime := strings.ReplaceAll(inp, " ", "")

	if sanTime == "now" {
		return nowTs, usageStats.Hourly
	}

	retVal := defValue

	strln := len(sanTime)
	if strln < 6 {
		return retVal, usageStats.Daily
	}

	unit := sanTime[strln-1]
	num, err := strconv.ParseInt(sanTime[4:strln-1], 10, 64)
	if err != nil {
		return defValue, usageStats.Daily
	}

	switch unit {
	case 'm':
		retVal = nowTs - MIN_IN_MS*uint64(num)
		granularity = usageStats.Hourly
	case 'h':
		retVal = nowTs - HOUR_IN_MS*uint64(num)
		granularity = usageStats.Hourly
	case 'd':
		retVal = nowTs - DAY_IN_MS*uint64(num)
		granularity = usageStats.Daily
	default:
		log.Errorf("parseAlphaNumTime: Unknown time unit %v", unit)
	}
	return retVal, granularity
}
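// For illustration, with nowTs in milliseconds:
//
//	parseAlphaNumTime(nowTs, "now-90m", def) // nowTs - 90*MIN_IN_MS, usageStats.Hourly
//	parseAlphaNumTime(nowTs, "now-7d", def)  // nowTs - 7*DAY_IN_MS, usageStats.Daily
//	parseAlphaNumTime(nowTs, "now-5x", def)  // unknown unit: falls back to def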
func parseSearchTextForRangeSelection(searchText string, startTime uint32, endTime uint32) (string, uint32, uint32, uint32) {

	pattern := `\[(.*?)\]`

	regex := regexp.MustCompile(pattern)

	matches := regex.FindAllStringSubmatch(searchText, -1)

	var timeRange string

	for _, match := range matches {
		timeRange = match[1]
	}

	var totalVal uint32 = 0
	var curVal uint32 = 0
	var curDimension string = ""
	var dimensionVal uint32 = 0

	for _, ch := range timeRange {
		if unicode.IsDigit(ch) {
			if curDimension == "" {
				curVal = curVal*10 + uint32(ch-'0')
			} else {
				totalVal += curVal * dimensionVal
				curDimension = ""
				curVal = uint32(ch - '0')
			}
		} else {
			curDimension += string(ch)
			if curDimension == "s" || curDimension == "S" {
				dimensionVal = 1
			} else if curDimension == "m" || curDimension == "M" {
				dimensionVal = 60
			} else if curDimension == "h" || curDimension == "H" {
				dimensionVal = 3600
			} else if curDimension == "d" || curDimension == "D" {
				dimensionVal = 24 * 3600
			} else if curDimension == "w" || curDimension == "W" {
				dimensionVal = 7 * 24 * 3600
			}
		}
	}
	totalVal += curVal * dimensionVal

	if totalVal > 0 {
		startTime = endTime - totalVal
	}

	return searchText, startTime, endTime, totalVal
}
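// For illustration: a query text containing "[5m]" yields totalVal = 5*60 =
// 300, so the returned interval is 300 seconds and startTime is pinned to
// endTime-300. A compound range such as "[1h30m]" yields 1*3600 + 30*60 =
// 5400 seconds.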