github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/query/parser/m3ql/grammar.peg.go

// Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package m3ql

//go:generate peg -inline -switch src/query/parser/m3ql/grammar.peg

import (
	"fmt"
	"io"
	"math"
	"os"
	"sort"
	"strconv"
)

const endSymbol rune = 1114112

/* The rule types inferred from the grammar are below. */
type pegRule uint8

const (
	ruleUnknown pegRule = iota
	ruleGrammar
	ruleMacroDef
	rulePipeline
	ruleExpression
	ruleFunctionCall
	ruleArgument
	ruleKeywordSpecifier
	ruleNesting
	ruleSpacing
	ruleSpace
	ruleEOL
	ruleComment
	ruleCommentStart
	ruleIdentifier
	ruleIdentifierStart
	ruleIdentifierChars
	ruleOperator
	ruleOperatorSymbols
	ruleBoolean
	ruleTrue
	ruleFalse
	ruleNumber
	ruleIntegralNumber
	ruleFloatingNumber
	ruleMinus
	ruleStringLiteral
	ruleQuoteChar
	rulePattern
	rulePatternChars
	ruleGlobSymbols
	ruleSemicolon
	ruleEquals
	rulePipe
	ruleLParenthesis
	ruleRParenthesis
	ruleColon
	ruleEOF
	ruleAction0
	ruleAction1
	ruleAction2
	ruleAction3
	ruleAction4
	ruleAction5
	ruleAction6
	ruleAction7
	ruleAction8
	ruleAction9
	rulePegText
)

var rul3s = [...]string{
	"Unknown",
	"Grammar",
	"MacroDef",
	"Pipeline",
	"Expression",
	"FunctionCall",
	"Argument",
	"KeywordSpecifier",
	"Nesting",
	"Spacing",
	"Space",
	"EOL",
	"Comment",
	"CommentStart",
	"Identifier",
	"IdentifierStart",
	"IdentifierChars",
	"Operator",
	"OperatorSymbols",
	"Boolean",
	"True",
	"False",
	"Number",
	"IntegralNumber",
	"FloatingNumber",
	"Minus",
	"StringLiteral",
	"QuoteChar",
	"Pattern",
	"PatternChars",
	"GlobSymbols",
	"Semicolon",
	"Equals",
	"Pipe",
	"LParenthesis",
	"RParenthesis",
	"Colon",
	"EOF",
	"Action0",
	"Action1",
	"Action2",
	"Action3",
	"Action4",
	"Action5",
	"Action6",
	"Action7",
	"Action8",
	"Action9",
	"PegText",
}

type token32 struct {
	pegRule
	begin, end uint32
}

func (t *token32) String() string {
	return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end)
}

type node32 struct {
	token32
	up, next *node32
}

func (node *node32) print(w io.Writer, pretty bool, buffer string) {
	var print func(node *node32, depth int)
	print = func(node *node32, depth int) {
		for node != nil {
			for c := 0; c < depth; c++ {
				fmt.Printf(" ")
			}
			rule := rul3s[node.pegRule]
			quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end])))
			if !pretty {
				fmt.Fprintf(w, "%v %v\n", rule, quote)
			} else {
				fmt.Fprintf(w, "\x1B[34m%v\x1B[m %v\n", rule, quote)
			}
			if node.up != nil {
				print(node.up, depth+1)
			}
			node = node.next
		}
	}
	print(node, 0)
}

func (node *node32) Print(w io.Writer, buffer string) {
	node.print(w, false, buffer)
}

func (node *node32) PrettyPrint(w io.Writer, buffer string) {
	node.print(w, true, buffer)
}

type tokens32 struct {
	tree []token32
}

func (t *tokens32) Trim(length uint32) {
	t.tree = t.tree[:length]
}

func (t *tokens32) Print() {
	for _, token := range t.tree {
		fmt.Println(token.String())
	}
}

func (t *tokens32) AST() *node32 {
	type element struct {
		node *node32
		down *element
	}
	tokens := t.Tokens()
	var stack *element
	for _, token := range tokens {
		if token.begin == token.end {
			continue
		}
		node := &node32{token32: token}
		for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end {
			stack.node.next = node.up
			node.up = stack.node
			stack = stack.down
		}
		stack = &element{node: node, down: stack}
	}
	if stack != nil {
		return stack.node
	}
	return nil
}

func (t *tokens32) PrintSyntaxTree(buffer string) {
	t.AST().Print(os.Stdout, buffer)
}

func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) {
	t.AST().Print(w, buffer)
}

func (t *tokens32) PrettyPrintSyntaxTree(buffer string) {
	t.AST().PrettyPrint(os.Stdout, buffer)
}

func (t *tokens32) Add(rule pegRule, begin, end, index uint32) {
	if tree := t.tree; int(index) >= len(tree) {
		expanded := make([]token32, 2*len(tree))
		copy(expanded, tree)
		t.tree = expanded
	}
	t.tree[index] = token32{
		pegRule: rule,
		begin:   begin,
		end:     end,
	}
}

func (t *tokens32) Tokens() []token32 {
	return t.tree
}

type m3ql struct {
	scriptBuilder

	Buffer string
	buffer []rune
	rules  [49]func() bool
	parse  func(rule ...int) error
	reset  func()
	Pretty bool
	tokens32
}

func (p *m3ql) Parse(rule ...int) error {
	return p.parse(rule...)
}

func (p *m3ql) Reset() {
	p.reset()
}

type textPosition struct {
	line, symbol int
}

type textPositionMap map[int]textPosition

func translatePositions(buffer []rune, positions []int) textPositionMap {
	length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0
	sort.Ints(positions)

search:
	for i, c := range buffer {
		if c == '\n' {
			line, symbol = line+1, 0
		} else {
			symbol++
		}
		if i == positions[j] {
			translations[positions[j]] = textPosition{line, symbol}
			for j++; j < length; j++ {
				if i != positions[j] {
					continue search
				}
			}
			break search
		}
	}

	return translations
}

type parseError struct {
	p   *m3ql
	max token32
}

func (e *parseError) Error() string {
	tokens, error := []token32{e.max}, "\n"
	positions, p := make([]int, 2*len(tokens)), 0
	for _, token := range tokens {
		positions[p], p = int(token.begin), p+1
		positions[p], p = int(token.end), p+1
	}
	translations := translatePositions(e.p.buffer, positions)
	format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n"
	if e.p.Pretty {
		format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n"
	}
	for _, token := range tokens {
		begin, end := int(token.begin), int(token.end)
		error += fmt.Sprintf(format,
			rul3s[token.pegRule],
			translations[begin].line, translations[begin].symbol,
			translations[end].line, translations[end].symbol,
			strconv.Quote(string(e.p.buffer[begin:end])))
	}

	return error
}

func (p *m3ql) PrintSyntaxTree() {
	if p.Pretty {
		p.tokens32.PrettyPrintSyntaxTree(p.Buffer)
	} else {
		p.tokens32.PrintSyntaxTree(p.Buffer)
	}
}

func (p *m3ql) WriteSyntaxTree(w io.Writer) {
	p.tokens32.WriteSyntaxTree(w, p.Buffer)
}

func (p *m3ql) Execute() {
	buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0
	for _, token := range p.Tokens() {
		switch token.pegRule {

		case rulePegText:
			begin, end = int(token.begin), int(token.end)
			text = string(_buffer[begin:end])

		case ruleAction0:
			p.newMacro(text)
		case ruleAction1:
			p.newPipeline()
		case ruleAction2:
			p.endPipeline()
		case ruleAction3:
			p.newExpression(text)
		case ruleAction4:
			p.endExpression()
		case ruleAction5:
			p.newBooleanArgument(text)
		case ruleAction6:
			p.newNumericArgument(text)
		case ruleAction7:
			p.newPatternArgument(text)
		case ruleAction8:
			p.newStringLiteralArgument(text)
		case ruleAction9:
			p.newKeywordArgument(text)

		}
	}
	_, _, _, _, _ = buffer, _buffer, text, begin, end
}

func (p *m3ql) Init() {
	var (
		max                  token32
		position, tokenIndex uint32
		buffer               []rune
	)
	p.reset = func() {
		max = token32{}
		position, tokenIndex = 0, 0

		p.buffer = []rune(p.Buffer)
		if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol {
			p.buffer = append(p.buffer, endSymbol)
		}
		buffer = p.buffer
	}
	p.reset()

	_rules := p.rules
	tree := tokens32{tree: make([]token32, math.MaxInt16)}
	p.parse = func(rule ...int) error {
		r := 1
		if len(rule) > 0 {
			r = rule[0]
		}
		matches := p.rules[r]()
		p.tokens32 = tree
		if matches {
			p.Trim(tokenIndex)
			return nil
		}
		return &parseError{p, max}
	}

	add
:= func(rule pegRule, begin uint32) { 418 tree.Add(rule, begin, position, tokenIndex) 419 tokenIndex++ 420 if begin != position && position > max.end { 421 max = token32{rule, begin, position} 422 } 423 } 424 425 matchDot := func() bool { 426 if buffer[position] != endSymbol { 427 position++ 428 return true 429 } 430 return false 431 } 432 433 /*matchChar := func(c byte) bool { 434 if buffer[position] == c { 435 position++ 436 return true 437 } 438 return false 439 }*/ 440 441 /*matchRange := func(lower byte, upper byte) bool { 442 if c := buffer[position]; c >= lower && c <= upper { 443 position++ 444 return true 445 } 446 return false 447 }*/ 448 449 _rules = [...]func() bool{ 450 nil, 451 /* 0 Grammar <- <(Spacing (MacroDef Semicolon)* Pipeline EOF)> */ 452 func() bool { 453 position0, tokenIndex0 := position, tokenIndex 454 { 455 position1 := position 456 if !_rules[ruleSpacing]() { 457 goto l0 458 } 459 l2: 460 { 461 position3, tokenIndex3 := position, tokenIndex 462 { 463 position4 := position 464 if !_rules[ruleIdentifier]() { 465 goto l3 466 } 467 { 468 add(ruleAction0, position) 469 } 470 { 471 position6 := position 472 if buffer[position] != rune('=') { 473 goto l3 474 } 475 position++ 476 if !_rules[ruleSpacing]() { 477 goto l3 478 } 479 add(ruleEquals, position6) 480 } 481 if !_rules[rulePipeline]() { 482 goto l3 483 } 484 add(ruleMacroDef, position4) 485 } 486 { 487 position7 := position 488 if buffer[position] != rune(';') { 489 goto l3 490 } 491 position++ 492 if !_rules[ruleSpacing]() { 493 goto l3 494 } 495 add(ruleSemicolon, position7) 496 } 497 goto l2 498 l3: 499 position, tokenIndex = position3, tokenIndex3 500 } 501 if !_rules[rulePipeline]() { 502 goto l0 503 } 504 { 505 position8 := position 506 { 507 position9, tokenIndex9 := position, tokenIndex 508 if !matchDot() { 509 goto l9 510 } 511 goto l0 512 l9: 513 position, tokenIndex = position9, tokenIndex9 514 } 515 add(ruleEOF, position8) 516 } 517 add(ruleGrammar, position1) 518 } 519 return true 520 l0: 521 position, tokenIndex = position0, tokenIndex0 522 return false 523 }, 524 /* 1 MacroDef <- <(Identifier Action0 Equals Pipeline)> */ 525 nil, 526 /* 2 Pipeline <- <(Action1 Expression (Pipe Expression)* Action2)> */ 527 func() bool { 528 position11, tokenIndex11 := position, tokenIndex 529 { 530 position12 := position 531 { 532 add(ruleAction1, position) 533 } 534 if !_rules[ruleExpression]() { 535 goto l11 536 } 537 l14: 538 { 539 position15, tokenIndex15 := position, tokenIndex 540 { 541 position16 := position 542 if buffer[position] != rune('|') { 543 goto l15 544 } 545 position++ 546 if !_rules[ruleSpacing]() { 547 goto l15 548 } 549 add(rulePipe, position16) 550 } 551 if !_rules[ruleExpression]() { 552 goto l15 553 } 554 goto l14 555 l15: 556 position, tokenIndex = position15, tokenIndex15 557 } 558 { 559 add(ruleAction2, position) 560 } 561 add(rulePipeline, position12) 562 } 563 return true 564 l11: 565 position, tokenIndex = position11, tokenIndex11 566 return false 567 }, 568 /* 3 Expression <- <(FunctionCall / Nesting)> */ 569 func() bool { 570 position18, tokenIndex18 := position, tokenIndex 571 { 572 position19 := position 573 { 574 position20, tokenIndex20 := position, tokenIndex 575 { 576 position22 := position 577 { 578 position23, tokenIndex23 := position, tokenIndex 579 if !_rules[ruleIdentifier]() { 580 goto l24 581 } 582 goto l23 583 l24: 584 position, tokenIndex = position23, tokenIndex23 585 { 586 position25 := position 587 { 588 position26 := position 589 { 590 position27 := position 591 { 
592 position28, tokenIndex28 := position, tokenIndex 593 if buffer[position] != rune('<') { 594 goto l29 595 } 596 position++ 597 if buffer[position] != rune('=') { 598 goto l29 599 } 600 position++ 601 goto l28 602 l29: 603 position, tokenIndex = position28, tokenIndex28 604 if buffer[position] != rune('>') { 605 goto l30 606 } 607 position++ 608 if buffer[position] != rune('=') { 609 goto l30 610 } 611 position++ 612 goto l28 613 l30: 614 position, tokenIndex = position28, tokenIndex28 615 { 616 switch buffer[position] { 617 case '>': 618 if buffer[position] != rune('>') { 619 goto l21 620 } 621 position++ 622 break 623 case '!': 624 if buffer[position] != rune('!') { 625 goto l21 626 } 627 position++ 628 if buffer[position] != rune('=') { 629 goto l21 630 } 631 position++ 632 break 633 case '=': 634 if buffer[position] != rune('=') { 635 goto l21 636 } 637 position++ 638 if buffer[position] != rune('=') { 639 goto l21 640 } 641 position++ 642 break 643 default: 644 if buffer[position] != rune('<') { 645 goto l21 646 } 647 position++ 648 break 649 } 650 } 651 652 } 653 l28: 654 add(ruleOperatorSymbols, position27) 655 } 656 add(rulePegText, position26) 657 } 658 if !_rules[ruleSpacing]() { 659 goto l21 660 } 661 add(ruleOperator, position25) 662 } 663 } 664 l23: 665 { 666 add(ruleAction3, position) 667 } 668 l33: 669 { 670 position34, tokenIndex34 := position, tokenIndex 671 { 672 position35 := position 673 { 674 position36, tokenIndex36 := position, tokenIndex 675 { 676 position38 := position 677 if !_rules[ruleIdentifier]() { 678 goto l36 679 } 680 { 681 add(ruleAction9, position) 682 } 683 { 684 position40 := position 685 if buffer[position] != rune(':') { 686 goto l36 687 } 688 position++ 689 if !_rules[ruleSpacing]() { 690 goto l36 691 } 692 add(ruleColon, position40) 693 } 694 add(ruleKeywordSpecifier, position38) 695 } 696 goto l37 697 l36: 698 position, tokenIndex = position36, tokenIndex36 699 } 700 l37: 701 { 702 position41, tokenIndex41 := position, tokenIndex 703 { 704 position43 := position 705 { 706 position44 := position 707 { 708 position45, tokenIndex45 := position, tokenIndex 709 { 710 position47 := position 711 { 712 position48, tokenIndex48 := position, tokenIndex 713 if buffer[position] != rune('t') { 714 goto l49 715 } 716 position++ 717 goto l48 718 l49: 719 position, tokenIndex = position48, tokenIndex48 720 if buffer[position] != rune('T') { 721 goto l46 722 } 723 position++ 724 } 725 l48: 726 { 727 position50, tokenIndex50 := position, tokenIndex 728 if buffer[position] != rune('r') { 729 goto l51 730 } 731 position++ 732 goto l50 733 l51: 734 position, tokenIndex = position50, tokenIndex50 735 if buffer[position] != rune('R') { 736 goto l46 737 } 738 position++ 739 } 740 l50: 741 { 742 position52, tokenIndex52 := position, tokenIndex 743 if buffer[position] != rune('u') { 744 goto l53 745 } 746 position++ 747 goto l52 748 l53: 749 position, tokenIndex = position52, tokenIndex52 750 if buffer[position] != rune('U') { 751 goto l46 752 } 753 position++ 754 } 755 l52: 756 { 757 position54, tokenIndex54 := position, tokenIndex 758 if buffer[position] != rune('e') { 759 goto l55 760 } 761 position++ 762 goto l54 763 l55: 764 position, tokenIndex = position54, tokenIndex54 765 if buffer[position] != rune('E') { 766 goto l46 767 } 768 position++ 769 } 770 l54: 771 add(ruleTrue, position47) 772 } 773 goto l45 774 l46: 775 position, tokenIndex = position45, tokenIndex45 776 { 777 position56 := position 778 { 779 position57, tokenIndex57 := position, tokenIndex 780 if 
buffer[position] != rune('f') { 781 goto l58 782 } 783 position++ 784 goto l57 785 l58: 786 position, tokenIndex = position57, tokenIndex57 787 if buffer[position] != rune('F') { 788 goto l42 789 } 790 position++ 791 } 792 l57: 793 { 794 position59, tokenIndex59 := position, tokenIndex 795 if buffer[position] != rune('a') { 796 goto l60 797 } 798 position++ 799 goto l59 800 l60: 801 position, tokenIndex = position59, tokenIndex59 802 if buffer[position] != rune('A') { 803 goto l42 804 } 805 position++ 806 } 807 l59: 808 { 809 position61, tokenIndex61 := position, tokenIndex 810 if buffer[position] != rune('l') { 811 goto l62 812 } 813 position++ 814 goto l61 815 l62: 816 position, tokenIndex = position61, tokenIndex61 817 if buffer[position] != rune('L') { 818 goto l42 819 } 820 position++ 821 } 822 l61: 823 { 824 position63, tokenIndex63 := position, tokenIndex 825 if buffer[position] != rune('s') { 826 goto l64 827 } 828 position++ 829 goto l63 830 l64: 831 position, tokenIndex = position63, tokenIndex63 832 if buffer[position] != rune('S') { 833 goto l42 834 } 835 position++ 836 } 837 l63: 838 { 839 position65, tokenIndex65 := position, tokenIndex 840 if buffer[position] != rune('e') { 841 goto l66 842 } 843 position++ 844 goto l65 845 l66: 846 position, tokenIndex = position65, tokenIndex65 847 if buffer[position] != rune('E') { 848 goto l42 849 } 850 position++ 851 } 852 l65: 853 add(ruleFalse, position56) 854 } 855 } 856 l45: 857 add(rulePegText, position44) 858 } 859 { 860 position67, tokenIndex67 := position, tokenIndex 861 if !_rules[rulePatternChars]() { 862 goto l67 863 } 864 goto l42 865 l67: 866 position, tokenIndex = position67, tokenIndex67 867 } 868 if !_rules[ruleSpacing]() { 869 goto l42 870 } 871 add(ruleBoolean, position43) 872 } 873 { 874 add(ruleAction5, position) 875 } 876 goto l41 877 l42: 878 position, tokenIndex = position41, tokenIndex41 879 { 880 position70 := position 881 { 882 position71 := position 883 { 884 position72, tokenIndex72 := position, tokenIndex 885 { 886 position74 := position 887 if buffer[position] != rune('-') { 888 goto l72 889 } 890 position++ 891 add(ruleMinus, position74) 892 } 893 goto l73 894 l72: 895 position, tokenIndex = position72, tokenIndex72 896 } 897 l73: 898 { 899 position75, tokenIndex75 := position, tokenIndex 900 { 901 position77 := position 902 { 903 position78, tokenIndex78 := position, tokenIndex 904 if !_rules[ruleIntegralNumber]() { 905 goto l78 906 } 907 goto l79 908 l78: 909 position, tokenIndex = position78, tokenIndex78 910 } 911 l79: 912 if buffer[position] != rune('.') { 913 goto l76 914 } 915 position++ 916 if !_rules[ruleIntegralNumber]() { 917 goto l76 918 } 919 add(ruleFloatingNumber, position77) 920 } 921 goto l75 922 l76: 923 position, tokenIndex = position75, tokenIndex75 924 if !_rules[ruleIntegralNumber]() { 925 goto l69 926 } 927 } 928 l75: 929 add(rulePegText, position71) 930 } 931 { 932 position80, tokenIndex80 := position, tokenIndex 933 if !_rules[rulePatternChars]() { 934 goto l80 935 } 936 goto l69 937 l80: 938 position, tokenIndex = position80, tokenIndex80 939 } 940 if !_rules[ruleSpacing]() { 941 goto l69 942 } 943 add(ruleNumber, position70) 944 } 945 { 946 add(ruleAction6, position) 947 } 948 goto l41 949 l69: 950 position, tokenIndex = position41, tokenIndex41 951 { 952 switch buffer[position] { 953 case '(': 954 if !_rules[ruleNesting]() { 955 goto l34 956 } 957 break 958 case '"': 959 { 960 position83 := position 961 if !_rules[ruleQuoteChar]() { 962 goto l34 963 } 964 { 965 position84 := 
position 966 l85: 967 { 968 position86, tokenIndex86 := position, tokenIndex 969 { 970 position87, tokenIndex87 := position, tokenIndex 971 if buffer[position] != rune('"') { 972 goto l87 973 } 974 position++ 975 goto l86 976 l87: 977 position, tokenIndex = position87, tokenIndex87 978 } 979 if !matchDot() { 980 goto l86 981 } 982 goto l85 983 l86: 984 position, tokenIndex = position86, tokenIndex86 985 } 986 add(rulePegText, position84) 987 } 988 if !_rules[ruleQuoteChar]() { 989 goto l34 990 } 991 if !_rules[ruleSpacing]() { 992 goto l34 993 } 994 add(ruleStringLiteral, position83) 995 } 996 { 997 add(ruleAction8, position) 998 } 999 break 1000 default: 1001 { 1002 position89 := position 1003 { 1004 position90 := position 1005 if !_rules[rulePatternChars]() { 1006 goto l34 1007 } 1008 l91: 1009 { 1010 position92, tokenIndex92 := position, tokenIndex 1011 if !_rules[rulePatternChars]() { 1012 goto l92 1013 } 1014 goto l91 1015 l92: 1016 position, tokenIndex = position92, tokenIndex92 1017 } 1018 add(rulePegText, position90) 1019 } 1020 if !_rules[ruleSpacing]() { 1021 goto l34 1022 } 1023 add(rulePattern, position89) 1024 } 1025 { 1026 add(ruleAction7, position) 1027 } 1028 break 1029 } 1030 } 1031 1032 } 1033 l41: 1034 add(ruleArgument, position35) 1035 } 1036 goto l33 1037 l34: 1038 position, tokenIndex = position34, tokenIndex34 1039 } 1040 { 1041 add(ruleAction4, position) 1042 } 1043 add(ruleFunctionCall, position22) 1044 } 1045 goto l20 1046 l21: 1047 position, tokenIndex = position20, tokenIndex20 1048 if !_rules[ruleNesting]() { 1049 goto l18 1050 } 1051 } 1052 l20: 1053 add(ruleExpression, position19) 1054 } 1055 return true 1056 l18: 1057 position, tokenIndex = position18, tokenIndex18 1058 return false 1059 }, 1060 /* 4 FunctionCall <- <((Identifier / Operator) Action3 Argument* Action4)> */ 1061 nil, 1062 /* 5 Argument <- <(KeywordSpecifier? ((Boolean Action5) / (Number Action6) / ((&('(') Nesting) | (&('"') (StringLiteral Action8)) | (&('$' | '*' | ',' | '-' | '.' | '/' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '?' 
| 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '[' | '\\' | ']' | '^' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z' | '{' | '}') (Pattern Action7)))))> */ 1063 nil, 1064 /* 6 KeywordSpecifier <- <(Identifier Action9 Colon)> */ 1065 nil, 1066 /* 7 Nesting <- <(LParenthesis Pipeline RParenthesis)> */ 1067 func() bool { 1068 position98, tokenIndex98 := position, tokenIndex 1069 { 1070 position99 := position 1071 { 1072 position100 := position 1073 if buffer[position] != rune('(') { 1074 goto l98 1075 } 1076 position++ 1077 if !_rules[ruleSpacing]() { 1078 goto l98 1079 } 1080 add(ruleLParenthesis, position100) 1081 } 1082 if !_rules[rulePipeline]() { 1083 goto l98 1084 } 1085 { 1086 position101 := position 1087 if buffer[position] != rune(')') { 1088 goto l98 1089 } 1090 position++ 1091 if !_rules[ruleSpacing]() { 1092 goto l98 1093 } 1094 add(ruleRParenthesis, position101) 1095 } 1096 add(ruleNesting, position99) 1097 } 1098 return true 1099 l98: 1100 position, tokenIndex = position98, tokenIndex98 1101 return false 1102 }, 1103 /* 8 Spacing <- <((&('#') Comment) | (&('\n' | '\r') EOL) | (&('\t' | ' ') Space))*> */ 1104 func() bool { 1105 { 1106 position103 := position 1107 l104: 1108 { 1109 position105, tokenIndex105 := position, tokenIndex 1110 { 1111 switch buffer[position] { 1112 case '#': 1113 { 1114 position107 := position 1115 { 1116 position108 := position 1117 if buffer[position] != rune('#') { 1118 goto l105 1119 } 1120 position++ 1121 add(ruleCommentStart, position108) 1122 } 1123 l109: 1124 { 1125 position110, tokenIndex110 := position, tokenIndex 1126 { 1127 position111, tokenIndex111 := position, tokenIndex 1128 if !_rules[ruleEOL]() { 1129 goto l111 1130 } 1131 goto l110 1132 l111: 1133 position, tokenIndex = position111, tokenIndex111 1134 } 1135 if !matchDot() { 1136 goto l110 1137 } 1138 goto l109 1139 l110: 1140 position, tokenIndex = position110, tokenIndex110 1141 } 1142 add(ruleComment, position107) 1143 } 1144 break 1145 case '\n', '\r': 1146 if !_rules[ruleEOL]() { 1147 goto l105 1148 } 1149 break 1150 default: 1151 { 1152 position112 := position 1153 { 1154 position113, tokenIndex113 := position, tokenIndex 1155 if buffer[position] != rune(' ') { 1156 goto l114 1157 } 1158 position++ 1159 goto l113 1160 l114: 1161 position, tokenIndex = position113, tokenIndex113 1162 if buffer[position] != rune('\t') { 1163 goto l105 1164 } 1165 position++ 1166 } 1167 l113: 1168 add(ruleSpace, position112) 1169 } 1170 break 1171 } 1172 } 1173 1174 goto l104 1175 l105: 1176 position, tokenIndex = position105, tokenIndex105 1177 } 1178 add(ruleSpacing, position103) 1179 } 1180 return true 1181 }, 1182 /* 9 Space <- <(' ' / '\t')> */ 1183 nil, 1184 /* 10 EOL <- <(('\r' '\n') / '\n' / '\r')> */ 1185 func() bool { 1186 position116, tokenIndex116 := position, tokenIndex 1187 { 1188 position117 := position 1189 { 1190 position118, tokenIndex118 := position, tokenIndex 1191 if buffer[position] != rune('\r') { 1192 goto l119 1193 } 1194 position++ 1195 if buffer[position] != rune('\n') { 1196 goto l119 1197 } 1198 position++ 1199 goto l118 1200 l119: 1201 position, tokenIndex = position118, tokenIndex118 1202 if buffer[position] != rune('\n') { 1203 goto l120 1204 } 1205 position++ 1206 goto l118 1207 l120: 1208 position, tokenIndex = position118, tokenIndex118 1209 if 
buffer[position] != rune('\r') { 1210 goto l116 1211 } 1212 position++ 1213 } 1214 l118: 1215 add(ruleEOL, position117) 1216 } 1217 return true 1218 l116: 1219 position, tokenIndex = position116, tokenIndex116 1220 return false 1221 }, 1222 /* 11 Comment <- <(CommentStart (!EOL .)*)> */ 1223 nil, 1224 /* 12 CommentStart <- <'#'> */ 1225 nil, 1226 /* 13 Identifier <- <(<(IdentifierStart IdentifierChars*)> Spacing)> */ 1227 func() bool { 1228 position123, tokenIndex123 := position, tokenIndex 1229 { 1230 position124 := position 1231 { 1232 position125 := position 1233 if !_rules[ruleIdentifierStart]() { 1234 goto l123 1235 } 1236 l126: 1237 { 1238 position127, tokenIndex127 := position, tokenIndex 1239 if !_rules[ruleIdentifierChars]() { 1240 goto l127 1241 } 1242 goto l126 1243 l127: 1244 position, tokenIndex = position127, tokenIndex127 1245 } 1246 add(rulePegText, position125) 1247 } 1248 if !_rules[ruleSpacing]() { 1249 goto l123 1250 } 1251 add(ruleIdentifier, position124) 1252 } 1253 return true 1254 l123: 1255 position, tokenIndex = position123, tokenIndex123 1256 return false 1257 }, 1258 /* 14 IdentifierStart <- <((&('_') '_') | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z') [A-Z]) | (&('a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') [a-z]))> */ 1259 func() bool { 1260 position128, tokenIndex128 := position, tokenIndex 1261 { 1262 position129 := position 1263 { 1264 switch buffer[position] { 1265 case '_': 1266 if buffer[position] != rune('_') { 1267 goto l128 1268 } 1269 position++ 1270 break 1271 case 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': 1272 if c := buffer[position]; c < rune('A') || c > rune('Z') { 1273 goto l128 1274 } 1275 position++ 1276 break 1277 default: 1278 if c := buffer[position]; c < rune('a') || c > rune('z') { 1279 goto l128 1280 } 1281 position++ 1282 break 1283 } 1284 } 1285 1286 add(ruleIdentifierStart, position129) 1287 } 1288 return true 1289 l128: 1290 position, tokenIndex = position128, tokenIndex128 1291 return false 1292 }, 1293 /* 15 IdentifierChars <- <((&('\\') '\\') | (&('/') '/') | (&('-') '-') | (&('.') '.') | (&('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9') [0-9]) | (&('A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z' | '_' | 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z') IdentifierStart))> */ 1294 func() bool { 1295 position131, tokenIndex131 := position, tokenIndex 1296 { 1297 position132 := position 1298 { 1299 switch buffer[position] { 1300 case '\\': 1301 if buffer[position] != rune('\\') { 1302 goto l131 1303 } 1304 position++ 1305 break 1306 case '/': 1307 if buffer[position] != rune('/') { 1308 goto l131 1309 } 1310 position++ 1311 break 1312 case '-': 1313 if buffer[position] != rune('-') { 1314 goto l131 1315 } 1316 position++ 1317 break 1318 case '.': 1319 if buffer[position] != rune('.') { 1320 goto l131 1321 } 1322 position++ 1323 break 1324 case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': 1325 if c := buffer[position]; c < rune('0') || c > rune('9') { 1326 goto l131 1327 } 1328 position++ 1329 break 1330 
default: 1331 if !_rules[ruleIdentifierStart]() { 1332 goto l131 1333 } 1334 break 1335 } 1336 } 1337 1338 add(ruleIdentifierChars, position132) 1339 } 1340 return true 1341 l131: 1342 position, tokenIndex = position131, tokenIndex131 1343 return false 1344 }, 1345 /* 16 Operator <- <(<OperatorSymbols> Spacing)> */ 1346 nil, 1347 /* 17 OperatorSymbols <- <(('<' '=') / ('>' '=') / ((&('>') '>') | (&('!') ('!' '=')) | (&('=') ('=' '=')) | (&('<') '<')))> */ 1348 nil, 1349 /* 18 Boolean <- <(<(True / False)> !PatternChars Spacing)> */ 1350 nil, 1351 /* 19 True <- <(('t' / 'T') ('r' / 'R') ('u' / 'U') ('e' / 'E'))> */ 1352 nil, 1353 /* 20 False <- <(('f' / 'F') ('a' / 'A') ('l' / 'L') ('s' / 'S') ('e' / 'E'))> */ 1354 nil, 1355 /* 21 Number <- <(<(Minus? (FloatingNumber / IntegralNumber))> !PatternChars Spacing)> */ 1356 nil, 1357 /* 22 IntegralNumber <- <[0-9]+> */ 1358 func() bool { 1359 position140, tokenIndex140 := position, tokenIndex 1360 { 1361 position141 := position 1362 if c := buffer[position]; c < rune('0') || c > rune('9') { 1363 goto l140 1364 } 1365 position++ 1366 l142: 1367 { 1368 position143, tokenIndex143 := position, tokenIndex 1369 if c := buffer[position]; c < rune('0') || c > rune('9') { 1370 goto l143 1371 } 1372 position++ 1373 goto l142 1374 l143: 1375 position, tokenIndex = position143, tokenIndex143 1376 } 1377 add(ruleIntegralNumber, position141) 1378 } 1379 return true 1380 l140: 1381 position, tokenIndex = position140, tokenIndex140 1382 return false 1383 }, 1384 /* 23 FloatingNumber <- <(IntegralNumber? '.' IntegralNumber)> */ 1385 nil, 1386 /* 24 Minus <- <'-'> */ 1387 nil, 1388 /* 25 StringLiteral <- <(QuoteChar <(!'"' .)*> QuoteChar Spacing)> */ 1389 nil, 1390 /* 26 QuoteChar <- <'"'> */ 1391 func() bool { 1392 position147, tokenIndex147 := position, tokenIndex 1393 { 1394 position148 := position 1395 if buffer[position] != rune('"') { 1396 goto l147 1397 } 1398 position++ 1399 add(ruleQuoteChar, position148) 1400 } 1401 return true 1402 l147: 1403 position, tokenIndex = position147, tokenIndex147 1404 return false 1405 }, 1406 /* 27 Pattern <- <(<PatternChars+> Spacing)> */ 1407 nil, 1408 /* 28 PatternChars <- <(IdentifierChars / GlobSymbols)> */ 1409 func() bool { 1410 position150, tokenIndex150 := position, tokenIndex 1411 { 1412 position151 := position 1413 { 1414 position152, tokenIndex152 := position, tokenIndex 1415 if !_rules[ruleIdentifierChars]() { 1416 goto l153 1417 } 1418 goto l152 1419 l153: 1420 position, tokenIndex = position152, tokenIndex152 1421 { 1422 position154 := position 1423 { 1424 switch buffer[position] { 1425 case '$': 1426 if buffer[position] != rune('$') { 1427 goto l150 1428 } 1429 position++ 1430 break 1431 case '^': 1432 if buffer[position] != rune('^') { 1433 goto l150 1434 } 1435 position++ 1436 break 1437 case ',': 1438 if buffer[position] != rune(',') { 1439 goto l150 1440 } 1441 position++ 1442 break 1443 case '?': 1444 if buffer[position] != rune('?') { 1445 goto l150 1446 } 1447 position++ 1448 break 1449 case '*': 1450 if buffer[position] != rune('*') { 1451 goto l150 1452 } 1453 position++ 1454 break 1455 case ']': 1456 if buffer[position] != rune(']') { 1457 goto l150 1458 } 1459 position++ 1460 break 1461 case '[': 1462 if buffer[position] != rune('[') { 1463 goto l150 1464 } 1465 position++ 1466 break 1467 case '}': 1468 if buffer[position] != rune('}') { 1469 goto l150 1470 } 1471 position++ 1472 break 1473 default: 1474 if buffer[position] != rune('{') { 1475 goto l150 1476 } 1477 position++ 1478 break 1479 } 
1480 } 1481 1482 add(ruleGlobSymbols, position154) 1483 } 1484 } 1485 l152: 1486 add(rulePatternChars, position151) 1487 } 1488 return true 1489 l150: 1490 position, tokenIndex = position150, tokenIndex150 1491 return false 1492 }, 1493 /* 29 GlobSymbols <- <((&('$') '$') | (&('^') '^') | (&(',') ',') | (&('?') '?') | (&('*') '*') | (&(']') ']') | (&('[') '[') | (&('}') '}') | (&('{') '{'))> */ 1494 nil, 1495 /* 30 Semicolon <- <(';' Spacing)> */ 1496 nil, 1497 /* 31 Equals <- <('=' Spacing)> */ 1498 nil, 1499 /* 32 Pipe <- <('|' Spacing)> */ 1500 nil, 1501 /* 33 LParenthesis <- <('(' Spacing)> */ 1502 nil, 1503 /* 34 RParenthesis <- <(')' Spacing)> */ 1504 nil, 1505 /* 35 Colon <- <(':' Spacing)> */ 1506 nil, 1507 /* 36 EOF <- <!.> */ 1508 nil, 1509 /* 38 Action0 <- <{ p.newMacro(text) }> */ 1510 nil, 1511 /* 39 Action1 <- <{ p.newPipeline() }> */ 1512 nil, 1513 /* 40 Action2 <- <{ p.endPipeline() }> */ 1514 nil, 1515 /* 41 Action3 <- <{ p.newExpression(text) }> */ 1516 nil, 1517 /* 42 Action4 <- <{ p.endExpression() }> */ 1518 nil, 1519 /* 43 Action5 <- <{ p.newBooleanArgument(text) }> */ 1520 nil, 1521 /* 44 Action6 <- <{ p.newNumericArgument(text) }> */ 1522 nil, 1523 /* 45 Action7 <- <{ p.newPatternArgument(text) }> */ 1524 nil, 1525 /* 46 Action8 <- <{ p.newStringLiteralArgument(text) }> */ 1526 nil, 1527 /* 47 Action9 <- <{ p.newKeywordArgument(text) }> */ 1528 nil, 1529 nil, 1530 } 1531 p.rules = _rules 1532 }
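
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not emitted by peg): a minimal example of how the
// generated m3ql parser above might be driven. The function name and the query
// string are hypothetical, and the sketch assumes the zero value of the
// embedded scriptBuilder (defined elsewhere in this package) is ready to
// receive the Action callbacks replayed by Execute.
func parseQuerySketch(query string) error {
	// e.g. query = "fetch name:cpu.usage | sum | scale factor:2.0"
	// (a Pipeline of three FunctionCall expressions joined by Pipe; the first
	// and last carry keyword arguments per the Argument/KeywordSpecifier rules).
	p := &m3ql{Buffer: query}
	p.Init() // builds the rule table and installs the parse/reset closures
	if err := p.Parse(); err != nil {
		// err is a *parseError reporting the farthest-matching rule together
		// with line/symbol positions translated from the rune buffer.
		return err
	}
	p.Execute() // replays PegText/Action0..Action9 tokens into scriptBuilder
	return nil
}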