github.com/m3db/m3@v1.5.0/src/query/graphite/lexer/lexer_test.go

// Copyright (c) 2019 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package lexer

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
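
// lexerTestData pairs each input string with the token stream the lexer is
// expected to emit; reservedIdents (nil in every case here) maps identifiers
// to reserved token types.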
type lexerTestData struct {
	input          string
	expectedTokens []Token
	reservedIdents map[string]TokenType
}

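// TestLexer feeds each input through the lexer and checks the emitted token
// stream against the expected tokens.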
func TestLexer(t *testing.T) {
	lexerTestInput := []lexerTestData{
		{"call09", []Token{{Identifier, "call09"}}, nil},
		{"sortByName(foo.bar.zed)", []Token{
			{Identifier, "sortByName"},
			{LParenthesis, "("},
			{Pattern, "foo.bar.zed"},
			{RParenthesis, ")"}}, nil},
		{"foo.'bar<1001>'.baz",
			[]Token{{Pattern, "foo.'bar<1001>'.baz"}}, nil},
		{"a.{b,c,d}.node[0-2].qux.*",
			[]Token{{Pattern, "a.{b,c,d}.node[0-2].qux.*"}}, nil},
		{"qaz{0[3-9],1[0-9],20}",
			[]Token{{Pattern, "qaz{0[3-9],1[0-9],20}"}}, nil},
		{"qaz{0[3-9],1[0-9],20}.bar",
			[]Token{{Pattern, "qaz{0[3-9],1[0-9],20}.bar"}}, nil},
		{"stats.foo.counts.bar.baz.status_code.?XX",
			[]Token{{Pattern, "stats.foo.counts.bar.baz.status_code.?XX"}}, nil},
		{"456.03", []Token{{Number, "456.03"}}, nil},
		{"foo:bar~baz", []Token{{Identifier, "foo:bar~baz"}}, nil},
		{"foo:bar", []Token{
			{Identifier, "foo:bar"}}, nil},
		{"foo:bar  baz:q*x", []Token{
			{Identifier, "foo:bar"},
			{Pattern, "baz:q*x"}}, nil},
		{"!service:dispatch.*", []Token{
			{NotOperator, "!"},
			{Pattern, "service:dispatch.*"}}, nil},
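		// The string literal cases below exercise quote and escape handling.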
		{"\"Whatever man\"", []Token{{String, "Whatever man"}}, nil}, // double-quoted string
		// single quotes need no escaping inside a double-quoted string
		{"\"Whatever 'man'\"", []Token{{String, "Whatever 'man'"}}, nil},
		// double quotes inside a double-quoted string must be escaped
		{`"Whatever \"man\""`, []Token{{String, "Whatever \"man\""}}, nil},
		{"'Whatever man'", []Token{{String, "Whatever man"}}, nil}, // single-quoted string
		// double quotes inside a single-quoted string need no escaping; a
		// backslash before one is permitted but is preserved literally in the token
		{`'Whatever "man\"'`, []Token{{String, "Whatever \"man\\\""}}, nil},
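		// A compound call mixing patterns, strings, and signed numbers.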
		{" call09(a.{b,c,d}.node[0-2].qux.*, a{e,g}, 4546.abc, 45ahj, " +
			`"Hello there \"Good \ Sir\" ", +20, 39540.459,-349845,.393) `,
			[]Token{
				{Identifier, "call09"},
				{LParenthesis, "("},
				{Pattern, "a.{b,c,d}.node[0-2].qux.*"},
				{Comma, ","},
				{Pattern, "a{e,g}"},
				{Comma, ","},
				{Pattern, "4546.abc"},
				{Comma, ","},
				{Pattern, "45ahj"},
				{Comma, ","},
				{String, "Hello there \"Good \\ Sir\" "},
				{Comma, ","},
				{Number, "+20"},
				{Comma, ","},
				{Number, "39540.459"},
				{Comma, ","},
				{Number, "-349845"},
				{Comma, ","},
				{Number, ".393"},
				{RParenthesis, ")"}}, nil},
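		// Single-quoted regex arguments are lexed as String tokens with
		// backslashes preserved.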
		{`aliasSub(stats.foo.timers.scream.scream.views.quz.end_to_end_latency.p95, ` +
			`'stats.(.*).timers.scream.scream.views.quz.(.*)', '\1.\2')`,
			[]Token{
				{Identifier, "aliasSub"},
				{LParenthesis, "("},
				{Pattern, "stats.foo.timers.scream.scream.views.quz.end_to_end_latency.p95"},
				{Comma, ","},
				{String, "stats.(.*).timers.scream.scream.views.quz.(.*)"},
				{Comma, ","},
				{String, `\1.\2`},
				{RParenthesis, ")"}}, nil},
		{"sumSeries(stats.with:colon)",
			[]Token{
				{Identifier, "sumSeries"},
				{LParenthesis, "("},
				{Pattern, "stats.with:colon"},
				{RParenthesis, ")"},
			},
			nil},
		{"sumSeries(stats.with:colon.extended.pattern.*)",
			[]Token{
				{Identifier, "sumSeries"},
				{LParenthesis, "("},
				{Pattern, "stats.with:colon.extended.pattern.*"},
				{RParenthesis, ")"},
			},
			nil},
	}

	for _, test := range lexerTestInput {
		lex, tokens := NewLexer(test.input, test.reservedIdents, Options{})
		go lex.Run()

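		// Run drives the lexer on a goroutine; drain the token channel,
		// which closes once the input is fully consumed.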
		i := 0
		for token := range tokens {
			require.True(t, i < len(test.expectedTokens),
				"received more tokens than expected for %s", test.input)
			assert.Equal(t, &test.expectedTokens[i], token, "incorrect token %d for %s",
				i, test.input)
			i++
		}
		assert.Equal(t, len(test.expectedTokens), i,
			"received fewer tokens than expected for %s", test.input)

		assert.True(t, lex.eof(), "did not reach eof for %s", test.input)
	}
}

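// TestLexerErrors verifies that each malformed input produces a single
// Error token carrying the expected message.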
func TestLexerErrors(t *testing.T) {
	badLines := [][]string{
		{"\"this is \\\"unterminated",
			"reached end of input while processing string \"this is \\\"unterminated"},
		{"ajs.djd]", // group closed without open
			"encountered unbalanced end of group ] in pattern ajs.djd]"},
		{"{dfklf]", // mismatch between open and close of group
			"encountered unbalanced end of group ] in pattern {dfklf]"},
		{"{[ajs.djd}]", // close groups in the wrong order
			"encountered unbalanced end of group } in pattern {[ajs.djd}"},
		{"{unclosed", "end of pattern {unclosed reached while still in group {"}, // unclosed group
		{"+a", "expected one of 0123456789, found a"},                            // plus with no number
		{"-b", "expected one of 0123456789, found b"},                            // minus with no number
		{".c", "expected one of 0123456789, found c"},                            // decimal point with no digits
		{"^", "unexpected character ^"},                                          // unknown symbol
	}

	for _, badLine := range badLines {
		l, tokens := NewLexer(badLine[0], nil, Options{})
		go l.Run()

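		// The first value received should be the expected error token.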
		expected := &Token{Error, badLine[1]}
		actual := <-tokens
		assert.Equal(t, expected, actual, "%s did not result in the expected error", badLine[0])

		// Should have closed the channel after the error
		actual = <-tokens
		assert.Nil(t, actual)
	}
}