github.com/lalkh/containerd@v1.4.3/filters/scanner_test.go (about)

     1  /*
     2     Copyright The containerd Authors.
     3  
     4     Licensed under the Apache License, Version 2.0 (the "License");
     5     you may not use this file except in compliance with the License.
     6     You may obtain a copy of the License at
     7  
     8         http://www.apache.org/licenses/LICENSE-2.0
     9  
    10     Unless required by applicable law or agreed to in writing, software
    11     distributed under the License is distributed on an "AS IS" BASIS,
    12     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    13     See the License for the specific language governing permissions and
    14     limitations under the License.
    15  */
    16  
    17  package filters
    18  
    19  import (
    20  	"fmt"
    21  	"testing"
    22  )
    23  
// tokenResult records one expected (or observed) emission from the scanner:
// where it started, what kind of token it was, its literal text, and — for
// illegal tokens — the scanner's error message. It is comparable with ==,
// which TestScanner relies on.
type tokenResult struct {
	pos   int    // byte offset of the token in the input (not rune index; see the Cowsay cases)
	token token  // token kind (tokenField, tokenValue, tokenEOF, ...)
	text  string // literal text of the token; empty for EOF
	err   string // scanner error message; set only when token == tokenIllegal
}
    30  
    31  func (tr tokenResult) String() string {
    32  	if tr.err != "" {
    33  		return fmt.Sprintf("{pos: %v, token: %v, text: %q, err: %q}", tr.pos, tr.token, tr.text, tr.err)
    34  	}
    35  	return fmt.Sprintf("{pos: %v, token: %v, text: %q}", tr.pos, tr.token, tr.text)
    36  }
    37  
// TestScanner drives the filter scanner over a table of inputs and checks the
// exact (pos, token, text, err) stream each one produces. Positions in the
// expected results are byte offsets, which is why multi-byte UTF-8 input (the
// Cowsay cases) advances pos by more than one per rune.
//
// Bootstrap aid: a case with an empty expected slice prints its token stream
// in table syntax (for pasting into a new case) and then fails, so incomplete
// cases cannot silently pass.
func TestScanner(t *testing.T) {

	for _, testcase := range []struct {
		name     string
		input    string
		expected []tokenResult
	}{
		{
			name:  "Field",
			input: "name",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenEOF},
			},
		},
		{
			name:  "SelectorsWithOperators",
			input: "name==value,foo!=bar",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "=="},
				{pos: 6, token: tokenValue, text: "value"},
				{pos: 11, token: tokenSeparator, text: ","},
				{pos: 12, token: tokenField, text: "foo"},
				{pos: 15, token: tokenOperator, text: "!="},
				{pos: 17, token: tokenValue, text: "bar"},
				{pos: 20, token: tokenEOF},
			},
		},
		{
			name:  "SelectorsWithFieldPaths",
			input: "name==value,labels.foo=value,other.bar~=match",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "=="},
				{pos: 6, token: tokenValue, text: "value"},
				{pos: 11, token: tokenSeparator, text: ","},
				{pos: 12, token: tokenField, text: "labels"},
				{pos: 18, token: tokenSeparator, text: "."},
				{pos: 19, token: tokenField, text: "foo"},
				{pos: 22, token: tokenOperator, text: "="},
				{pos: 23, token: tokenValue, text: "value"},
				{pos: 28, token: tokenSeparator, text: ","},
				{pos: 29, token: tokenField, text: "other"},
				{pos: 34, token: tokenSeparator, text: "."},
				{pos: 35, token: tokenField, text: "bar"},
				{pos: 38, token: tokenOperator, text: "~="},
				{pos: 40, token: tokenValue, text: "match"},
				{pos: 45, token: tokenEOF},
			},
		},
		{
			name:  "RegexpValue",
			input: "name~=[abc]+,foo=test",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenValue, text: "[abc]+"},
				{pos: 12, token: tokenSeparator, text: ","},
				{pos: 13, token: tokenField, text: "foo"},
				{pos: 16, token: tokenOperator, text: "="},
				{pos: 17, token: tokenValue, text: "test"},
				{pos: 21, token: tokenEOF},
			},
		},
		{
			name:  "RegexpEscapedValue",
			input: `name~=[abc]\+,foo=test`,
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenValue, text: "[abc]\\+"},
				{pos: 13, token: tokenSeparator, text: ","},
				{pos: 14, token: tokenField, text: "foo"},
				{pos: 17, token: tokenOperator, text: "="},
				{pos: 18, token: tokenValue, text: "test"},
				{pos: 22, token: tokenEOF},
			},
		},
		{
			// A regexp wrapped in '/' comes back as a single quoted token,
			// comma and all, rather than being split at the separator.
			name:  "RegexpQuotedValue",
			input: `name~=/[abc]{0,2}/,foo=test`,
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenQuoted, text: "/[abc]{0,2}/"},
				{pos: 18, token: tokenSeparator, text: ","},
				{pos: 19, token: tokenField, text: "foo"},
				{pos: 22, token: tokenOperator, text: "="},
				{pos: 23, token: tokenValue, text: "test"},
				{pos: 27, token: tokenEOF},
			},
		},
		{
			// 牛 is 3 bytes in UTF-8: the separator after it lands at byte 9,
			// not rune position 7 — positions are byte offsets.
			name:  "Cowsay",
			input: "name~=牛,labels.moo=true",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenValue, text: "牛"},
				{pos: 9, token: tokenSeparator, text: ","},
				{pos: 10, token: tokenField, text: "labels"},
				{pos: 16, token: tokenSeparator, text: "."},
				{pos: 17, token: tokenField, text: "moo"},
				{pos: 20, token: tokenOperator, text: "="},
				{pos: 21, token: tokenValue, text: "true"},
				{pos: 25, token: tokenEOF},
			},
		},
		{
			name:  "CowsayRegexpQuoted",
			input: "name~=|牛|,labels.moo=true",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenQuoted, text: "|牛|"},
				{pos: 11, token: tokenSeparator, text: ","},
				{pos: 12, token: tokenField, text: "labels"},
				{pos: 18, token: tokenSeparator, text: "."},
				{pos: 19, token: tokenField, text: "moo"},
				{pos: 22, token: tokenOperator, text: "="},
				{pos: 23, token: tokenValue, text: "true"},
				{pos: 27, token: tokenEOF},
			},
		},
		{
			// Escape sequences inside a quoted value are passed through
			// verbatim (still escaped) rather than being interpreted.
			name:  "Escapes",
			input: `name~="asdf\n\tfooo"`,
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "name"},
				{pos: 4, token: tokenOperator, text: "~="},
				{pos: 6, token: tokenQuoted, text: "\"asdf\\n\\tfooo\""},
				{pos: 20, token: tokenEOF},
			},
		},
		{
			// A NUL byte yields a one-byte illegal token and scanning resumes
			// immediately after it.
			name:  "NullInput",
			input: "foo\x00bar",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "foo"},
				{pos: 3, token: tokenIllegal, err: "unexpected null"},
				{pos: 4, token: tokenField, text: "bar"},
				{pos: 7, token: tokenEOF},
			},
		},
		{
			// Whitespace is skipped, not tokenized: trailing spaces only move
			// the EOF position.
			name:  "SpacesChomped",
			input: "foo = bar    ",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "foo"},
				{pos: 4, token: tokenOperator, text: "="},
				{pos: 6, token: tokenValue, text: "bar"},
				{pos: 13, token: tokenEOF},
			},
		},
		{
			// '.', '_' and '-' are legal inside a value, so the whole
			// right-hand side scans as one token.
			name:  "ValuesPunctauted",
			input: "compound.labels==punctuated_value.foo-bar",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "compound"},
				{pos: 8, token: tokenSeparator, text: "."},
				{pos: 9, token: tokenField, text: "labels"},
				{pos: 15, token: tokenOperator, text: "=="},
				{pos: 17, token: tokenValue, text: "punctuated_value.foo-bar"},
				{pos: 41, token: tokenEOF},
			},
		},
		{
			// Input ending right after an operator still EOFs cleanly.
			name:  "PartialInput",
			input: "interrupted=",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "interrupted"},
				{pos: 11, token: tokenOperator, text: "="},
				{pos: 12, token: tokenEOF},
			},
		},
		{
			// A second bareword after a value scans as a field; it is the
			// parser's job, not the scanner's, to reject it.
			name:  "DoubleValue",
			input: "doublevalue=value value",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "doublevalue"},
				{pos: 11, token: tokenOperator, text: "="},
				{pos: 12, token: tokenValue, text: "value"},
				{pos: 18, token: tokenField, text: "value"},
				{pos: 23, token: tokenEOF},
			},
		},
		{
			name:  "LeadingWithQuoted",
			input: `"leading quote".postquote==value`,
			expected: []tokenResult{
				{pos: 0, token: tokenQuoted, text: "\"leading quote\""},
				{pos: 15, token: tokenSeparator, text: "."},
				{pos: 16, token: tokenField, text: "postquote"},
				{pos: 25, token: tokenOperator, text: "=="},
				{pos: 27, token: tokenValue, text: "value"},
				{pos: 32, token: tokenEOF},
			},
		},
		{
			// "==" followed directly by ',' simply emits no value token.
			name:  "MissingValue",
			input: "input==,id!=ff",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "input"},
				{pos: 5, token: tokenOperator, text: "=="},
				{pos: 7, token: tokenSeparator, text: ","},
				{pos: 8, token: tokenField, text: "id"},
				{pos: 10, token: tokenOperator, text: "!="},
				{pos: 12, token: tokenValue, text: "ff"},
				{pos: 14, token: tokenEOF},
			},
		},
		{
			name:  "QuotedRegexp",
			input: "input~=/foo\\/bar/,id!=ff",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "input"},
				{pos: 5, token: tokenOperator, text: "~="},
				{pos: 7, token: tokenQuoted, text: "/foo\\/bar/"},
				{pos: 17, token: tokenSeparator, text: ","},
				{pos: 18, token: tokenField, text: "id"},
				{pos: 20, token: tokenOperator, text: "!="},
				{pos: 22, token: tokenValue, text: "ff"},
				{pos: 24, token: tokenEOF},
			},
		},
		{
			name:  "QuotedRegexpAlt",
			input: "input~=|foo/bar|,id!=ff",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "input"},
				{pos: 5, token: tokenOperator, text: "~="},
				{pos: 7, token: tokenQuoted, text: "|foo/bar|"},
				{pos: 16, token: tokenSeparator, text: ","},
				{pos: 17, token: tokenField, text: "id"},
				{pos: 19, token: tokenOperator, text: "!="},
				{pos: 21, token: tokenValue, text: "ff"},
				{pos: 23, token: tokenEOF},
			},
		},
		{
			// The '/' in the label key starts a quoted literal that never
			// closes, so the rest of the input becomes one illegal token.
			name:  "IllegalQuoted",
			input: "labels.containerd.io/key==value",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "labels"},
				{pos: 6, token: tokenSeparator, text: "."},
				{pos: 7, token: tokenField, text: "containerd"},
				{pos: 17, token: tokenSeparator, text: "."},
				{pos: 18, token: tokenField, text: "io"},
				{pos: 20, token: tokenIllegal, text: "/key==value", err: "quoted literal not terminated"},
				{pos: 31, token: tokenEOF},
			},
		},
		{
			// A newline terminates the quote scan, leaving two unterminated
			// quoted literals with the bareword between them as a field.
			name:  "IllegalQuotedWithNewLine",
			input: "labels.\"containerd.io\nkey\"==value",
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "labels"},
				{pos: 6, token: tokenSeparator, text: "."},
				{pos: 7, token: tokenIllegal, text: "\"containerd.io\n", err: "quoted literal not terminated"},
				{pos: 22, token: tokenField, text: "key"},
				{pos: 25, token: tokenIllegal, text: "\"==value", err: "quoted literal not terminated"},
				{pos: 33, token: tokenEOF},
			},
		},
		{
			name:  "IllegalEscapeSequence",
			input: `labels."\g"`,
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "labels"},
				{pos: 6, token: tokenSeparator, text: "."},
				{pos: 7, token: tokenIllegal, text: `"\g"`, err: "illegal escape sequence"},
				{pos: 11, token: tokenEOF},
			},
		},
		{
			name:  "IllegalNumericEscapeSequence",
			input: `labels."\xaz"`,
			expected: []tokenResult{
				{pos: 0, token: tokenField, text: "labels"},
				{pos: 6, token: tokenSeparator, text: "."},
				{pos: 7, token: tokenIllegal, text: `"\xaz"`, err: "illegal numeric escape sequence"},
				{pos: 13, token: tokenEOF},
			},
		},
	} {
		t.Run(testcase.name, func(t *testing.T) {
			var sc scanner
			sc.init(testcase.input)

			// If you leave the expected empty, the test case will just print
			// out the token stream, which you can paste into the testcase when
			// adding new cases.
			if len(testcase.expected) == 0 {
				fmt.Println("Name", testcase.name)
			}

			// Pull tokens until EOF, comparing each against the expected
			// stream (or printing it in bootstrap mode).
			for i := 0; ; i++ {
				pos, tok, s := sc.scan()
				if len(testcase.expected) == 0 {
					if len(s) > 0 {
						fmt.Printf("{pos: %v, token: %#v, text: %q},\n", pos, tok, s)
					} else {
						fmt.Printf("{pos: %v, token: %#v},\n", pos, tok)
					}
				} else {
					tokv := tokenResult{pos: pos, token: tok, text: s}
					if i >= len(testcase.expected) {
						t.Fatalf("too many tokens parsed")
					}
					// The scanner reports error detail out of band in sc.err;
					// fold it into the comparable result for illegal tokens.
					if tok == tokenIllegal {
						tokv.err = sc.err
					}

					if tokv != testcase.expected[i] {
						t.Fatalf("token unexpected: %v != %v", tokv, testcase.expected[i])
					}
				}

				if tok == tokenEOF {
					break
				}

			}

			// make sure we've eof'd
			_, tok, _ := sc.scan()
			if tok != tokenEOF {
				t.Fatal("must consume all input")
			}

			// Bootstrap mode always fails so a printed-only case cannot be
			// committed as passing.
			if len(testcase.expected) == 0 {
				t.Fatal("must define expected tokens")
			}
		})
	}
}