github.com/kubeshop/testkube@v1.17.23/pkg/tcl/expressionstcl/tokenize_test.go

// Copyright 2024 Testkube.
//
// Licensed as a Testkube Pro file under the Testkube Community
// License (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
//	https://github.com/kubeshop/testkube/blob/main/licenses/TCL.txt

package expressionstcl

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// slice wraps its arguments into an []interface{}, keeping the expected-token
// literals below terse.
func slice(v ...interface{}) []interface{} {
	return v
}

// TestTokenizeSimple covers binary operators, negation, the ternary operator,
// parentheses, calls, null, and indexed property access.
func TestTokenizeSimple(t *testing.T) {
	operators := []string{"&&", "||", "!=", "<>", "==", "=", "+", "-", "*", ">", "<", "<=", ">=", "%", "**"}
	for _, op := range operators {
		assert.Equal(t, []token{tokenAccessor("a"), tokenMath(op), tokenAccessor("b")}, mustTokenize("a"+op+"b"))
	}
	assert.Equal(t, []token{tokenNot, tokenAccessor("abc")}, mustTokenize(`!abc`))
	assert.Equal(t, []token{tokenAccessor("a"), tokenTernary, tokenAccessor("b"), tokenTernarySeparator, tokenAccessor("c")}, mustTokenize(`a ? b : c`))
	assert.Equal(t, []token{tokenOpen, tokenAccessor("a"), tokenClose}, mustTokenize(`(a)`))
	assert.Equal(t, []token{tokenAccessor("a"), tokenOpen, tokenAccessor("b"), tokenComma, tokenJson(true), tokenClose}, mustTokenize(`a(b, true)`))
	assert.Equal(t, []token{tokenJson(noneValue)}, mustTokenize("null"))
	assert.Equal(t, []token{tokenJson(noneValue), tokenMath("+"), tokenJson(4.0)}, mustTokenize("null + 4"))
	assert.Equal(t, []token{tokenJson([]interface{}{1.0}), tokenPropertyAccessor("0")}, mustTokenize("[1].0"))
}

// TestTokenizeWildcard checks that wildcard and numeric path segments stay
// part of a single accessor token.
func TestTokenizeWildcard(t *testing.T) {
	assert.Equal(t, []token{tokenAccessor("a.b.c.*.d.e")}, mustTokenize(`a.b.c.*.d.e`))
	assert.Equal(t, []token{tokenAccessor("a.b.c.0.d.e")}, mustTokenize(`a.b.c.0.d.e`))
}

// TestTokenizeJson checks that JSON literals (numbers, strings, objects,
// arrays, and booleans) each become a single token.
func TestTokenizeJson(t *testing.T) {
	assert.Equal(t, []token{tokenJson(1.0), tokenMath("+"), tokenJson(255.0)}, mustTokenize(`1 + 255`))
	assert.Equal(t, []token{tokenJson(1.6), tokenMath("+"), tokenJson(255.0)}, mustTokenize(`1.6 + 255`))
	assert.Equal(t, []token{tokenJson("abc"), tokenMath("+"), tokenJson("d")}, mustTokenize(`"abc" + "d"`))
	assert.Equal(t, []token{tokenJson(map[string]interface{}{"key1": "value1", "key2": "value2"})}, mustTokenize(`{"key1": "value1", "key2": "value2"}`))
	assert.Equal(t, []token{tokenJson(slice("a", "b"))}, mustTokenize(`["a", "b"]`))
	assert.Equal(t, []token{tokenJson(true)}, mustTokenize(`true`))
	assert.Equal(t, []token{tokenJson(false)}, mustTokenize(`false`))
}

// TestTokenizeComplex tokenizes a multi-line expression that combines most of
// the token kinds exercised above.
func TestTokenizeComplex(t *testing.T) {
	want := []token{
		tokenAccessor("env.value"), tokenMath("&&"), tokenOpen, tokenAccessor("env.alternative"), tokenMath("+"), tokenJson("cd"), tokenClose, tokenMath("=="), tokenJson("abc"),
		tokenTernary, tokenJson(10.5),
		tokenTernarySeparator, tokenNot, tokenAccessor("ignored"), tokenTernary, tokenOpen, tokenJson(14.0), tokenMath("+"), tokenJson(3.1), tokenMath("*"), tokenJson(5.0), tokenClose,
		tokenTernarySeparator, tokenAccessor("transform"), tokenOpen, tokenJson("a"), tokenComma,
		tokenJson(map[string]interface{}{"x": "y"}), tokenComma, tokenJson(slice("z")), tokenClose,
	}
	assert.Equal(t, want, mustTokenize(`
		env.value && (env.alternative + "cd") == "abc"
			? 10.5
			: !ignored ? (14 + 3.1 * 5)
			: transform("a", {"x": "y"}, ["z"])
	`))
}

// TestTokenizeInvalidAccessor expects an error for a trailing dot, with the
// tokens read so far still returned.
func TestTokenizeInvalidAccessor(t *testing.T) {
	tokens, _, err := tokenize(`abc.`, 0)
	assert.Error(t, err)
	assert.Equal(t, []token{tokenAccessor("abc")}, tokens)
}

// TestTokenizeInvalidJson expects an error for malformed JSON literals, with
// no tokens returned.
func TestTokenizeInvalidJson(t *testing.T) {
	tokens, _, err := tokenize(`{"abc": "d"`, 0)
	tokens2, _, err2 := tokenize(`{"abc": d}`, 0)
	assert.Error(t, err)
	assert.Equal(t, []token{}, tokens)
	assert.Error(t, err2)
	assert.Equal(t, []token{}, tokens2)
}
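
// NOTE: the assertions above rely on a mustTokenize helper that is not
// defined in this file; it presumably lives in a sibling file of this
// package. A minimal sketch of such a helper (hypothetical, not the actual
// Testkube code), assuming the three-value tokenize signature exercised in
// the invalid-input tests above and the Go convention that must* helpers
// panic on error:
//
//	func mustTokenize(exp string) []token {
//		tokens, _, err := tokenize(exp, 0)
//		if err != nil {
//			panic(err)
//		}
//		return tokens
//	}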
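
// These are standard Go tests, so the suite can be run in isolation with the
// usual tooling (no Testkube-specific setup assumed):
//
//	go test ./pkg/tcl/expressionstcl/ -run TestTokenize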