github.com/kubeshop/testkube@v1.17.23/pkg/tcl/expressionstcl/tokenize.go (about)

     1  // Copyright 2024 Testkube.
     2  //
     3  // Licensed as a Testkube Pro file under the Testkube Community
     4  // License (the "License"); you may not use this file except in compliance with
     5  // the License. You may obtain a copy of the License at
     6  //
     7  //     https://github.com/kubeshop/testkube/blob/main/licenses/TCL.txt
     8  
     9  package expressionstcl
    10  
    11  import (
    12  	"bytes"
    13  	"encoding/json"
    14  	"fmt"
    15  	"io"
    16  	"regexp"
    17  )
    18  
    19  var mathOperatorRe = regexp.MustCompile(`^(?:!=|<>|==|>=|<=|&&|\*\*|\|\||[+\-*/><=%])`)
    20  var noneRe = regexp.MustCompile(`^null(?:[^a-zA-Z\d_.]|$)`)
    21  var jsonValueRe = regexp.MustCompile(`^(?:["{\[\d]|((?:true|false)(?:[^a-zA-Z\d_.]|$)))`)
    22  var accessorRe = regexp.MustCompile(`^[a-zA-Z\d_]+(?:\s*\.\s*([a-zA-Z\d_]+|\*))*`)
    23  var propertyAccessorRe = regexp.MustCompile(`^\.\s*([a-zA-Z\d_]+|\*)`)
    24  var spreadRe = regexp.MustCompile(`^\.\.\.`)
    25  var spaceRe = regexp.MustCompile(`^\s+`)
    26  
// tokenizeNext scans exp from byte index i and returns the next token
// together with the index immediately after it. Leading whitespace is
// skipped. When only whitespace (or nothing) remains it returns io.EOF;
// note that the returned index in the EOF case is 0, not i.
//
// Case order is significant: multi-character operators ("!=", "<=", ...)
// must be tried before the single-character '?' and '!' cases, "null"
// before the JSON/accessor checks, and "..." before the JSON value check.
func tokenizeNext(exp string, i int) (token, int, error) {
	for i < len(exp) {
		switch true {
		case exp[i] == ',':
			return tokenComma, i + 1, nil
		case exp[i] == '(':
			return tokenOpen, i + 1, nil
		case exp[i] == ')':
			return tokenClose, i + 1, nil
		case exp[i] == ':':
			return tokenTernarySeparator, i + 1, nil
		case mathOperatorRe.MatchString(exp[i:]):
			op := mathOperatorRe.FindString(exp[i:])
			return tokenMath(op), i + len(op), nil
		case exp[i] == '?':
			// Only reached when '?' was not consumed as part of a longer
			// operator above.
			return tokenTernary, i + 1, nil
		case exp[i] == '!':
			// Only reached when "!=" did not match above.
			return tokenNot, i + 1, nil
		case spaceRe.MatchString(exp[i:]):
			// Skip whitespace and keep scanning within the same call.
			space := spaceRe.FindString(exp[i:])
			i += len(space)
		case noneRe.MatchString(exp[i:]):
			// 4 == len("null").
			return tokenJson(noneValue), i + 4, nil
		case spreadRe.MatchString(exp[i:]):
			// 3 == len("...").
			return tokenSpread, i + 3, nil
		case jsonValueRe.MatchString(exp[i:]):
			// Allow multi-line string with literal \n
			// TODO: Optimize, and allow deeper in the tree
			//
			// For a double-quoted string, rewrite raw newline/tab bytes
			// inside it into JSON escape sequences so the decoder accepts
			// them. Each rewrite grows exp by one byte; `appended` counts
			// those bytes so the returned index can be mapped back onto
			// the caller's original string below.
			appended := 0
			if exp[i] == '"' {
				inside := true
				for index := i + 1; inside && index < len(exp); index++ {
					if exp[index] == '\\' {
						// Already-escaped character: skip it untouched.
						index++
					} else if exp[index] == '"' {
						// Unescaped closing quote ends the rewrite scan.
						inside = false
					} else if exp[index] == '\n' {
						exp = exp[0:index] + "\\n" + exp[index+1:]
						appended++
					} else if exp[index] == '\t' {
						exp = exp[0:index] + "\\t" + exp[index+1:]
						appended++
					}
				}
			}
			decoder := json.NewDecoder(bytes.NewBuffer([]byte(exp[i:])))
			var val interface{}
			err := decoder.Decode(&val)
			if err != nil {
				return token{}, i, fmt.Errorf("error while decoding JSON from index %d in expression: %s: %s", i, exp, err.Error())
			}
			// InputOffset is relative to the (possibly rewritten) exp[i:];
			// subtract the bytes added above to index the original string.
			return tokenJson(val), i + int(decoder.InputOffset()) - appended, nil
		case accessorRe.MatchString(exp[i:]):
			acc := accessorRe.FindString(exp[i:])
			return tokenAccessor(acc), i + len(acc), nil
		case propertyAccessorRe.MatchString(exp[i:]):
			// acc starts with '.'; drop it so only the property name
			// (possibly still with interior whitespace) remains.
			acc := propertyAccessorRe.FindString(exp[i:])
			return tokenPropertyAccessor(acc[1:]), i + len(acc), nil
		default:
			return token{}, i, fmt.Errorf("unknown character at index %d in expression: %s", i, exp)
		}
	}
	// Only whitespace was left.
	return token{}, 0, io.EOF
}
    91  
    92  func tokenize(exp string, index int) (tokens []token, i int, err error) {
    93  	tokens = make([]token, 0)
    94  	var t token
    95  	for i = index; i < len(exp); {
    96  		t, i, err = tokenizeNext(exp, i)
    97  		if err != nil {
    98  			if err == io.EOF {
    99  				return tokens, i, nil
   100  			}
   101  			return tokens, i, err
   102  		}
   103  		tokens = append(tokens, t)
   104  	}
   105  	return
   106  }
   107  
   108  func mustTokenize(exp string) []token {
   109  	tokens, _, err := tokenize(exp, 0)
   110  	if err != nil {
   111  		panic(err)
   112  	}
   113  	return tokens
   114  }