github.com/movsb/taorm@v0.0.0-20201209183410-91bafb0b22a6/filter/parser.go

package filter

// ValueType describes how an expression's value is interpreted.
type ValueType uint

// Supported value types.
const (
	ValueTypeRaw ValueType = iota
	ValueTypeBoolean
	ValueTypeNumber
	ValueTypeString
)

func (t ValueType) String() string {
	switch t {
	case ValueTypeRaw:
		return "raw"
	case ValueTypeBoolean:
		return "bool"
	case ValueTypeNumber:
		return "number"
	case ValueTypeString:
		return "string"
	default:
		return "unknown"
	}
}
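
// For example, ValueTypeNumber.String() returns "number"; any value outside
// the constants above falls through to "unknown".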

// AndExpression is a group of expressions that are OR-ed with each other;
// the groups themselves are AND-ed together in the AST.
type AndExpression struct {
	OrExprs []*Expression
}

// AbstractSyntaxTree (AST) is the syntax tree of expression clauses.
type AbstractSyntaxTree struct {
	AndExprs []*AndExpression
}
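
// A filter equivalent to "(p OR q) AND r" is therefore represented as the
// following shape, where p, q and r stand for arbitrary parsed expressions
// (how the operators are actually spelled is up to the Tokenizer):
//
//	AbstractSyntaxTree{
//		AndExprs: []*AndExpression{
//			{OrExprs: []*Expression{p, q}},
//			{OrExprs: []*Expression{r}},
//		},
//	}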

// Parser parses tokens into a parse tree.
type Parser struct {
	tokenizer *Tokenizer
}

// NewParser creates a new parser.
func NewParser(tokenizer *Tokenizer) *Parser {
	return &Parser{
		tokenizer: tokenizer,
	}
}

// Parse creates a new parser and parses source into an AST.
func Parse(source string) (*AbstractSyntaxTree, error) {
	tokenizer := NewTokenizer(source)
	parser := NewParser(tokenizer)
	return parser.Parse()
}
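
// Usage sketch. The concrete filter syntax (how field comparisons, AND and OR
// are spelled) is defined by the Tokenizer, so the source string below is only
// a hypothetical placeholder:
//
//	ast, err := Parse(`name=foo`)
//	if err != nil {
//		// handle the syntax error
//	}
//	for _, and := range ast.AndExprs {
//		for _, or := range and.OrExprs {
//			_ = or.Name // or.Operator and or.Value hold the rest of the clause
//		}
//	}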

// Parse parses tokens into an AST.
func (p *Parser) Parse() (ast *AbstractSyntaxTree, err error) {
	ast = &AbstractSyntaxTree{}

	defer func() {
		if er := recover(); er != nil {
			err = er.(error)
		}
	}()

	ast.AndExprs = p.parseAndExpressions()

	return
}
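
// The parse* helpers below report failures by panicking with an error value;
// Parse recovers that panic and returns it to the caller as an ordinary error.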

func (p *Parser) parseAndExpressions() []*AndExpression {
	andExprs := make([]*AndExpression, 0)
	for {
		and := p.parseAndExpression()
		if len(and.OrExprs) == 0 {
			break
		}
		andExprs = append(andExprs, and)
	}
	return andExprs
}

func (p *Parser) parseAndExpression() *AndExpression {
	and := &AndExpression{}
	for {
		token, err := p.tokenizer.Next()
		if err != nil {
			panic(err)
		}
		if token.TokenType == TokenTypeEOF {
			break
		} else if token.TokenType == TokenTypeAnd {
			break
		} else if token.TokenType == TokenTypeOr {
			continue
		} else {
			p.tokenizer.Undo(token)
		}
		expression := p.parseExpression()
		if expression == nil {
			break
		}
		and.OrExprs = append(and.OrExprs, expression)
	}
	return and
}
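
// Shape handled by the two functions above, inferred from their control flow
// (a sketch, not an authoritative grammar):
//
//	filter  := andExpr { AND andExpr }
//	andExpr := expr { OR expr }
//
// parseAndExpression collects expressions until it reads an AND token or EOF;
// OR tokens between expressions are skipped, and any other token is pushed
// back with Undo so parseExpression can re-read it. When parseExpression
// cannot start an expression there, it returns nil and the group ends.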

func (p *Parser) parseExpression() *Expression {
	var expr = &Expression{}
	var token1, token2, token3 Token
	var err error

	token1, err = p.tokenizer.Next()
	if err != nil {
		panic(err)
	}
	if token1.TokenType == TokenTypeEOF {
		return nil
	} else if token1.TokenType == TokenTypeInvalid {
		panic(newSyntaxError("invalid token: %s", token1.TokenValue))
	} else if token1.TokenType != TokenTypeIdent {
		p.tokenizer.Undo(token1)
		return nil
	}

	token2, err = p.tokenizer.Next()
	if err != nil {
		panic(err)
	}
	if token2.TokenType == TokenTypeEOF {
		panic(newSyntaxError("operator expected after %s", token1.TokenValue))
	} else if token2.TokenType == TokenTypeInvalid {
		panic(newSyntaxError("invalid token: %s", token2.TokenValue))
	}

	switch token2.TokenType {
	case TokenTypeEqual, TokenTypeNotEqual:
		break
	case TokenTypeInclude, TokenTypeNotInclude, TokenTypeStartsWith, TokenTypeEndsWith, TokenTypeMatch, TokenTypeNotMatch:
		break
	case TokenTypeGreaterThan, TokenTypeLessThan, TokenTypeGreaterThanOrEqual, TokenTypeLessThanOrEqual:
		break
	default:
		panic(newSyntaxError("unsupported operator: %s", token2.TokenValue))
	}

	token3, err = p.tokenizer.Next()
	if err != nil {
		panic(err)
	}
	if token3.TokenType == TokenTypeEOF {
		// token value defaults to empty string
	} else if token3.TokenType == TokenTypeInvalid {
		panic(newSyntaxError("invalid token: %s", token3.TokenValue))
	} else if token3.TokenType != TokenTypeIdent {
		panic(newSyntaxError("ident expected, but found: %s", token3.TokenValue))
	}

	expr.Name = token1.TokenValue
	expr.Operator = token2
	expr.Value = token3.TokenValue
	expr.Type = ValueTypeRaw

	return expr
}
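
// Sketch of the result for a single clause such as `age >= 18`, assuming the
// Tokenizer spells the greater-than-or-equal operator as ">=" (that spelling
// is not confirmed here):
//
//	&Expression{
//		Name:     "age",
//		Operator: token2, // the TokenTypeGreaterThanOrEqual token
//		Value:    "18",
//		Type:     ValueTypeRaw,
//	}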