github.com/influxdata/influxql@v1.1.0/token.go

package influxql

import (
	"strings"
)

// Token is a lexical token of the InfluxQL language.
type Token int

// This is a comprehensive list of InfluxQL language tokens.
const (
	// ILLEGAL, EOF, WS, and COMMENT are special InfluxQL tokens.
	ILLEGAL Token = iota
	EOF
	WS
	COMMENT

	literalBeg
	// IDENT and the following are InfluxQL literal tokens.
	IDENT       // main
	BOUNDPARAM  // $param
	NUMBER      // 12345.67
	INTEGER     // 12345
	DURATIONVAL // 13h
	STRING      // "abc"
	BADSTRING   // "abc
	BADESCAPE   // \q
	TRUE        // true
	FALSE       // false
	REGEX       // Regular expressions
	BADREGEX    // `.*
	literalEnd

	operatorBeg
	// ADD and the following are InfluxQL operators.
	ADD         // +
	SUB         // -
	MUL         // *
	DIV         // /
	MOD         // %
	BITWISE_AND // &
	BITWISE_OR  // |
	BITWISE_XOR // ^

	AND // AND
	OR  // OR

	EQ       // =
	NEQ      // !=
	EQREGEX  // =~
	NEQREGEX // !~
	LT       // <
	LTE      // <=
	GT       // >
	GTE      // >=
	operatorEnd

	LPAREN      // (
	RPAREN      // )
	COMMA       // ,
	COLON       // :
	DOUBLECOLON // ::
	SEMICOLON   // ;
	DOT         // .

	keywordBeg
	// ALL and the following are InfluxQL keywords.
	ALL
	ALTER
	ANALYZE
	ANY
	AS
	ASC
	BEGIN
	BY
	CARDINALITY
	CREATE
	CONTINUOUS
	DATABASE
	DATABASES
	DEFAULT
	DELETE
	DESC
	DESTINATIONS
	DIAGNOSTICS
	DISTINCT
	DROP
	DURATION
	END
	EVERY
	EXACT
	EXPLAIN
	FIELD
	FOR
	FROM
	GRANT
	GRANTS
	GROUP
	GROUPS
	IN
	INF
	INSERT
	INTO
	KEY
	KEYS
	KILL
	LIMIT
	MEASUREMENT
	MEASUREMENTS
	NAME
	OFFSET
	ON
	ORDER
	PASSWORD
	POLICY
	POLICIES
	PRIVILEGES
	QUERIES
	QUERY
	READ
	REPLICATION
	RESAMPLE
	RETENTION
	REVOKE
	SELECT
	SERIES
	SET
	SHOW
	SHARD
	SHARDS
	SLIMIT
	SOFFSET
	STATS
	SUBSCRIPTION
	SUBSCRIPTIONS
	TAG
	TO
	USER
	USERS
	VALUES
	WHERE
	WITH
	WRITE
	keywordEnd
)

var tokens = [...]string{
	ILLEGAL: "ILLEGAL",
	EOF:     "EOF",
	WS:      "WS",

	IDENT:       "IDENT",
	NUMBER:      "NUMBER",
	DURATIONVAL: "DURATIONVAL",
	STRING:      "STRING",
	BADSTRING:   "BADSTRING",
	BADESCAPE:   "BADESCAPE",
	TRUE:        "TRUE",
	FALSE:       "FALSE",
	REGEX:       "REGEX",

	ADD:         "+",
	SUB:         "-",
	MUL:         "*",
	DIV:         "/",
	MOD:         "%",
	BITWISE_AND: "&",
	BITWISE_OR:  "|",
	BITWISE_XOR: "^",

	AND: "AND",
	OR:  "OR",

	EQ:       "=",
	NEQ:      "!=",
	EQREGEX:  "=~",
	NEQREGEX: "!~",
	LT:       "<",
	LTE:      "<=",
	GT:       ">",
	GTE:      ">=",

	LPAREN:      "(",
	RPAREN:      ")",
	COMMA:       ",",
	COLON:       ":",
	DOUBLECOLON: "::",
	SEMICOLON:   ";",
	DOT:         ".",

	ALL:           "ALL",
	ALTER:         "ALTER",
	ANALYZE:       "ANALYZE",
	ANY:           "ANY",
	AS:            "AS",
	ASC:           "ASC",
	BEGIN:         "BEGIN",
	BY:            "BY",
	CARDINALITY:   "CARDINALITY",
	CREATE:        "CREATE",
	CONTINUOUS:    "CONTINUOUS",
	DATABASE:      "DATABASE",
	DATABASES:     "DATABASES",
	DEFAULT:       "DEFAULT",
	DELETE:        "DELETE",
	DESC:          "DESC",
	DESTINATIONS:  "DESTINATIONS",
	DIAGNOSTICS:   "DIAGNOSTICS",
	DISTINCT:      "DISTINCT",
	DROP:          "DROP",
	DURATION:      "DURATION",
	END:           "END",
	EVERY:         "EVERY",
	EXACT:         "EXACT",
	EXPLAIN:       "EXPLAIN",
	FIELD:         "FIELD",
	FOR:           "FOR",
	FROM:          "FROM",
	GRANT:         "GRANT",
	GRANTS:        "GRANTS",
	GROUP:         "GROUP",
	GROUPS:        "GROUPS",
	IN:            "IN",
	INF:           "INF",
	INSERT:        "INSERT",
	INTO:          "INTO",
	KEY:           "KEY",
	KEYS:          "KEYS",
	KILL:          "KILL",
	LIMIT:         "LIMIT",
	MEASUREMENT:   "MEASUREMENT",
	MEASUREMENTS:  "MEASUREMENTS",
	NAME:          "NAME",
	OFFSET:        "OFFSET",
	ON:            "ON",
	ORDER:         "ORDER",
	PASSWORD:      "PASSWORD",
	POLICY:        "POLICY",
	POLICIES:      "POLICIES",
	PRIVILEGES:    "PRIVILEGES",
	QUERIES:       "QUERIES",
	QUERY:         "QUERY",
	READ:          "READ",
	REPLICATION:   "REPLICATION",
	RESAMPLE:      "RESAMPLE",
	RETENTION:     "RETENTION",
	REVOKE:        "REVOKE",
	SELECT:        "SELECT",
	SERIES:        "SERIES",
	SET:           "SET",
	SHOW:          "SHOW",
	SHARD:         "SHARD",
	SHARDS:        "SHARDS",
	SLIMIT:        "SLIMIT",
	SOFFSET:       "SOFFSET",
	STATS:         "STATS",
	SUBSCRIPTION:  "SUBSCRIPTION",
	SUBSCRIPTIONS: "SUBSCRIPTIONS",
	TAG:           "TAG",
	TO:            "TO",
	USER:          "USER",
	USERS:         "USERS",
	VALUES:        "VALUES",
	WHERE:         "WHERE",
	WITH:          "WITH",
	WRITE:         "WRITE",
}

var keywords map[string]Token

func init() {
	keywords = make(map[string]Token)
	for tok := keywordBeg + 1; tok < keywordEnd; tok++ {
		keywords[strings.ToLower(tokens[tok])] = tok
	}
	for _, tok := range []Token{AND, OR} {
		keywords[strings.ToLower(tokens[tok])] = tok
	}
	keywords["true"] = TRUE
	keywords["false"] = FALSE
}

// String returns the string representation of the token.
func (tok Token) String() string {
	if tok >= 0 && tok < Token(len(tokens)) {
		return tokens[tok]
	}
	return ""
}

// Precedence returns the operator precedence of the binary operator token.
func (tok Token) Precedence() int {
	switch tok {
	case OR:
		return 1
	case AND:
		return 2
	case EQ, NEQ, EQREGEX, NEQREGEX, LT, LTE, GT, GTE:
		return 3
	case ADD, SUB, BITWISE_OR, BITWISE_XOR:
		return 4
	case MUL, DIV, MOD, BITWISE_AND:
		return 5
	}
	return 0
}

// isOperator returns true for operator tokens.
func (tok Token) isOperator() bool { return tok > operatorBeg && tok < operatorEnd }

// tokstr returns a literal if provided, otherwise returns the token string.
func tokstr(tok Token, lit string) string {
	if lit != "" {
		return lit
	}
	return tok.String()
}

// Lookup returns the token associated with a given string.
func Lookup(ident string) Token {
	if tok, ok := keywords[strings.ToLower(ident)]; ok {
		return tok
	}
	return IDENT
}

// Pos specifies the line and character position of a token.
// Line and Char are both zero-based indexes.
type Pos struct {
	Line int
	Char int
}
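// Usage sketch (illustrative addition, not part of the upstream file). It shows
// how the exported helpers in this file fit together: Lookup resolves raw
// identifiers to keyword tokens (falling back to IDENT), Precedence orders
// binary operators for the parser, and String renders a token's textual form.
// The function name exampleTokenUsage is hypothetical.
func exampleTokenUsage() {
	// Keyword lookup is case-insensitive: "select" and "SELECT" both map to SELECT.
	kw := Lookup("select") // SELECT
	_ = kw

	// Strings that are not keywords fall back to IDENT.
	id := Lookup("cpu_usage") // IDENT
	_ = id

	// Precedence drives binary-expression parsing: MUL (5) binds tighter than
	// ADD (4), which binds tighter than comparisons (3), AND (2), and OR (1).
	// Non-operator tokens return 0.
	tighter := MUL.Precedence() > ADD.Precedence() // true, so "1 + 2 * 3" groups as "1 + (2 * 3)"
	_ = tighter

	// String renders operators as their symbol and keywords as their upper-case name.
	_ = ADD.String()    // "+"
	_ = SELECT.String() // "SELECT"
}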