github.com/epfl-dcsl/gotee@v0.0.0-20200909122901-014b35f5e5e9/src/go/token/token.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package token defines constants representing the lexical tokens of the Go
// programming language and basic operations on tokens (printing, predicates).
//
package token

import "strconv"

// Token is the set of lexical tokens of the Go programming language.
type Token int

// The list of tokens.
const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	COMMENT

	literal_beg
	// Identifiers and basic type literals
	// (these tokens stand for classes of literals)
	IDENT  // main
	INT    // 12345
	FLOAT  // 123.45
	IMAG   // 123.45i
	CHAR   // 'a'
	STRING // "abc"
	literal_end

	operator_beg
	// Operators and delimiters
	ADD // +
	SUB // -
	MUL // *
	QUO // /
	REM // %

	AND     // &
	OR      // |
	XOR     // ^
	SHL     // <<
	SHR     // >>
	AND_NOT // &^

	ADD_ASSIGN // +=
	SUB_ASSIGN // -=
	MUL_ASSIGN // *=
	QUO_ASSIGN // /=
	REM_ASSIGN // %=

	AND_ASSIGN     // &=
	OR_ASSIGN      // |=
	XOR_ASSIGN     // ^=
	SHL_ASSIGN     // <<=
	SHR_ASSIGN     // >>=
	AND_NOT_ASSIGN // &^=

	LAND  // &&
	LOR   // ||
	ARROW // <-
	INC   // ++
	DEC   // --

	EQL    // ==
	LSS    // <
	GTR    // >
	ASSIGN // =
	NOT    // !

	NEQ      // !=
	LEQ      // <=
	GEQ      // >=
	DEFINE   // :=
	ELLIPSIS // ...

	LPAREN // (
	LBRACK // [
	LBRACE // {
	COMMA  // ,
	PERIOD // .

	RPAREN    // )
	RBRACK    // ]
	RBRACE    // }
	SEMICOLON // ;
	COLON     // :
	operator_end

	keyword_beg
	// Keywords
	BREAK
	CASE
	CHAN
	CONST
	CONTINUE

	DEFAULT
	DEFER
	ELSE
	FALLTHROUGH
	FOR

	FUNC
	GO
	GOSEC // "gosecure"; keyword added by the gotee fork, not present in upstream Go
	GOTO
	IF
	IMPORT

	INTERFACE
	MAP
	PACKAGE
	RANGE
	RETURN

	SELECT
	STRUCT
	SWITCH
	TYPE
	VAR
	keyword_end
)

var tokens = [...]string{
	ILLEGAL: "ILLEGAL",

	EOF:     "EOF",
	COMMENT: "COMMENT",

	IDENT:  "IDENT",
	INT:    "INT",
	FLOAT:  "FLOAT",
	IMAG:   "IMAG",
	CHAR:   "CHAR",
	STRING: "STRING",

	ADD: "+",
	SUB: "-",
	MUL: "*",
	QUO: "/",
	REM: "%",

	AND:     "&",
	OR:      "|",
	XOR:     "^",
	SHL:     "<<",
	SHR:     ">>",
	AND_NOT: "&^",

	ADD_ASSIGN: "+=",
	SUB_ASSIGN: "-=",
	MUL_ASSIGN: "*=",
	QUO_ASSIGN: "/=",
	REM_ASSIGN: "%=",

	AND_ASSIGN:     "&=",
	OR_ASSIGN:      "|=",
	XOR_ASSIGN:     "^=",
	SHL_ASSIGN:     "<<=",
	SHR_ASSIGN:     ">>=",
	AND_NOT_ASSIGN: "&^=",

	LAND:  "&&",
	LOR:   "||",
	ARROW: "<-",
	INC:   "++",
	DEC:   "--",

	EQL:    "==",
	LSS:    "<",
	GTR:    ">",
	ASSIGN: "=",
	NOT:    "!",

	NEQ:      "!=",
	LEQ:      "<=",
	GEQ:      ">=",
	DEFINE:   ":=",
	ELLIPSIS: "...",

	LPAREN: "(",
	LBRACK: "[",
	LBRACE: "{",
	COMMA:  ",",
	PERIOD: ".",

	RPAREN:    ")",
	RBRACK:    "]",
	RBRACE:    "}",
	SEMICOLON: ";",
	COLON:     ":",

	BREAK:    "break",
	CASE:     "case",
	CHAN:     "chan",
	CONST:    "const",
	CONTINUE: "continue",

	DEFAULT:     "default",
	DEFER:       "defer",
	ELSE:        "else",
	FALLTHROUGH: "fallthrough",
	FOR:         "for",

	FUNC:   "func",
	GO:     "go",
	GOSEC:  "gosecure",
	GOTO:   "goto",
	IF:     "if",
	IMPORT: "import",

	INTERFACE: "interface",
	MAP:       "map",
	PACKAGE:   "package",
	RANGE:     "range",
	RETURN:    "return",
	SELECT: "select",
	STRUCT: "struct",
	SWITCH: "switch",
	TYPE:   "type",
	VAR:    "var",
}

// String returns the string corresponding to the token tok.
// For operators, delimiters, and keywords the string is the actual
// token character sequence (e.g., for the token ADD, the string is
// "+"). For all other tokens the string corresponds to the token
// constant name (e.g. for the token IDENT, the string is "IDENT").
//
func (tok Token) String() string {
	s := ""
	if 0 <= tok && tok < Token(len(tokens)) {
		s = tokens[tok]
	}
	if s == "" {
		s = "token(" + strconv.Itoa(int(tok)) + ")"
	}
	return s
}

// A set of constants for precedence-based expression parsing.
// Non-operators have lowest precedence, followed by operators
// starting with precedence 1 up to unary operators. The highest
// precedence serves as "catch-all" precedence for selector,
// indexing, and other operator and delimiter tokens.
//
const (
	LowestPrec  = 0 // non-operators
	UnaryPrec   = 6
	HighestPrec = 7
)

// Precedence returns the operator precedence of the binary
// operator op. If op is not a binary operator, the result
// is LowestPrec.
//
func (op Token) Precedence() int {
	switch op {
	case LOR:
		return 1
	case LAND:
		return 2
	case EQL, NEQ, LSS, LEQ, GTR, GEQ:
		return 3
	case ADD, SUB, OR, XOR:
		return 4
	case MUL, QUO, REM, SHL, SHR, AND, AND_NOT:
		return 5
	}
	return LowestPrec
}

var keywords map[string]Token

func init() {
	keywords = make(map[string]Token)
	for i := keyword_beg + 1; i < keyword_end; i++ {
		keywords[tokens[i]] = i
	}
}

// Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
//
func Lookup(ident string) Token {
	if tok, is_keyword := keywords[ident]; is_keyword {
		return tok
	}
	return IDENT
}

// Predicates

// IsLiteral returns true for tokens corresponding to identifiers
// and basic type literals; it returns false otherwise.
//
func (tok Token) IsLiteral() bool { return literal_beg < tok && tok < literal_end }

// IsOperator returns true for tokens corresponding to operators and
// delimiters; it returns false otherwise.
//
func (tok Token) IsOperator() bool { return operator_beg < tok && tok < operator_end }

// IsKeyword returns true for tokens corresponding to keywords;
// it returns false otherwise.
//
func (tok Token) IsKeyword() bool { return keyword_beg < tok && tok < keyword_end }
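
// A minimal usage sketch of this package's API (Lookup, Precedence, String,
// and the predicates). It would live in a separate external test file; the
// file name example_test.go and the ExampleLookup name are assumptions, not
// part of this package:
//
//	package token_test
//
//	import (
//		"fmt"
//
//		"go/token"
//	)
//
//	func ExampleLookup() {
//		// "gosecure" maps to the GOSEC keyword added by the gotee fork;
//		// any other identifier falls back to IDENT.
//		fmt.Println(token.Lookup("gosecure"), token.Lookup("main"))
//		fmt.Println(token.MUL.Precedence() > token.ADD.Precedence())
//		fmt.Println(token.GOSEC.IsKeyword(), token.INT.IsLiteral(), token.ADD.IsOperator())
//		// Output:
//		// gosecure IDENT
//		// true
//		// true true true
//	}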