github.com/panjjo/go@v0.0.0-20161104043856-d62b31386338/src/go/token/token.go (about)

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Package token defines constants representing the lexical tokens of the Go
     6  // programming language and basic operations on tokens (printing, predicates).
     7  //
     8  package token
     9  
    10  import "strconv"
    11  
// Token is the set of lexical tokens of the Go programming language.
// Token values are assigned by iota in the constant list below, so the
// ordering of that list is significant; ILLEGAL is the zero value.
type Token int
    14  
// The list of tokens.
//
// The ordering here is significant: Token values come from iota, the
// tokens array below is indexed by these values, and the unexported
// *_beg/*_end sentinels bracket ranges tested by the IsLiteral,
// IsOperator, and IsKeyword predicates.
const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	COMMENT

	// literal_beg and literal_end are sentinels delimiting the literal
	// tokens for the IsLiteral range check; they never appear in input.
	literal_beg
	// Identifiers and basic type literals
	// (these tokens stand for classes of literals)
	IDENT  // main
	INT    // 12345
	FLOAT  // 123.45
	IMAG   // 123.45i
	CHAR   // 'a'
	STRING // "abc"
	literal_end

	// operator_beg and operator_end delimit the operator and delimiter
	// tokens for the IsOperator range check (ALIAS, added later, lies
	// outside this range and is special-cased).
	operator_beg
	// Operators and delimiters
	ADD // +
	SUB // -
	MUL // *
	QUO // /
	REM // %

	AND     // &
	OR      // |
	XOR     // ^
	SHL     // <<
	SHR     // >>
	AND_NOT // &^

	ADD_ASSIGN // +=
	SUB_ASSIGN // -=
	MUL_ASSIGN // *=
	QUO_ASSIGN // /=
	REM_ASSIGN // %=

	AND_ASSIGN     // &=
	OR_ASSIGN      // |=
	XOR_ASSIGN     // ^=
	SHL_ASSIGN     // <<=
	SHR_ASSIGN     // >>=
	AND_NOT_ASSIGN // &^=

	LAND  // &&
	LOR   // ||
	ARROW // <-
	INC   // ++
	DEC   // --

	EQL    // ==
	LSS    // <
	GTR    // >
	ASSIGN // =
	NOT    // !

	NEQ      // !=
	LEQ      // <=
	GEQ      // >=
	DEFINE   // :=
	ELLIPSIS // ...

	LPAREN // (
	LBRACK // [
	LBRACE // {
	COMMA  // ,
	PERIOD // .

	RPAREN    // )
	RBRACK    // ]
	RBRACE    // }
	SEMICOLON // ;
	COLON     // :
	operator_end

	// keyword_beg and keyword_end delimit the keyword tokens for the
	// IsKeyword range check and the keywords-map construction in init.
	keyword_beg
	// Keywords
	BREAK
	CASE
	CHAN
	CONST
	CONTINUE

	DEFAULT
	DEFER
	ELSE
	FALLTHROUGH
	FOR

	FUNC
	GO
	GOTO
	IF
	IMPORT

	INTERFACE
	MAP
	PACKAGE
	RANGE
	RETURN

	SELECT
	STRUCT
	SWITCH
	TYPE
	VAR
	keyword_end

	// Alias support - must add at end to pass Go 1 compatibility test
	// (inserting it earlier would shift the iota values of the tokens
	// above; IsOperator special-cases it since it lies past operator_end).

	ALIAS // =>
)
   129  
// tokens maps each Token value (used as the array index) to its display
// string: the literal source text for operators, delimiters, and keywords,
// and the constant's own name for the other tokens. The unexported
// sentinel values have no entry and default to "", which String detects
// and replaces with a "token(n)" fallback.
var tokens = [...]string{
	ILLEGAL: "ILLEGAL",

	EOF:     "EOF",
	COMMENT: "COMMENT",

	IDENT:  "IDENT",
	INT:    "INT",
	FLOAT:  "FLOAT",
	IMAG:   "IMAG",
	CHAR:   "CHAR",
	STRING: "STRING",

	ADD: "+",
	SUB: "-",
	MUL: "*",
	QUO: "/",
	REM: "%",

	AND:     "&",
	OR:      "|",
	XOR:     "^",
	SHL:     "<<",
	SHR:     ">>",
	AND_NOT: "&^",

	ADD_ASSIGN: "+=",
	SUB_ASSIGN: "-=",
	MUL_ASSIGN: "*=",
	QUO_ASSIGN: "/=",
	REM_ASSIGN: "%=",

	AND_ASSIGN:     "&=",
	OR_ASSIGN:      "|=",
	XOR_ASSIGN:     "^=",
	SHL_ASSIGN:     "<<=",
	SHR_ASSIGN:     ">>=",
	AND_NOT_ASSIGN: "&^=",

	LAND:  "&&",
	LOR:   "||",
	ARROW: "<-",
	INC:   "++",
	DEC:   "--",

	EQL:    "==",
	LSS:    "<",
	GTR:    ">",
	ASSIGN: "=",
	NOT:    "!",

	NEQ:      "!=",
	LEQ:      "<=",
	GEQ:      ">=",
	DEFINE:   ":=",
	ELLIPSIS: "...",

	LPAREN: "(",
	LBRACK: "[",
	LBRACE: "{",
	COMMA:  ",",
	PERIOD: ".",

	RPAREN:    ")",
	RBRACK:    "]",
	RBRACE:    "}",
	SEMICOLON: ";",
	COLON:     ":",

	BREAK:    "break",
	CASE:     "case",
	CHAN:     "chan",
	CONST:    "const",
	CONTINUE: "continue",

	DEFAULT:     "default",
	DEFER:       "defer",
	ELSE:        "else",
	FALLTHROUGH: "fallthrough",
	FOR:         "for",

	FUNC:   "func",
	GO:     "go",
	GOTO:   "goto",
	IF:     "if",
	IMPORT: "import",

	INTERFACE: "interface",
	MAP:       "map",
	PACKAGE:   "package",
	RANGE:     "range",
	RETURN:    "return",

	SELECT: "select",
	STRUCT: "struct",
	SWITCH: "switch",
	TYPE:   "type",
	VAR:    "var",

	ALIAS: "=>",
}
   231  
   232  // String returns the string corresponding to the token tok.
   233  // For operators, delimiters, and keywords the string is the actual
   234  // token character sequence (e.g., for the token ADD, the string is
   235  // "+"). For all other tokens the string corresponds to the token
   236  // constant name (e.g. for the token IDENT, the string is "IDENT").
   237  //
   238  func (tok Token) String() string {
   239  	s := ""
   240  	if 0 <= tok && tok < Token(len(tokens)) {
   241  		s = tokens[tok]
   242  	}
   243  	if s == "" {
   244  		s = "token(" + strconv.Itoa(int(tok)) + ")"
   245  	}
   246  	return s
   247  }
   248  
// A set of constants for precedence-based expression parsing.
// Non-operators have lowest precedence, followed by operators
// starting with precedence 1 up to unary operators. The highest
// precedence serves as "catch-all" precedence for selector,
// indexing, and other operator and delimiter tokens.
//
const (
	LowestPrec  = 0 // non-operators (also returned by Precedence for non-binary ops)
	UnaryPrec   = 6 // unary operators bind tighter than any binary operator (max binary is 5)
	HighestPrec = 7 // selectors, indexing, calls, etc.
)
   260  
   261  // Precedence returns the operator precedence of the binary
   262  // operator op. If op is not a binary operator, the result
   263  // is LowestPrecedence.
   264  //
   265  func (op Token) Precedence() int {
   266  	switch op {
   267  	case LOR:
   268  		return 1
   269  	case LAND:
   270  		return 2
   271  	case EQL, NEQ, LSS, LEQ, GTR, GEQ:
   272  		return 3
   273  	case ADD, SUB, OR, XOR:
   274  		return 4
   275  	case MUL, QUO, REM, SHL, SHR, AND, AND_NOT:
   276  		return 5
   277  	}
   278  	return LowestPrec
   279  }
   280  
   281  var keywords map[string]Token
   282  
   283  func init() {
   284  	keywords = make(map[string]Token)
   285  	for i := keyword_beg + 1; i < keyword_end; i++ {
   286  		keywords[tokens[i]] = i
   287  	}
   288  }
   289  
   290  // Lookup maps an identifier to its keyword token or IDENT (if not a keyword).
   291  //
   292  func Lookup(ident string) Token {
   293  	if tok, is_keyword := keywords[ident]; is_keyword {
   294  		return tok
   295  	}
   296  	return IDENT
   297  }
   298  
   299  // Predicates
   300  
   301  // IsLiteral returns true for tokens corresponding to identifiers
   302  // and basic type literals; it returns false otherwise.
   303  //
   304  func (tok Token) IsLiteral() bool { return literal_beg < tok && tok < literal_end }
   305  
   306  // IsOperator returns true for tokens corresponding to operators and
   307  // delimiters; it returns false otherwise.
   308  //
   309  func (tok Token) IsOperator() bool { return operator_beg < tok && tok < operator_end || tok == ALIAS }
   310  
   311  // IsKeyword returns true for tokens corresponding to keywords;
   312  // it returns false otherwise.
   313  //
   314  func (tok Token) IsKeyword() bool { return keyword_beg < tok && tok < keyword_end }