github.com/rsc/go@v0.0.0-20150416155037-e040fd465409/src/go/parser/parser.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Package parser implements a parser for Go source files. Input may be
     6  // provided in a variety of forms (see the various Parse* functions); the
     7  // output is an abstract syntax tree (AST) representing the Go source. The
     8  // parser is invoked through one of the Parse* functions.
     9  //
    10  package parser
    11  
    12  import (
    13  	"fmt"
    14  	"go/ast"
    15  	"go/scanner"
    16  	"go/token"
    17  	"strconv"
    18  	"strings"
    19  	"unicode"
    20  )
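
// As a quick sketch of typical use (the file name "hello.go" below is only
// illustrative), a source file is parsed through one of the Parse* entry points:
//
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "hello.go", nil, parser.ParseComments)
//	if err != nil {
//		// handle parse errors (syntax errors arrive as a scanner.ErrorList)
//	}
//	ast.Print(fset, f) // inspect the resulting *ast.File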
    21  
    22  // The parser structure holds the parser's internal state.
    23  type parser struct {
    24  	file    *token.File
    25  	errors  scanner.ErrorList
    26  	scanner scanner.Scanner
    27  
    28  	// Tracing/debugging
    29  	mode   Mode // parsing mode
    30  	trace  bool // == (mode & Trace != 0)
    31  	indent int  // indentation used for tracing output
    32  
    33  	// Comments
    34  	comments    []*ast.CommentGroup
    35  	leadComment *ast.CommentGroup // last lead comment
    36  	lineComment *ast.CommentGroup // last line comment
    37  
    38  	// Next token
    39  	pos token.Pos   // token position
    40  	tok token.Token // one token look-ahead
    41  	lit string      // token literal
    42  
    43  	// Error recovery
    44  	// (used to limit the number of calls to syncXXX functions
    45  	// w/o making scanning progress - avoids potential endless
    46  	// loops across multiple parser functions during error recovery)
    47  	syncPos token.Pos // last synchronization position
    48  	syncCnt int       // number of calls to syncXXX without progress
    49  
    50  	// Non-syntactic parser control
    51  	exprLev int  // < 0: in control clause, >= 0: in expression
    52  	inRhs   bool // if set, the parser is parsing a rhs expression
    53  
    54  	// Ordinary identifier scopes
    55  	pkgScope   *ast.Scope        // pkgScope.Outer == nil
    56  	topScope   *ast.Scope        // top-most scope; may be pkgScope
    57  	unresolved []*ast.Ident      // unresolved identifiers
    58  	imports    []*ast.ImportSpec // list of imports
    59  
    60  	// Label scopes
    61  	// (maintained by open/close LabelScope)
    62  	labelScope  *ast.Scope     // label scope for current function
    63  	targetStack [][]*ast.Ident // stack of unresolved labels
    64  }
    65  
    66  func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
    67  	p.file = fset.AddFile(filename, -1, len(src))
    68  	var m scanner.Mode
    69  	if mode&ParseComments != 0 {
    70  		m = scanner.ScanComments
    71  	}
    72  	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
    73  	p.scanner.Init(p.file, src, eh, m)
    74  
    75  	p.mode = mode
    76  	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)
    77  
    78  	p.next()
    79  }
    80  
    81  // ----------------------------------------------------------------------------
    82  // Scoping support
    83  
    84  func (p *parser) openScope() {
    85  	p.topScope = ast.NewScope(p.topScope)
    86  }
    87  
    88  func (p *parser) closeScope() {
    89  	p.topScope = p.topScope.Outer
    90  }
    91  
    92  func (p *parser) openLabelScope() {
    93  	p.labelScope = ast.NewScope(p.labelScope)
    94  	p.targetStack = append(p.targetStack, nil)
    95  }
    96  
    97  func (p *parser) closeLabelScope() {
    98  	// resolve labels
    99  	n := len(p.targetStack) - 1
   100  	scope := p.labelScope
   101  	for _, ident := range p.targetStack[n] {
   102  		ident.Obj = scope.Lookup(ident.Name)
   103  		if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
   104  			p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
   105  		}
   106  	}
   107  	// pop label scope
   108  	p.targetStack = p.targetStack[0:n]
   109  	p.labelScope = p.labelScope.Outer
   110  }
   111  
   112  func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
   113  	for _, ident := range idents {
   114  		assert(ident.Obj == nil, "identifier already declared or resolved")
   115  		obj := ast.NewObj(kind, ident.Name)
   116  		// remember the corresponding declaration for redeclaration
   117  		// errors and global variable resolution/typechecking phase
   118  		obj.Decl = decl
   119  		obj.Data = data
   120  		ident.Obj = obj
   121  		if ident.Name != "_" {
   122  			if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
   123  				prevDecl := ""
   124  				if pos := alt.Pos(); pos.IsValid() {
   125  					prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
   126  				}
   127  				p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
   128  			}
   129  		}
   130  	}
   131  }
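
// With DeclarationErrors set, redeclaring a non-blank name in the same scope is
// reported here; for example (sketch):
//
//	var x int
//	var x string // "x redeclared in this block\n\tprevious declaration at <pos>"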
   132  
   133  func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
   134  	// Go spec: A short variable declaration may redeclare variables
   135  	// provided they were originally declared in the same block with
   136  	// the same type, and at least one of the non-blank variables is new.
   137  	n := 0 // number of new variables
   138  	for _, x := range list {
   139  		if ident, isIdent := x.(*ast.Ident); isIdent {
   140  			assert(ident.Obj == nil, "identifier already declared or resolved")
   141  			obj := ast.NewObj(ast.Var, ident.Name)
   142  			// remember corresponding assignment for other tools
   143  			obj.Decl = decl
   144  			ident.Obj = obj
   145  			if ident.Name != "_" {
   146  				if alt := p.topScope.Insert(obj); alt != nil {
   147  					ident.Obj = alt // redeclaration
   148  				} else {
   149  					n++ // new declaration
   150  				}
   151  			}
   152  		} else {
   153  			p.errorExpected(x.Pos(), "identifier on left side of :=")
   154  		}
   155  	}
   156  	if n == 0 && p.mode&DeclarationErrors != 0 {
   157  		p.error(list[0].Pos(), "no new variables on left side of :=")
   158  	}
   159  }
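
// The spec rule quoted above permits, for example (assuming os is imported):
//
//	f, err := os.Open(name1) // declares f and err
//	g, err := os.Open(name2) // ok: err is redeclared, g is new
//	f, err := os.Open(name3) // error: no new variables on left side of :=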
   160  
   161  // The unresolved object is a sentinel to mark identifiers that have been added
   162  // to the list of unresolved identifiers. The sentinel is only used for verifying
   163  // internal consistency.
   164  var unresolved = new(ast.Object)
   165  
   166  // If x is an identifier, tryResolve attempts to resolve x by looking up
   167  // the object it denotes. If no object is found and collectUnresolved is
   168  // set, x is marked as unresolved and collected in the list of unresolved
   169  // identifiers.
   170  //
   171  func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
   172  	// nothing to do if x is not an identifier or the blank identifier
   173  	ident, _ := x.(*ast.Ident)
   174  	if ident == nil {
   175  		return
   176  	}
   177  	assert(ident.Obj == nil, "identifier already declared or resolved")
   178  	if ident.Name == "_" {
   179  		return
   180  	}
   181  	// try to resolve the identifier
   182  	for s := p.topScope; s != nil; s = s.Outer {
   183  		if obj := s.Lookup(ident.Name); obj != nil {
   184  			ident.Obj = obj
   185  			return
   186  		}
   187  	}
   188  	// all local scopes are known, so any unresolved identifier
   189  	// must be found either in the file scope, package scope
   190  	// (perhaps in another file), or universe scope --- collect
   191  	// them so that they can be resolved later
   192  	if collectUnresolved {
   193  		ident.Obj = unresolved
   194  		p.unresolved = append(p.unresolved, ident)
   195  	}
   196  }
   197  
   198  func (p *parser) resolve(x ast.Expr) {
   199  	p.tryResolve(x, true)
   200  }
   201  
   202  // ----------------------------------------------------------------------------
   203  // Parsing support
   204  
   205  func (p *parser) printTrace(a ...interface{}) {
   206  	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
   207  	const n = len(dots)
   208  	pos := p.file.Position(p.pos)
   209  	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
   210  	i := 2 * p.indent
   211  	for i > n {
   212  		fmt.Print(dots)
   213  		i -= n
   214  	}
   215  	// i <= n
   216  	fmt.Print(dots[0:i])
   217  	fmt.Println(a...)
   218  }
   219  
   220  func trace(p *parser, msg string) *parser {
   221  	p.printTrace(msg, "(")
   222  	p.indent++
   223  	return p
   224  }
   225  
   226  // Usage pattern: defer un(trace(p, "..."))
   227  func un(p *parser) {
   228  	p.indent--
   229  	p.printTrace(")")
   230  }
   231  
   232  // Advance to the next token.
   233  func (p *parser) next0() {
   234  	// Because of one-token look-ahead, print the previous token
   235  	// when tracing as it provides a more readable output. The
   236  	// very first token (!p.pos.IsValid()) is not initialized
    237  	// (it is token.ILLEGAL), so don't print it.
   238  	if p.trace && p.pos.IsValid() {
   239  		s := p.tok.String()
   240  		switch {
   241  		case p.tok.IsLiteral():
   242  			p.printTrace(s, p.lit)
   243  		case p.tok.IsOperator(), p.tok.IsKeyword():
   244  			p.printTrace("\"" + s + "\"")
   245  		default:
   246  			p.printTrace(s)
   247  		}
   248  	}
   249  
   250  	p.pos, p.tok, p.lit = p.scanner.Scan()
   251  }
   252  
   253  // Consume a comment and return it and the line on which it ends.
   254  func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
   255  	// /*-style comments may end on a different line than where they start.
   256  	// Scan the comment for '\n' chars and adjust endline accordingly.
   257  	endline = p.file.Line(p.pos)
   258  	if p.lit[1] == '*' {
   259  		// don't use range here - no need to decode Unicode code points
   260  		for i := 0; i < len(p.lit); i++ {
   261  			if p.lit[i] == '\n' {
   262  				endline++
   263  			}
   264  		}
   265  	}
   266  
   267  	comment = &ast.Comment{Slash: p.pos, Text: p.lit}
   268  	p.next0()
   269  
   270  	return
   271  }
   272  
   273  // Consume a group of adjacent comments, add it to the parser's
   274  // comments list, and return it together with the line at which
   275  // the last comment in the group ends. A non-comment token or n
   276  // empty lines terminate a comment group.
   277  //
   278  func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
   279  	var list []*ast.Comment
   280  	endline = p.file.Line(p.pos)
   281  	for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
   282  		var comment *ast.Comment
   283  		comment, endline = p.consumeComment()
   284  		list = append(list, comment)
   285  	}
   286  
   287  	// add comment group to the comments list
   288  	comments = &ast.CommentGroup{List: list}
   289  	p.comments = append(p.comments, comments)
   290  
   291  	return
   292  }
   293  
   294  // Advance to the next non-comment token. In the process, collect
    295  // any comment groups encountered, and remember the last lead
    296  // and line comments.
   297  //
   298  // A lead comment is a comment group that starts and ends in a
   299  // line without any other tokens and that is followed by a non-comment
   300  // token on the line immediately after the comment group.
   301  //
   302  // A line comment is a comment group that follows a non-comment
   303  // token on the same line, and that has no tokens after it on the line
   304  // where it ends.
   305  //
   306  // Lead and line comments may be considered documentation that is
   307  // stored in the AST.
   308  //
   309  func (p *parser) next() {
   310  	p.leadComment = nil
   311  	p.lineComment = nil
   312  	prev := p.pos
   313  	p.next0()
   314  
   315  	if p.tok == token.COMMENT {
   316  		var comment *ast.CommentGroup
   317  		var endline int
   318  
   319  		if p.file.Line(p.pos) == p.file.Line(prev) {
    320  			// The comment is on the same line as the previous token; it
   321  			// cannot be a lead comment but may be a line comment.
   322  			comment, endline = p.consumeCommentGroup(0)
   323  			if p.file.Line(p.pos) != endline {
   324  				// The next token is on a different line, thus
   325  				// the last comment group is a line comment.
   326  				p.lineComment = comment
   327  			}
   328  		}
   329  
   330  		// consume successor comments, if any
   331  		endline = -1
   332  		for p.tok == token.COMMENT {
   333  			comment, endline = p.consumeCommentGroup(1)
   334  		}
   335  
   336  		if endline+1 == p.file.Line(p.pos) {
   337  			// The next token is following on the line immediately after the
   338  			// comment group, thus the last comment group is a lead comment.
   339  			p.leadComment = comment
   340  		}
   341  	}
   342  }
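
// For instance, in
//
//	// Foo reports something.
//	func Foo() bool { return true }
//
//	var n int // count of things
//
// the first comment group becomes the lead comment of the func declaration and
// "// count of things" becomes the line comment of the var declaration.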
   343  
   344  // A bailout panic is raised to indicate early termination.
   345  type bailout struct{}
   346  
   347  func (p *parser) error(pos token.Pos, msg string) {
   348  	epos := p.file.Position(pos)
   349  
   350  	// If AllErrors is not set, discard errors reported on the same line
   351  	// as the last recorded error and stop parsing if there are more than
   352  	// 10 errors.
   353  	if p.mode&AllErrors == 0 {
   354  		n := len(p.errors)
   355  		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
   356  			return // discard - likely a spurious error
   357  		}
   358  		if n > 10 {
   359  			panic(bailout{})
   360  		}
   361  	}
   362  
   363  	p.errors.Add(epos, msg)
   364  }
   365  
   366  func (p *parser) errorExpected(pos token.Pos, msg string) {
   367  	msg = "expected " + msg
   368  	if pos == p.pos {
   369  		// the error happened at the current position;
   370  		// make the error message more specific
   371  		if p.tok == token.SEMICOLON && p.lit == "\n" {
   372  			msg += ", found newline"
   373  		} else {
   374  			msg += ", found '" + p.tok.String() + "'"
   375  			if p.tok.IsLiteral() {
   376  				msg += " " + p.lit
   377  			}
   378  		}
   379  	}
   380  	p.error(pos, msg)
   381  }
   382  
   383  func (p *parser) expect(tok token.Token) token.Pos {
   384  	pos := p.pos
   385  	if p.tok != tok {
   386  		p.errorExpected(pos, "'"+tok.String()+"'")
   387  	}
   388  	p.next() // make progress
   389  	return pos
   390  }
   391  
   392  // expectClosing is like expect but provides a better error message
   393  // for the common case of a missing comma before a newline.
   394  //
   395  func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
   396  	if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
   397  		p.error(p.pos, "missing ',' before newline in "+context)
   398  		p.next()
   399  	}
   400  	return p.expect(tok)
   401  }
   402  
   403  func (p *parser) expectSemi() {
   404  	// semicolon is optional before a closing ')' or '}'
   405  	if p.tok != token.RPAREN && p.tok != token.RBRACE {
   406  		if p.tok == token.SEMICOLON {
   407  			p.next()
   408  		} else {
   409  			p.errorExpected(p.pos, "';'")
   410  			syncStmt(p)
   411  		}
   412  	}
   413  }
   414  
   415  func (p *parser) atComma(context string) bool {
   416  	if p.tok == token.COMMA {
   417  		return true
   418  	}
   419  	if p.tok == token.SEMICOLON && p.lit == "\n" {
   420  		p.error(p.pos, "missing ',' before newline in "+context)
   421  		return true // "insert" the comma and continue
   422  
   423  	}
   424  	return false
   425  }
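
// atComma above (and expectClosing) handle the common mistake of omitting a ','
// before a newline, for example:
//
//	x := []int{
//		1,
//		2
//	}
//
// which is reported as "missing ',' before newline in composite literal".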
   426  
   427  func assert(cond bool, msg string) {
   428  	if !cond {
   429  		panic("go/parser internal error: " + msg)
   430  	}
   431  }
   432  
   433  // syncStmt advances to the next statement.
   434  // Used for synchronization after an error.
   435  //
   436  func syncStmt(p *parser) {
   437  	for {
   438  		switch p.tok {
   439  		case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
   440  			token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
   441  			token.IF, token.RETURN, token.SELECT, token.SWITCH,
   442  			token.TYPE, token.VAR:
   443  			// Return only if parser made some progress since last
   444  			// sync or if it has not reached 10 sync calls without
   445  			// progress. Otherwise consume at least one token to
   446  			// avoid an endless parser loop (it is possible that
   447  			// both parseOperand and parseStmt call syncStmt and
   448  			// correctly do not advance, thus the need for the
   449  			// invocation limit p.syncCnt).
   450  			if p.pos == p.syncPos && p.syncCnt < 10 {
   451  				p.syncCnt++
   452  				return
   453  			}
   454  			if p.pos > p.syncPos {
   455  				p.syncPos = p.pos
   456  				p.syncCnt = 0
   457  				return
   458  			}
   459  			// Reaching here indicates a parser bug, likely an
   460  			// incorrect token list in this function, but it only
   461  			// leads to skipping of possibly correct code if a
   462  			// previous error is present, and thus is preferred
   463  			// over a non-terminating parse.
   464  		case token.EOF:
   465  			return
   466  		}
   467  		p.next()
   468  	}
   469  }
   470  
   471  // syncDecl advances to the next declaration.
   472  // Used for synchronization after an error.
   473  //
   474  func syncDecl(p *parser) {
   475  	for {
   476  		switch p.tok {
   477  		case token.CONST, token.TYPE, token.VAR:
   478  			// see comments in syncStmt
   479  			if p.pos == p.syncPos && p.syncCnt < 10 {
   480  				p.syncCnt++
   481  				return
   482  			}
   483  			if p.pos > p.syncPos {
   484  				p.syncPos = p.pos
   485  				p.syncCnt = 0
   486  				return
   487  			}
   488  		case token.EOF:
   489  			return
   490  		}
   491  		p.next()
   492  	}
   493  }
   494  
   495  // safePos returns a valid file position for a given position: If pos
   496  // is valid to begin with, safePos returns pos. If pos is out-of-range,
   497  // safePos returns the EOF position.
   498  //
    499  // This is a hack to work around "artificial" end positions in the AST which
   500  // are computed by adding 1 to (presumably valid) token positions. If the
   501  // token positions are invalid due to parse errors, the resulting end position
   502  // may be past the file's EOF position, which would lead to panics if used
   503  // later on.
   504  //
   505  func (p *parser) safePos(pos token.Pos) (res token.Pos) {
   506  	defer func() {
   507  		if recover() != nil {
   508  			res = token.Pos(p.file.Base() + p.file.Size()) // EOF position
   509  		}
   510  	}()
   511  	_ = p.file.Offset(pos) // trigger a panic if position is out-of-range
   512  	return pos
   513  }
   514  
   515  // ----------------------------------------------------------------------------
   516  // Identifiers
   517  
   518  func (p *parser) parseIdent() *ast.Ident {
   519  	pos := p.pos
   520  	name := "_"
   521  	if p.tok == token.IDENT {
   522  		name = p.lit
   523  		p.next()
   524  	} else {
   525  		p.expect(token.IDENT) // use expect() error handling
   526  	}
   527  	return &ast.Ident{NamePos: pos, Name: name}
   528  }
   529  
   530  func (p *parser) parseIdentList() (list []*ast.Ident) {
   531  	if p.trace {
   532  		defer un(trace(p, "IdentList"))
   533  	}
   534  
   535  	list = append(list, p.parseIdent())
   536  	for p.tok == token.COMMA {
   537  		p.next()
   538  		list = append(list, p.parseIdent())
   539  	}
   540  
   541  	return
   542  }
   543  
   544  // ----------------------------------------------------------------------------
   545  // Common productions
   546  
   547  // If lhs is set, result list elements which are identifiers are not resolved.
   548  func (p *parser) parseExprList(lhs bool) (list []ast.Expr) {
   549  	if p.trace {
   550  		defer un(trace(p, "ExpressionList"))
   551  	}
   552  
   553  	list = append(list, p.checkExpr(p.parseExpr(lhs)))
   554  	for p.tok == token.COMMA {
   555  		p.next()
   556  		list = append(list, p.checkExpr(p.parseExpr(lhs)))
   557  	}
   558  
   559  	return
   560  }
   561  
   562  func (p *parser) parseLhsList() []ast.Expr {
   563  	old := p.inRhs
   564  	p.inRhs = false
   565  	list := p.parseExprList(true)
   566  	switch p.tok {
   567  	case token.DEFINE:
   568  		// lhs of a short variable declaration
   569  		// but doesn't enter scope until later:
   570  		// caller must call p.shortVarDecl(p.makeIdentList(list))
   571  		// at appropriate time.
   572  	case token.COLON:
   573  		// lhs of a label declaration or a communication clause of a select
   574  		// statement (parseLhsList is not called when parsing the case clause
   575  		// of a switch statement):
   576  		// - labels are declared by the caller of parseLhsList
   577  		// - for communication clauses, if there is a stand-alone identifier
   578  		//   followed by a colon, we have a syntax error; there is no need
   579  		//   to resolve the identifier in that case
   580  	default:
   581  		// identifiers must be declared elsewhere
   582  		for _, x := range list {
   583  			p.resolve(x)
   584  		}
   585  	}
   586  	p.inRhs = old
   587  	return list
   588  }
   589  
   590  func (p *parser) parseRhsList() []ast.Expr {
   591  	old := p.inRhs
   592  	p.inRhs = true
   593  	list := p.parseExprList(false)
   594  	p.inRhs = old
   595  	return list
   596  }
   597  
   598  // ----------------------------------------------------------------------------
   599  // Types
   600  
   601  func (p *parser) parseType() ast.Expr {
   602  	if p.trace {
   603  		defer un(trace(p, "Type"))
   604  	}
   605  
   606  	typ := p.tryType()
   607  
   608  	if typ == nil {
   609  		pos := p.pos
   610  		p.errorExpected(pos, "type")
   611  		p.next() // make progress
   612  		return &ast.BadExpr{From: pos, To: p.pos}
   613  	}
   614  
   615  	return typ
   616  }
   617  
   618  // If the result is an identifier, it is not resolved.
   619  func (p *parser) parseTypeName() ast.Expr {
   620  	if p.trace {
   621  		defer un(trace(p, "TypeName"))
   622  	}
   623  
   624  	ident := p.parseIdent()
   625  	// don't resolve ident yet - it may be a parameter or field name
   626  
   627  	if p.tok == token.PERIOD {
   628  		// ident is a package name
   629  		p.next()
   630  		p.resolve(ident)
   631  		sel := p.parseIdent()
   632  		return &ast.SelectorExpr{X: ident, Sel: sel}
   633  	}
   634  
   635  	return ident
   636  }
   637  
   638  func (p *parser) parseArrayType() ast.Expr {
   639  	if p.trace {
   640  		defer un(trace(p, "ArrayType"))
   641  	}
   642  
   643  	lbrack := p.expect(token.LBRACK)
   644  	p.exprLev++
   645  	var len ast.Expr
   646  	// always permit ellipsis for more fault-tolerant parsing
   647  	if p.tok == token.ELLIPSIS {
   648  		len = &ast.Ellipsis{Ellipsis: p.pos}
   649  		p.next()
   650  	} else if p.tok != token.RBRACK {
   651  		len = p.parseRhs()
   652  	}
   653  	p.exprLev--
   654  	p.expect(token.RBRACK)
   655  	elt := p.parseType()
   656  
   657  	return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt}
   658  }
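
// The ellipsis form is only legal as a composite literal type; it is accepted
// here regardless, and the misuse is diagnosed later (e.g. by the type checker):
//
//	a := [...]int{1, 2, 3} // ok: length derived from the literal
//	var b [...]int         // parses here, but is not a valid array type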
   659  
   660  func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident {
   661  	idents := make([]*ast.Ident, len(list))
   662  	for i, x := range list {
   663  		ident, isIdent := x.(*ast.Ident)
   664  		if !isIdent {
   665  			if _, isBad := x.(*ast.BadExpr); !isBad {
   666  				// only report error if it's a new one
   667  				p.errorExpected(x.Pos(), "identifier")
   668  			}
   669  			ident = &ast.Ident{NamePos: x.Pos(), Name: "_"}
   670  		}
   671  		idents[i] = ident
   672  	}
   673  	return idents
   674  }
   675  
   676  func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
   677  	if p.trace {
   678  		defer un(trace(p, "FieldDecl"))
   679  	}
   680  
   681  	doc := p.leadComment
   682  
   683  	// FieldDecl
   684  	list, typ := p.parseVarList(false)
   685  
   686  	// Tag
   687  	var tag *ast.BasicLit
   688  	if p.tok == token.STRING {
   689  		tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
   690  		p.next()
   691  	}
   692  
   693  	// analyze case
   694  	var idents []*ast.Ident
   695  	if typ != nil {
   696  		// IdentifierList Type
   697  		idents = p.makeIdentList(list)
   698  	} else {
   699  		// ["*"] TypeName (AnonymousField)
   700  		typ = list[0] // we always have at least one element
   701  		if n := len(list); n > 1 || !isTypeName(deref(typ)) {
   702  			pos := typ.Pos()
   703  			p.errorExpected(pos, "anonymous field")
   704  			typ = &ast.BadExpr{From: pos, To: p.safePos(list[n-1].End())}
   705  		}
   706  	}
   707  
   708  	p.expectSemi() // call before accessing p.linecomment
   709  
   710  	field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment}
   711  	p.declare(field, nil, scope, ast.Var, idents...)
   712  	p.resolve(typ)
   713  
   714  	return field
   715  }
   716  
   717  func (p *parser) parseStructType() *ast.StructType {
   718  	if p.trace {
   719  		defer un(trace(p, "StructType"))
   720  	}
   721  
   722  	pos := p.expect(token.STRUCT)
   723  	lbrace := p.expect(token.LBRACE)
   724  	scope := ast.NewScope(nil) // struct scope
   725  	var list []*ast.Field
   726  	for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN {
   727  		// a field declaration cannot start with a '(' but we accept
   728  		// it here for more robust parsing and better error messages
   729  		// (parseFieldDecl will check and complain if necessary)
   730  		list = append(list, p.parseFieldDecl(scope))
   731  	}
   732  	rbrace := p.expect(token.RBRACE)
   733  
   734  	return &ast.StructType{
   735  		Struct: pos,
   736  		Fields: &ast.FieldList{
   737  			Opening: lbrace,
   738  			List:    list,
   739  			Closing: rbrace,
   740  		},
   741  	}
   742  }
   743  
   744  func (p *parser) parsePointerType() *ast.StarExpr {
   745  	if p.trace {
   746  		defer un(trace(p, "PointerType"))
   747  	}
   748  
   749  	star := p.expect(token.MUL)
   750  	base := p.parseType()
   751  
   752  	return &ast.StarExpr{Star: star, X: base}
   753  }
   754  
   755  // If the result is an identifier, it is not resolved.
   756  func (p *parser) tryVarType(isParam bool) ast.Expr {
   757  	if isParam && p.tok == token.ELLIPSIS {
   758  		pos := p.pos
   759  		p.next()
    760  		typ := p.tryIdentOrType() // don't use parseType so we can provide a better error message
   761  		if typ != nil {
   762  			p.resolve(typ)
   763  		} else {
   764  			p.error(pos, "'...' parameter is missing type")
   765  			typ = &ast.BadExpr{From: pos, To: p.pos}
   766  		}
   767  		return &ast.Ellipsis{Ellipsis: pos, Elt: typ}
   768  	}
   769  	return p.tryIdentOrType()
   770  }
   771  
   772  // If the result is an identifier, it is not resolved.
   773  func (p *parser) parseVarType(isParam bool) ast.Expr {
   774  	typ := p.tryVarType(isParam)
   775  	if typ == nil {
   776  		pos := p.pos
   777  		p.errorExpected(pos, "type")
   778  		p.next() // make progress
   779  		typ = &ast.BadExpr{From: pos, To: p.pos}
   780  	}
   781  	return typ
   782  }
   783  
   784  // If any of the results are identifiers, they are not resolved.
   785  func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
   786  	if p.trace {
   787  		defer un(trace(p, "VarList"))
   788  	}
   789  
   790  	// a list of identifiers looks like a list of type names
   791  	//
   792  	// parse/tryVarType accepts any type (including parenthesized
   793  	// ones) even though the syntax does not permit them here: we
   794  	// accept them all for more robust parsing and complain later
   795  	for typ := p.parseVarType(isParam); typ != nil; {
   796  		list = append(list, typ)
   797  		if p.tok != token.COMMA {
   798  			break
   799  		}
   800  		p.next()
   801  		typ = p.tryVarType(isParam) // maybe nil as in: func f(int,) {}
   802  	}
   803  
   804  	// if we had a list of identifiers, it must be followed by a type
   805  	typ = p.tryVarType(isParam)
   806  
   807  	return
   808  }
   809  
   810  func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
   811  	if p.trace {
   812  		defer un(trace(p, "ParameterList"))
   813  	}
   814  
   815  	// ParameterDecl
   816  	list, typ := p.parseVarList(ellipsisOk)
   817  
   818  	// analyze case
   819  	if typ != nil {
   820  		// IdentifierList Type
   821  		idents := p.makeIdentList(list)
   822  		field := &ast.Field{Names: idents, Type: typ}
   823  		params = append(params, field)
   824  		// Go spec: The scope of an identifier denoting a function
   825  		// parameter or result variable is the function body.
   826  		p.declare(field, nil, scope, ast.Var, idents...)
   827  		p.resolve(typ)
   828  		if !p.atComma("parameter list") {
   829  			return
   830  		}
   831  		p.next()
   832  		for p.tok != token.RPAREN && p.tok != token.EOF {
   833  			idents := p.parseIdentList()
   834  			typ := p.parseVarType(ellipsisOk)
   835  			field := &ast.Field{Names: idents, Type: typ}
   836  			params = append(params, field)
   837  			// Go spec: The scope of an identifier denoting a function
   838  			// parameter or result variable is the function body.
   839  			p.declare(field, nil, scope, ast.Var, idents...)
   840  			p.resolve(typ)
   841  			if !p.atComma("parameter list") {
   842  				break
   843  			}
   844  			p.next()
   845  		}
   846  		return
   847  	}
   848  
   849  	// Type { "," Type } (anonymous parameters)
   850  	params = make([]*ast.Field, len(list))
   851  	for i, typ := range list {
   852  		p.resolve(typ)
   853  		params[i] = &ast.Field{Type: typ}
   854  	}
   855  	return
   856  }
   857  
   858  func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList {
   859  	if p.trace {
   860  		defer un(trace(p, "Parameters"))
   861  	}
   862  
   863  	var params []*ast.Field
   864  	lparen := p.expect(token.LPAREN)
   865  	if p.tok != token.RPAREN {
   866  		params = p.parseParameterList(scope, ellipsisOk)
   867  	}
   868  	rparen := p.expect(token.RPAREN)
   869  
   870  	return &ast.FieldList{Opening: lparen, List: params, Closing: rparen}
   871  }
   872  
   873  func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList {
   874  	if p.trace {
   875  		defer un(trace(p, "Result"))
   876  	}
   877  
   878  	if p.tok == token.LPAREN {
   879  		return p.parseParameters(scope, false)
   880  	}
   881  
   882  	typ := p.tryType()
   883  	if typ != nil {
   884  		list := make([]*ast.Field, 1)
   885  		list[0] = &ast.Field{Type: typ}
   886  		return &ast.FieldList{List: list}
   887  	}
   888  
   889  	return nil
   890  }
   891  
   892  func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) {
   893  	if p.trace {
   894  		defer un(trace(p, "Signature"))
   895  	}
   896  
   897  	params = p.parseParameters(scope, true)
   898  	results = p.parseResult(scope)
   899  
   900  	return
   901  }
   902  
   903  func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) {
   904  	if p.trace {
   905  		defer un(trace(p, "FuncType"))
   906  	}
   907  
   908  	pos := p.expect(token.FUNC)
   909  	scope := ast.NewScope(p.topScope) // function scope
   910  	params, results := p.parseSignature(scope)
   911  
   912  	return &ast.FuncType{Func: pos, Params: params, Results: results}, scope
   913  }
   914  
   915  func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
   916  	if p.trace {
   917  		defer un(trace(p, "MethodSpec"))
   918  	}
   919  
   920  	doc := p.leadComment
   921  	var idents []*ast.Ident
   922  	var typ ast.Expr
   923  	x := p.parseTypeName()
   924  	if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
   925  		// method
   926  		idents = []*ast.Ident{ident}
   927  		scope := ast.NewScope(nil) // method scope
   928  		params, results := p.parseSignature(scope)
   929  		typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results}
   930  	} else {
   931  		// embedded interface
   932  		typ = x
   933  		p.resolve(typ)
   934  	}
   935  	p.expectSemi() // call before accessing p.linecomment
   936  
   937  	spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment}
   938  	p.declare(spec, nil, scope, ast.Fun, idents...)
   939  
   940  	return spec
   941  }
   942  
   943  func (p *parser) parseInterfaceType() *ast.InterfaceType {
   944  	if p.trace {
   945  		defer un(trace(p, "InterfaceType"))
   946  	}
   947  
   948  	pos := p.expect(token.INTERFACE)
   949  	lbrace := p.expect(token.LBRACE)
   950  	scope := ast.NewScope(nil) // interface scope
   951  	var list []*ast.Field
   952  	for p.tok == token.IDENT {
   953  		list = append(list, p.parseMethodSpec(scope))
   954  	}
   955  	rbrace := p.expect(token.RBRACE)
   956  
   957  	return &ast.InterfaceType{
   958  		Interface: pos,
   959  		Methods: &ast.FieldList{
   960  			Opening: lbrace,
   961  			List:    list,
   962  			Closing: rbrace,
   963  		},
   964  	}
   965  }
   966  
   967  func (p *parser) parseMapType() *ast.MapType {
   968  	if p.trace {
   969  		defer un(trace(p, "MapType"))
   970  	}
   971  
   972  	pos := p.expect(token.MAP)
   973  	p.expect(token.LBRACK)
   974  	key := p.parseType()
   975  	p.expect(token.RBRACK)
   976  	value := p.parseType()
   977  
   978  	return &ast.MapType{Map: pos, Key: key, Value: value}
   979  }
   980  
   981  func (p *parser) parseChanType() *ast.ChanType {
   982  	if p.trace {
   983  		defer un(trace(p, "ChanType"))
   984  	}
   985  
   986  	pos := p.pos
   987  	dir := ast.SEND | ast.RECV
   988  	var arrow token.Pos
   989  	if p.tok == token.CHAN {
   990  		p.next()
   991  		if p.tok == token.ARROW {
   992  			arrow = p.pos
   993  			p.next()
   994  			dir = ast.SEND
   995  		}
   996  	} else {
   997  		arrow = p.expect(token.ARROW)
   998  		p.expect(token.CHAN)
   999  		dir = ast.RECV
  1000  	}
  1001  	value := p.parseType()
  1002  
  1003  	return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value}
  1004  }
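
// The three channel type forms map to Dir as follows:
//
//	chan T    // ast.SEND | ast.RECV
//	chan<- T  // ast.SEND
//	<-chan T  // ast.RECV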
  1005  
  1006  // If the result is an identifier, it is not resolved.
  1007  func (p *parser) tryIdentOrType() ast.Expr {
  1008  	switch p.tok {
  1009  	case token.IDENT:
  1010  		return p.parseTypeName()
  1011  	case token.LBRACK:
  1012  		return p.parseArrayType()
  1013  	case token.STRUCT:
  1014  		return p.parseStructType()
  1015  	case token.MUL:
  1016  		return p.parsePointerType()
  1017  	case token.FUNC:
  1018  		typ, _ := p.parseFuncType()
  1019  		return typ
  1020  	case token.INTERFACE:
  1021  		return p.parseInterfaceType()
  1022  	case token.MAP:
  1023  		return p.parseMapType()
  1024  	case token.CHAN, token.ARROW:
  1025  		return p.parseChanType()
  1026  	case token.LPAREN:
  1027  		lparen := p.pos
  1028  		p.next()
  1029  		typ := p.parseType()
  1030  		rparen := p.expect(token.RPAREN)
  1031  		return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen}
  1032  	}
  1033  
  1034  	// no type found
  1035  	return nil
  1036  }
  1037  
  1038  func (p *parser) tryType() ast.Expr {
  1039  	typ := p.tryIdentOrType()
  1040  	if typ != nil {
  1041  		p.resolve(typ)
  1042  	}
  1043  	return typ
  1044  }
  1045  
  1046  // ----------------------------------------------------------------------------
  1047  // Blocks
  1048  
  1049  func (p *parser) parseStmtList() (list []ast.Stmt) {
  1050  	if p.trace {
  1051  		defer un(trace(p, "StatementList"))
  1052  	}
  1053  
  1054  	for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF {
  1055  		list = append(list, p.parseStmt())
  1056  	}
  1057  
  1058  	return
  1059  }
  1060  
  1061  func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
  1062  	if p.trace {
  1063  		defer un(trace(p, "Body"))
  1064  	}
  1065  
  1066  	lbrace := p.expect(token.LBRACE)
  1067  	p.topScope = scope // open function scope
  1068  	p.openLabelScope()
  1069  	list := p.parseStmtList()
  1070  	p.closeLabelScope()
  1071  	p.closeScope()
  1072  	rbrace := p.expect(token.RBRACE)
  1073  
  1074  	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1075  }
  1076  
  1077  func (p *parser) parseBlockStmt() *ast.BlockStmt {
  1078  	if p.trace {
  1079  		defer un(trace(p, "BlockStmt"))
  1080  	}
  1081  
  1082  	lbrace := p.expect(token.LBRACE)
  1083  	p.openScope()
  1084  	list := p.parseStmtList()
  1085  	p.closeScope()
  1086  	rbrace := p.expect(token.RBRACE)
  1087  
  1088  	return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1089  }
  1090  
  1091  // ----------------------------------------------------------------------------
  1092  // Expressions
  1093  
  1094  func (p *parser) parseFuncTypeOrLit() ast.Expr {
  1095  	if p.trace {
  1096  		defer un(trace(p, "FuncTypeOrLit"))
  1097  	}
  1098  
  1099  	typ, scope := p.parseFuncType()
  1100  	if p.tok != token.LBRACE {
  1101  		// function type only
  1102  		return typ
  1103  	}
  1104  
  1105  	p.exprLev++
  1106  	body := p.parseBody(scope)
  1107  	p.exprLev--
  1108  
  1109  	return &ast.FuncLit{Type: typ, Body: body}
  1110  }
  1111  
  1112  // parseOperand may return an expression or a raw type (incl. array
   1113  // types of the form [...]T). Callers must verify the result.
  1114  // If lhs is set and the result is an identifier, it is not resolved.
  1115  //
  1116  func (p *parser) parseOperand(lhs bool) ast.Expr {
  1117  	if p.trace {
  1118  		defer un(trace(p, "Operand"))
  1119  	}
  1120  
  1121  	switch p.tok {
  1122  	case token.IDENT:
  1123  		x := p.parseIdent()
  1124  		if !lhs {
  1125  			p.resolve(x)
  1126  		}
  1127  		return x
  1128  
  1129  	case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING:
  1130  		x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit}
  1131  		p.next()
  1132  		return x
  1133  
  1134  	case token.LPAREN:
  1135  		lparen := p.pos
  1136  		p.next()
  1137  		p.exprLev++
  1138  		x := p.parseRhsOrType() // types may be parenthesized: (some type)
  1139  		p.exprLev--
  1140  		rparen := p.expect(token.RPAREN)
  1141  		return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen}
  1142  
  1143  	case token.FUNC:
  1144  		return p.parseFuncTypeOrLit()
  1145  	}
  1146  
  1147  	if typ := p.tryIdentOrType(); typ != nil {
  1148  		// could be type for composite literal or conversion
  1149  		_, isIdent := typ.(*ast.Ident)
  1150  		assert(!isIdent, "type cannot be identifier")
  1151  		return typ
  1152  	}
  1153  
  1154  	// we have an error
  1155  	pos := p.pos
  1156  	p.errorExpected(pos, "operand")
  1157  	syncStmt(p)
  1158  	return &ast.BadExpr{From: pos, To: p.pos}
  1159  }
  1160  
  1161  func (p *parser) parseSelector(x ast.Expr) ast.Expr {
  1162  	if p.trace {
  1163  		defer un(trace(p, "Selector"))
  1164  	}
  1165  
  1166  	sel := p.parseIdent()
  1167  
  1168  	return &ast.SelectorExpr{X: x, Sel: sel}
  1169  }
  1170  
  1171  func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr {
  1172  	if p.trace {
  1173  		defer un(trace(p, "TypeAssertion"))
  1174  	}
  1175  
  1176  	lparen := p.expect(token.LPAREN)
  1177  	var typ ast.Expr
  1178  	if p.tok == token.TYPE {
  1179  		// type switch: typ == nil
  1180  		p.next()
  1181  	} else {
  1182  		typ = p.parseType()
  1183  	}
  1184  	rparen := p.expect(token.RPAREN)
  1185  
  1186  	return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen}
  1187  }
  1188  
  1189  func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr {
  1190  	if p.trace {
  1191  		defer un(trace(p, "IndexOrSlice"))
  1192  	}
  1193  
  1194  	const N = 3 // change the 3 to 2 to disable 3-index slices
  1195  	lbrack := p.expect(token.LBRACK)
  1196  	p.exprLev++
  1197  	var index [N]ast.Expr
  1198  	var colons [N - 1]token.Pos
  1199  	if p.tok != token.COLON {
  1200  		index[0] = p.parseRhs()
  1201  	}
  1202  	ncolons := 0
  1203  	for p.tok == token.COLON && ncolons < len(colons) {
  1204  		colons[ncolons] = p.pos
  1205  		ncolons++
  1206  		p.next()
  1207  		if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF {
  1208  			index[ncolons] = p.parseRhs()
  1209  		}
  1210  	}
  1211  	p.exprLev--
  1212  	rbrack := p.expect(token.RBRACK)
  1213  
  1214  	if ncolons > 0 {
  1215  		// slice expression
  1216  		slice3 := false
  1217  		if ncolons == 2 {
  1218  			slice3 = true
  1219  			// Check presence of 2nd and 3rd index here rather than during type-checking
  1220  			// to prevent erroneous programs from passing through gofmt (was issue 7305).
  1221  			if index[1] == nil {
  1222  				p.error(colons[0], "2nd index required in 3-index slice")
  1223  				index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]}
  1224  			}
  1225  			if index[2] == nil {
  1226  				p.error(colons[1], "3rd index required in 3-index slice")
  1227  				index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack}
  1228  			}
  1229  		}
  1230  		return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack}
  1231  	}
  1232  
  1233  	return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack}
  1234  }
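
// The resulting nodes, roughly:
//
//	a[i]         // *ast.IndexExpr
//	a[lo:hi]     // *ast.SliceExpr with Slice3 == false
//	a[lo:hi:max] // *ast.SliceExpr with Slice3 == true (2nd and 3rd index required)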
  1235  
  1236  func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr {
  1237  	if p.trace {
  1238  		defer un(trace(p, "CallOrConversion"))
  1239  	}
  1240  
  1241  	lparen := p.expect(token.LPAREN)
  1242  	p.exprLev++
  1243  	var list []ast.Expr
  1244  	var ellipsis token.Pos
  1245  	for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() {
  1246  		list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...)
  1247  		if p.tok == token.ELLIPSIS {
  1248  			ellipsis = p.pos
  1249  			p.next()
  1250  		}
  1251  		if !p.atComma("argument list") {
  1252  			break
  1253  		}
  1254  		p.next()
  1255  	}
  1256  	p.exprLev--
  1257  	rparen := p.expectClosing(token.RPAREN, "argument list")
  1258  
  1259  	return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen}
  1260  }
  1261  
  1262  func (p *parser) parseValue(keyOk bool) ast.Expr {
  1263  	if p.trace {
  1264  		defer un(trace(p, "Element"))
  1265  	}
  1266  
  1267  	if p.tok == token.LBRACE {
  1268  		return p.parseLiteralValue(nil)
  1269  	}
  1270  
  1271  	// Because the parser doesn't know the composite literal type, it cannot
  1272  	// know if a key that's an identifier is a struct field name or a name
  1273  	// denoting a value. The former is not resolved by the parser or the
  1274  	// resolver.
  1275  	//
  1276  	// Instead, _try_ to resolve such a key if possible. If it resolves,
   1277  	// it either a) has been resolved correctly, or b) has been resolved incorrectly because
  1278  	// the key is a struct field with a name matching another identifier.
  1279  	// In the former case we are done, and in the latter case we don't
  1280  	// care because the type checker will do a separate field lookup.
  1281  	//
  1282  	// If the key does not resolve, it a) must be defined at the top
  1283  	// level in another file of the same package, the universe scope, or be
  1284  	// undeclared; or b) it is a struct field. In the former case, the type
  1285  	// checker can do a top-level lookup, and in the latter case it will do
  1286  	// a separate field lookup.
  1287  	x := p.checkExpr(p.parseExpr(keyOk))
  1288  	if keyOk {
  1289  		if p.tok == token.COLON {
  1290  			// Try to resolve the key but don't collect it
  1291  			// as unresolved identifier if it fails so that
  1292  			// we don't get (possibly false) errors about
  1293  			// undeclared names.
  1294  			p.tryResolve(x, false)
  1295  		} else {
  1296  			// not a key
  1297  			p.resolve(x)
  1298  		}
  1299  	}
  1300  
  1301  	return x
  1302  }
  1303  
  1304  func (p *parser) parseElement() ast.Expr {
  1305  	if p.trace {
  1306  		defer un(trace(p, "Element"))
  1307  	}
  1308  
  1309  	x := p.parseValue(true)
  1310  	if p.tok == token.COLON {
  1311  		colon := p.pos
  1312  		p.next()
  1313  		x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)}
  1314  	}
  1315  
  1316  	return x
  1317  }
  1318  
  1319  func (p *parser) parseElementList() (list []ast.Expr) {
  1320  	if p.trace {
  1321  		defer un(trace(p, "ElementList"))
  1322  	}
  1323  
  1324  	for p.tok != token.RBRACE && p.tok != token.EOF {
  1325  		list = append(list, p.parseElement())
  1326  		if !p.atComma("composite literal") {
  1327  			break
  1328  		}
  1329  		p.next()
  1330  	}
  1331  
  1332  	return
  1333  }
  1334  
  1335  func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr {
  1336  	if p.trace {
  1337  		defer un(trace(p, "LiteralValue"))
  1338  	}
  1339  
  1340  	lbrace := p.expect(token.LBRACE)
  1341  	var elts []ast.Expr
  1342  	p.exprLev++
  1343  	if p.tok != token.RBRACE {
  1344  		elts = p.parseElementList()
  1345  	}
  1346  	p.exprLev--
  1347  	rbrace := p.expectClosing(token.RBRACE, "composite literal")
  1348  	return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace}
  1349  }
  1350  
  1351  // checkExpr checks that x is an expression (and not a type).
  1352  func (p *parser) checkExpr(x ast.Expr) ast.Expr {
  1353  	switch unparen(x).(type) {
  1354  	case *ast.BadExpr:
  1355  	case *ast.Ident:
  1356  	case *ast.BasicLit:
  1357  	case *ast.FuncLit:
  1358  	case *ast.CompositeLit:
  1359  	case *ast.ParenExpr:
  1360  		panic("unreachable")
  1361  	case *ast.SelectorExpr:
  1362  	case *ast.IndexExpr:
  1363  	case *ast.SliceExpr:
  1364  	case *ast.TypeAssertExpr:
  1365  		// If t.Type == nil we have a type assertion of the form
  1366  		// y.(type), which is only allowed in type switch expressions.
  1367  		// It's hard to exclude those but for the case where we are in
  1368  		// a type switch. Instead be lenient and test this in the type
  1369  		// checker.
  1370  	case *ast.CallExpr:
  1371  	case *ast.StarExpr:
  1372  	case *ast.UnaryExpr:
  1373  	case *ast.BinaryExpr:
  1374  	default:
  1375  		// all other nodes are not proper expressions
  1376  		p.errorExpected(x.Pos(), "expression")
  1377  		x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1378  	}
  1379  	return x
  1380  }
  1381  
  1382  // isTypeName reports whether x is a (qualified) TypeName.
  1383  func isTypeName(x ast.Expr) bool {
  1384  	switch t := x.(type) {
  1385  	case *ast.BadExpr:
  1386  	case *ast.Ident:
  1387  	case *ast.SelectorExpr:
  1388  		_, isIdent := t.X.(*ast.Ident)
  1389  		return isIdent
  1390  	default:
  1391  		return false // all other nodes are not type names
  1392  	}
  1393  	return true
  1394  }
  1395  
  1396  // isLiteralType reports whether x is a legal composite literal type.
  1397  func isLiteralType(x ast.Expr) bool {
  1398  	switch t := x.(type) {
  1399  	case *ast.BadExpr:
  1400  	case *ast.Ident:
  1401  	case *ast.SelectorExpr:
  1402  		_, isIdent := t.X.(*ast.Ident)
  1403  		return isIdent
  1404  	case *ast.ArrayType:
  1405  	case *ast.StructType:
  1406  	case *ast.MapType:
  1407  	default:
  1408  		return false // all other nodes are not legal composite literal types
  1409  	}
  1410  	return true
  1411  }
  1412  
  1413  // If x is of the form *T, deref returns T, otherwise it returns x.
  1414  func deref(x ast.Expr) ast.Expr {
  1415  	if p, isPtr := x.(*ast.StarExpr); isPtr {
  1416  		x = p.X
  1417  	}
  1418  	return x
  1419  }
  1420  
  1421  // If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
  1422  func unparen(x ast.Expr) ast.Expr {
  1423  	if p, isParen := x.(*ast.ParenExpr); isParen {
  1424  		x = unparen(p.X)
  1425  	}
  1426  	return x
  1427  }
  1428  
  1429  // checkExprOrType checks that x is an expression or a type
  1430  // (and not a raw type such as [...]T).
  1431  //
  1432  func (p *parser) checkExprOrType(x ast.Expr) ast.Expr {
  1433  	switch t := unparen(x).(type) {
  1434  	case *ast.ParenExpr:
  1435  		panic("unreachable")
  1436  	case *ast.UnaryExpr:
  1437  	case *ast.ArrayType:
  1438  		if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis {
  1439  			p.error(len.Pos(), "expected array length, found '...'")
  1440  			x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())}
  1441  		}
  1442  	}
  1443  
  1444  	// all other nodes are expressions or types
  1445  	return x
  1446  }
  1447  
  1448  // If lhs is set and the result is an identifier, it is not resolved.
  1449  func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr {
  1450  	if p.trace {
  1451  		defer un(trace(p, "PrimaryExpr"))
  1452  	}
  1453  
  1454  	x := p.parseOperand(lhs)
  1455  L:
  1456  	for {
  1457  		switch p.tok {
  1458  		case token.PERIOD:
  1459  			p.next()
  1460  			if lhs {
  1461  				p.resolve(x)
  1462  			}
  1463  			switch p.tok {
  1464  			case token.IDENT:
  1465  				x = p.parseSelector(p.checkExprOrType(x))
  1466  			case token.LPAREN:
  1467  				x = p.parseTypeAssertion(p.checkExpr(x))
  1468  			default:
  1469  				pos := p.pos
  1470  				p.errorExpected(pos, "selector or type assertion")
  1471  				p.next() // make progress
  1472  				x = &ast.BadExpr{From: pos, To: p.pos}
  1473  			}
  1474  		case token.LBRACK:
  1475  			if lhs {
  1476  				p.resolve(x)
  1477  			}
  1478  			x = p.parseIndexOrSlice(p.checkExpr(x))
  1479  		case token.LPAREN:
  1480  			if lhs {
  1481  				p.resolve(x)
  1482  			}
  1483  			x = p.parseCallOrConversion(p.checkExprOrType(x))
  1484  		case token.LBRACE:
  1485  			if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) {
  1486  				if lhs {
  1487  					p.resolve(x)
  1488  				}
  1489  				x = p.parseLiteralValue(x)
  1490  			} else {
  1491  				break L
  1492  			}
  1493  		default:
  1494  			break L
  1495  		}
  1496  		lhs = false // no need to try to resolve again
  1497  	}
  1498  
  1499  	return x
  1500  }
  1501  
  1502  // If lhs is set and the result is an identifier, it is not resolved.
  1503  func (p *parser) parseUnaryExpr(lhs bool) ast.Expr {
  1504  	if p.trace {
  1505  		defer un(trace(p, "UnaryExpr"))
  1506  	}
  1507  
  1508  	switch p.tok {
  1509  	case token.ADD, token.SUB, token.NOT, token.XOR, token.AND:
  1510  		pos, op := p.pos, p.tok
  1511  		p.next()
  1512  		x := p.parseUnaryExpr(false)
  1513  		return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)}
  1514  
  1515  	case token.ARROW:
  1516  		// channel type or receive expression
  1517  		arrow := p.pos
  1518  		p.next()
  1519  
  1520  		// If the next token is token.CHAN we still don't know if it
  1521  		// is a channel type or a receive operation - we only know
  1522  		// once we have found the end of the unary expression. There
  1523  		// are two cases:
  1524  		//
  1525  		//   <- type  => (<-type) must be channel type
  1526  		//   <- expr  => <-(expr) is a receive from an expression
  1527  		//
  1528  		// In the first case, the arrow must be re-associated with
  1529  		// the channel type parsed already:
  1530  		//
  1531  		//   <- (chan type)    =>  (<-chan type)
  1532  		//   <- (chan<- type)  =>  (<-chan (<-type))
  1533  
  1534  		x := p.parseUnaryExpr(false)
  1535  
  1536  		// determine which case we have
  1537  		if typ, ok := x.(*ast.ChanType); ok {
  1538  			// (<-type)
  1539  
  1540  			// re-associate position info and <-
  1541  			dir := ast.SEND
  1542  			for ok && dir == ast.SEND {
  1543  				if typ.Dir == ast.RECV {
  1544  					// error: (<-type) is (<-(<-chan T))
  1545  					p.errorExpected(typ.Arrow, "'chan'")
  1546  				}
  1547  				arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow
  1548  				dir, typ.Dir = typ.Dir, ast.RECV
  1549  				typ, ok = typ.Value.(*ast.ChanType)
  1550  			}
  1551  			if dir == ast.SEND {
  1552  				p.errorExpected(arrow, "channel type")
  1553  			}
  1554  
  1555  			return x
  1556  		}
  1557  
  1558  		// <-(expr)
  1559  		return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)}
  1560  
  1561  	case token.MUL:
  1562  		// pointer type or unary "*" expression
  1563  		pos := p.pos
  1564  		p.next()
  1565  		x := p.parseUnaryExpr(false)
  1566  		return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)}
  1567  	}
  1568  
  1569  	return p.parsePrimaryExpr(lhs)
  1570  }
  1571  
  1572  func (p *parser) tokPrec() (token.Token, int) {
  1573  	tok := p.tok
  1574  	if p.inRhs && tok == token.ASSIGN {
  1575  		tok = token.EQL
  1576  	}
  1577  	return tok, tok.Precedence()
  1578  }
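
// Treating '=' like '==' on a right-hand side means a mistaken assignment in an
// expression context, e.g.
//
//	return x = y
//
// still parses as a binary expression; expect(token.EQL) in parseBinaryExpr then
// reports roughly "expected '==', found '='" instead of abandoning the statement.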
  1579  
  1580  // If lhs is set and the result is an identifier, it is not resolved.
  1581  func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr {
  1582  	if p.trace {
  1583  		defer un(trace(p, "BinaryExpr"))
  1584  	}
  1585  
  1586  	x := p.parseUnaryExpr(lhs)
  1587  	for _, prec := p.tokPrec(); prec >= prec1; prec-- {
  1588  		for {
  1589  			op, oprec := p.tokPrec()
  1590  			if oprec != prec {
  1591  				break
  1592  			}
  1593  			pos := p.expect(op)
  1594  			if lhs {
  1595  				p.resolve(x)
  1596  				lhs = false
  1597  			}
  1598  			y := p.parseBinaryExpr(false, prec+1)
  1599  			x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)}
  1600  		}
  1601  	}
  1602  
  1603  	return x
  1604  }
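
// Precedence (tokPrec) controls the nesting; for example
//
//	1 + 2*3 == 7
//
// parses as (1 + (2*3)) == 7, since '*' binds tighter than '+', which binds
// tighter than '=='.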
  1605  
  1606  // If lhs is set and the result is an identifier, it is not resolved.
  1607  // The result may be a type or even a raw type ([...]int). Callers must
  1608  // check the result (using checkExpr or checkExprOrType), depending on
  1609  // context.
  1610  func (p *parser) parseExpr(lhs bool) ast.Expr {
  1611  	if p.trace {
  1612  		defer un(trace(p, "Expression"))
  1613  	}
  1614  
  1615  	return p.parseBinaryExpr(lhs, token.LowestPrec+1)
  1616  }
  1617  
  1618  func (p *parser) parseRhs() ast.Expr {
  1619  	old := p.inRhs
  1620  	p.inRhs = true
  1621  	x := p.checkExpr(p.parseExpr(false))
  1622  	p.inRhs = old
  1623  	return x
  1624  }
  1625  
  1626  func (p *parser) parseRhsOrType() ast.Expr {
  1627  	old := p.inRhs
  1628  	p.inRhs = true
  1629  	x := p.checkExprOrType(p.parseExpr(false))
  1630  	p.inRhs = old
  1631  	return x
  1632  }
  1633  
  1634  // ----------------------------------------------------------------------------
  1635  // Statements
  1636  
  1637  // Parsing modes for parseSimpleStmt.
  1638  const (
  1639  	basic = iota
  1640  	labelOk
  1641  	rangeOk
  1642  )
  1643  
  1644  // parseSimpleStmt returns true as 2nd result if it parsed the assignment
  1645  // of a range clause (with mode == rangeOk). The returned statement is an
  1646  // assignment with a right-hand side that is a single unary expression of
  1647  // the form "range x". No guarantees are given for the left-hand side.
  1648  func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) {
  1649  	if p.trace {
  1650  		defer un(trace(p, "SimpleStmt"))
  1651  	}
  1652  
  1653  	x := p.parseLhsList()
  1654  
  1655  	switch p.tok {
  1656  	case
  1657  		token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
  1658  		token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
  1659  		token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
  1660  		token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
  1661  		// assignment statement, possibly part of a range clause
  1662  		pos, tok := p.pos, p.tok
  1663  		p.next()
  1664  		var y []ast.Expr
  1665  		isRange := false
  1666  		if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) {
  1667  			pos := p.pos
  1668  			p.next()
  1669  			y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  1670  			isRange = true
  1671  		} else {
  1672  			y = p.parseRhsList()
  1673  		}
  1674  		as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y}
  1675  		if tok == token.DEFINE {
  1676  			p.shortVarDecl(as, x)
  1677  		}
  1678  		return as, isRange
  1679  	}
  1680  
  1681  	if len(x) > 1 {
  1682  		p.errorExpected(x[0].Pos(), "1 expression")
  1683  		// continue with first expression
  1684  	}
  1685  
  1686  	switch p.tok {
  1687  	case token.COLON:
  1688  		// labeled statement
  1689  		colon := p.pos
  1690  		p.next()
  1691  		if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent {
  1692  			// Go spec: The scope of a label is the body of the function
  1693  			// in which it is declared and excludes the body of any nested
  1694  			// function.
  1695  			stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()}
  1696  			p.declare(stmt, nil, p.labelScope, ast.Lbl, label)
  1697  			return stmt, false
  1698  		}
  1699  		// The label declaration typically starts at x[0].Pos(), but the label
  1700  		// declaration may be erroneous due to a token after that position (and
  1701  		// before the ':'). If SpuriousErrors is not set, the (only) error re-
  1702  		// ported for the line is the illegal label error instead of the token
  1703  		// before the ':' that caused the problem. Thus, use the (latest) colon
  1704  		// position for error reporting.
  1705  		p.error(colon, "illegal label declaration")
  1706  		return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false
  1707  
  1708  	case token.ARROW:
  1709  		// send statement
  1710  		arrow := p.pos
  1711  		p.next()
  1712  		y := p.parseRhs()
  1713  		return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false
  1714  
  1715  	case token.INC, token.DEC:
  1716  		// increment or decrement
  1717  		s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok}
  1718  		p.next()
  1719  		return s, false
  1720  	}
  1721  
  1722  	// expression
  1723  	return &ast.ExprStmt{X: x[0]}, false
  1724  }
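
        // Editor's sketch of the simple-statement forms dispatched above, with the
        // node each one produces (mode permitting):
        //
        //	x = y + 1       // *ast.AssignStmt (token.ASSIGN)
        //	i, j := 0, n    // *ast.AssignStmt (token.DEFINE), short variable declaration
        //	k, v := range m // rangeOk only: Rhs is the single unary "range m" expression
        //	loop: ...       // labelOk only: *ast.LabeledStmt
        //	ch <- v         // *ast.SendStmt
        //	i++             // *ast.IncDecStmt
        //	f(x)            // *ast.ExprStmt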
  1725  
  1726  func (p *parser) parseCallExpr(callType string) *ast.CallExpr {
  1727  	x := p.parseRhsOrType() // could be a conversion: (some type)(x)
  1728  	if call, isCall := x.(*ast.CallExpr); isCall {
  1729  		return call
  1730  	}
  1731  	if _, isBad := x.(*ast.BadExpr); !isBad {
  1732  		// only report error if it's a new one
  1733  		p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType))
  1734  	}
  1735  	return nil
  1736  }
  1737  
  1738  func (p *parser) parseGoStmt() ast.Stmt {
  1739  	if p.trace {
  1740  		defer un(trace(p, "GoStmt"))
  1741  	}
  1742  
  1743  	pos := p.expect(token.GO)
  1744  	call := p.parseCallExpr("go")
  1745  	p.expectSemi()
  1746  	if call == nil {
  1747  		return &ast.BadStmt{From: pos, To: pos + 2} // len("go")
  1748  	}
  1749  
  1750  	return &ast.GoStmt{Go: pos, Call: call}
  1751  }
  1752  
  1753  func (p *parser) parseDeferStmt() ast.Stmt {
  1754  	if p.trace {
  1755  		defer un(trace(p, "DeferStmt"))
  1756  	}
  1757  
  1758  	pos := p.expect(token.DEFER)
  1759  	call := p.parseCallExpr("defer")
  1760  	p.expectSemi()
  1761  	if call == nil {
  1762  		return &ast.BadStmt{From: pos, To: pos + 5} // len("defer")
  1763  	}
  1764  
  1765  	return &ast.DeferStmt{Defer: pos, Call: call}
  1766  }
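
        // Editor's note: parseGoStmt and parseDeferStmt both require an actual call;
        // a bare function value is rejected via parseCallExpr above. For example
        // (worker, job, and mu are illustrative names only):
        //
        //	go worker(job)    // ok: *ast.GoStmt wrapping an *ast.CallExpr
        //	defer mu.Unlock() // ok: *ast.DeferStmt
        //	defer mu.Unlock   // error: "function must be invoked in defer statement"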
  1767  
  1768  func (p *parser) parseReturnStmt() *ast.ReturnStmt {
  1769  	if p.trace {
  1770  		defer un(trace(p, "ReturnStmt"))
  1771  	}
  1772  
  1773  	pos := p.pos
  1774  	p.expect(token.RETURN)
  1775  	var x []ast.Expr
  1776  	if p.tok != token.SEMICOLON && p.tok != token.RBRACE {
  1777  		x = p.parseRhsList()
  1778  	}
  1779  	p.expectSemi()
  1780  
  1781  	return &ast.ReturnStmt{Return: pos, Results: x}
  1782  }
  1783  
  1784  func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt {
  1785  	if p.trace {
  1786  		defer un(trace(p, "BranchStmt"))
  1787  	}
  1788  
  1789  	pos := p.expect(tok)
  1790  	var label *ast.Ident
  1791  	if tok != token.FALLTHROUGH && p.tok == token.IDENT {
  1792  		label = p.parseIdent()
  1793  		// add to list of unresolved targets
  1794  		n := len(p.targetStack) - 1
  1795  		p.targetStack[n] = append(p.targetStack[n], label)
  1796  	}
  1797  	p.expectSemi()
  1798  
  1799  	return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label}
  1800  }
  1801  
  1802  func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr {
  1803  	if s == nil {
  1804  		return nil
  1805  	}
  1806  	if es, isExpr := s.(*ast.ExprStmt); isExpr {
  1807  		return p.checkExpr(es.X)
  1808  	}
  1809  	p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind))
  1810  	return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())}
  1811  }
  1812  
  1813  func (p *parser) parseIfStmt() *ast.IfStmt {
  1814  	if p.trace {
  1815  		defer un(trace(p, "IfStmt"))
  1816  	}
  1817  
  1818  	pos := p.expect(token.IF)
  1819  	p.openScope()
  1820  	defer p.closeScope()
  1821  
  1822  	var s ast.Stmt
  1823  	var x ast.Expr
  1824  	{
  1825  		prevLev := p.exprLev
  1826  		p.exprLev = -1
  1827  		if p.tok == token.SEMICOLON {
  1828  			p.next()
  1829  			x = p.parseRhs()
  1830  		} else {
  1831  			s, _ = p.parseSimpleStmt(basic)
  1832  			if p.tok == token.SEMICOLON {
  1833  				p.next()
  1834  				x = p.parseRhs()
  1835  			} else {
  1836  				x = p.makeExpr(s, "boolean expression")
  1837  				s = nil
  1838  			}
  1839  		}
  1840  		p.exprLev = prevLev
  1841  	}
  1842  
  1843  	body := p.parseBlockStmt()
  1844  	var else_ ast.Stmt
  1845  	if p.tok == token.ELSE {
  1846  		p.next()
  1847  		else_ = p.parseStmt()
  1848  	} else {
  1849  		p.expectSemi()
  1850  	}
  1851  
  1852  	return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_}
  1853  }
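
        // Editor's sketch of the if-header shapes accepted above:
        //
        //	if x > 0 { ... }                  // Cond only
        //	if err := f(); err != nil { ... } // Init (simple statement) + Cond
        //
        // In the second form Init holds the short variable declaration and Cond the
        // comparison; the scope opened at the top of parseIfStmt makes err visible
        // in both the body and any else branch.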
  1854  
  1855  func (p *parser) parseTypeList() (list []ast.Expr) {
  1856  	if p.trace {
  1857  		defer un(trace(p, "TypeList"))
  1858  	}
  1859  
  1860  	list = append(list, p.parseType())
  1861  	for p.tok == token.COMMA {
  1862  		p.next()
  1863  		list = append(list, p.parseType())
  1864  	}
  1865  
  1866  	return
  1867  }
  1868  
  1869  func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause {
  1870  	if p.trace {
  1871  		defer un(trace(p, "CaseClause"))
  1872  	}
  1873  
  1874  	pos := p.pos
  1875  	var list []ast.Expr
  1876  	if p.tok == token.CASE {
  1877  		p.next()
  1878  		if typeSwitch {
  1879  			list = p.parseTypeList()
  1880  		} else {
  1881  			list = p.parseRhsList()
  1882  		}
  1883  	} else {
  1884  		p.expect(token.DEFAULT)
  1885  	}
  1886  
  1887  	colon := p.expect(token.COLON)
  1888  	p.openScope()
  1889  	body := p.parseStmtList()
  1890  	p.closeScope()
  1891  
  1892  	return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body}
  1893  }
  1894  
  1895  func isTypeSwitchAssert(x ast.Expr) bool {
  1896  	a, ok := x.(*ast.TypeAssertExpr)
  1897  	return ok && a.Type == nil
  1898  }
  1899  
  1900  func isTypeSwitchGuard(s ast.Stmt) bool {
  1901  	switch t := s.(type) {
  1902  	case *ast.ExprStmt:
  1903  		// x.(nil)
  1904  		return isTypeSwitchAssert(t.X)
  1905  	case *ast.AssignStmt:
  1906  		// v := x.(nil)
  1907  		return len(t.Lhs) == 1 && t.Tok == token.DEFINE && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0])
  1908  	}
  1909  	return false
  1910  }
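
        // Editor's note: the "x.(nil)" shorthand in the comments above describes the
        // AST, not source syntax. In source a type switch guard is written with the
        // type keyword:
        //
        //	switch x.(type) { ... }
        //	switch v := x.(type) { ... }
        //
        // Both forms yield an *ast.TypeAssertExpr whose Type field is nil, which is
        // exactly what isTypeSwitchAssert checks.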
  1911  
  1912  func (p *parser) parseSwitchStmt() ast.Stmt {
  1913  	if p.trace {
  1914  		defer un(trace(p, "SwitchStmt"))
  1915  	}
  1916  
  1917  	pos := p.expect(token.SWITCH)
  1918  	p.openScope()
  1919  	defer p.closeScope()
  1920  
  1921  	var s1, s2 ast.Stmt
  1922  	if p.tok != token.LBRACE {
  1923  		prevLev := p.exprLev
  1924  		p.exprLev = -1
  1925  		if p.tok != token.SEMICOLON {
  1926  			s2, _ = p.parseSimpleStmt(basic)
  1927  		}
  1928  		if p.tok == token.SEMICOLON {
  1929  			p.next()
  1930  			s1 = s2
  1931  			s2 = nil
  1932  			if p.tok != token.LBRACE {
  1933  				// A TypeSwitchGuard may declare a variable in addition
  1934  				// to the variable declared in the initial SimpleStmt.
  1935  				// Introduce extra scope to avoid redeclaration errors:
  1936  				//
  1937  				//	switch t := 0; t := x.(T) { ... }
  1938  				//
  1939  				// (this code is not valid Go because the first t
  1940  				// cannot be accessed and thus is never used; the extra
  1941  				// scope is needed only for the correct error message).
  1942  				//
  1943  				// If we don't have a type switch, s2 must be an expression.
  1944  				// Having the extra nested but empty scope won't affect it.
  1945  				p.openScope()
  1946  				defer p.closeScope()
  1947  				s2, _ = p.parseSimpleStmt(basic)
  1948  			}
  1949  		}
  1950  		p.exprLev = prevLev
  1951  	}
  1952  
  1953  	typeSwitch := isTypeSwitchGuard(s2)
  1954  	lbrace := p.expect(token.LBRACE)
  1955  	var list []ast.Stmt
  1956  	for p.tok == token.CASE || p.tok == token.DEFAULT {
  1957  		list = append(list, p.parseCaseClause(typeSwitch))
  1958  	}
  1959  	rbrace := p.expect(token.RBRACE)
  1960  	p.expectSemi()
  1961  	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  1962  
  1963  	if typeSwitch {
  1964  		return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body}
  1965  	}
  1966  
  1967  	return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body}
  1968  }
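
        // Editor's sketch of the switch headers distinguished above:
        //
        //	switch x := f(); x { ... }   // *ast.SwitchStmt: Init + Tag
        //	switch { ... }               // *ast.SwitchStmt with nil Tag
        //	switch v := x.(type) { ... } // *ast.TypeSwitchStmt: Assign holds the guard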
  1969  
  1970  func (p *parser) parseCommClause() *ast.CommClause {
  1971  	if p.trace {
  1972  		defer un(trace(p, "CommClause"))
  1973  	}
  1974  
  1975  	p.openScope()
  1976  	pos := p.pos
  1977  	var comm ast.Stmt
  1978  	if p.tok == token.CASE {
  1979  		p.next()
  1980  		lhs := p.parseLhsList()
  1981  		if p.tok == token.ARROW {
  1982  			// SendStmt
  1983  			if len(lhs) > 1 {
  1984  				p.errorExpected(lhs[0].Pos(), "1 expression")
  1985  				// continue with first expression
  1986  			}
  1987  			arrow := p.pos
  1988  			p.next()
  1989  			rhs := p.parseRhs()
  1990  			comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs}
  1991  		} else {
  1992  			// RecvStmt
  1993  			if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE {
  1994  				// RecvStmt with assignment
  1995  				if len(lhs) > 2 {
  1996  					p.errorExpected(lhs[0].Pos(), "1 or 2 expressions")
  1997  					// continue with first two expressions
  1998  					lhs = lhs[0:2]
  1999  				}
  2000  				pos := p.pos
  2001  				p.next()
  2002  				rhs := p.parseRhs()
  2003  				as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}}
  2004  				if tok == token.DEFINE {
  2005  					p.shortVarDecl(as, lhs)
  2006  				}
  2007  				comm = as
  2008  			} else {
  2009  				// lhs must be single receive operation
  2010  				if len(lhs) > 1 {
  2011  					p.errorExpected(lhs[0].Pos(), "1 expression")
  2012  					// continue with first expression
  2013  				}
  2014  				comm = &ast.ExprStmt{X: lhs[0]}
  2015  			}
  2016  		}
  2017  	} else {
  2018  		p.expect(token.DEFAULT)
  2019  	}
  2020  
  2021  	colon := p.expect(token.COLON)
  2022  	body := p.parseStmtList()
  2023  	p.closeScope()
  2024  
  2025  	return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body}
  2026  }
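
        // Editor's sketch of the comm-clause forms handled above:
        //
        //	case ch <- v:      // SendStmt
        //	case x := <-ch:    // RecvStmt with short variable declaration
        //	case x, ok = <-ch: // RecvStmt with assignment (1 or 2 lhs expressions)
        //	case <-ch:         // bare receive, kept as an *ast.ExprStmt
        //	default: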
  2027  
  2028  func (p *parser) parseSelectStmt() *ast.SelectStmt {
  2029  	if p.trace {
  2030  		defer un(trace(p, "SelectStmt"))
  2031  	}
  2032  
  2033  	pos := p.expect(token.SELECT)
  2034  	lbrace := p.expect(token.LBRACE)
  2035  	var list []ast.Stmt
  2036  	for p.tok == token.CASE || p.tok == token.DEFAULT {
  2037  		list = append(list, p.parseCommClause())
  2038  	}
  2039  	rbrace := p.expect(token.RBRACE)
  2040  	p.expectSemi()
  2041  	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}
  2042  
  2043  	return &ast.SelectStmt{Select: pos, Body: body}
  2044  }
  2045  
  2046  func (p *parser) parseForStmt() ast.Stmt {
  2047  	if p.trace {
  2048  		defer un(trace(p, "ForStmt"))
  2049  	}
  2050  
  2051  	pos := p.expect(token.FOR)
  2052  	p.openScope()
  2053  	defer p.closeScope()
  2054  
  2055  	var s1, s2, s3 ast.Stmt
  2056  	var isRange bool
  2057  	if p.tok != token.LBRACE {
  2058  		prevLev := p.exprLev
  2059  		p.exprLev = -1
  2060  		if p.tok != token.SEMICOLON {
  2061  			if p.tok == token.RANGE {
  2062  				// "for range x" (nil lhs in assignment)
  2063  				pos := p.pos
  2064  				p.next()
  2065  				y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
  2066  				s2 = &ast.AssignStmt{Rhs: y}
  2067  				isRange = true
  2068  			} else {
  2069  				s2, isRange = p.parseSimpleStmt(rangeOk)
  2070  			}
  2071  		}
  2072  		if !isRange && p.tok == token.SEMICOLON {
  2073  			p.next()
  2074  			s1 = s2
  2075  			s2 = nil
  2076  			if p.tok != token.SEMICOLON {
  2077  				s2, _ = p.parseSimpleStmt(basic)
  2078  			}
  2079  			p.expectSemi()
  2080  			if p.tok != token.LBRACE {
  2081  				s3, _ = p.parseSimpleStmt(basic)
  2082  			}
  2083  		}
  2084  		p.exprLev = prevLev
  2085  	}
  2086  
  2087  	body := p.parseBlockStmt()
  2088  	p.expectSemi()
  2089  
  2090  	if isRange {
  2091  		as := s2.(*ast.AssignStmt)
  2092  		// check lhs
  2093  		var key, value ast.Expr
  2094  		switch len(as.Lhs) {
  2095  		case 0:
  2096  			// nothing to do
  2097  		case 1:
  2098  			key = as.Lhs[0]
  2099  		case 2:
  2100  			key, value = as.Lhs[0], as.Lhs[1]
  2101  		default:
  2102  			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
  2103  			return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
  2104  		}
  2105  		// parseSimpleStmt returned a right-hand side that
  2106  		// is a single unary expression of the form "range x"
  2107  		x := as.Rhs[0].(*ast.UnaryExpr).X
  2108  		return &ast.RangeStmt{
  2109  			For:    pos,
  2110  			Key:    key,
  2111  			Value:  value,
  2112  			TokPos: as.TokPos,
  2113  			Tok:    as.Tok,
  2114  			X:      x,
  2115  			Body:   body,
  2116  		}
  2117  	}
  2118  
  2119  	// regular for statement
  2120  	return &ast.ForStmt{
  2121  		For:  pos,
  2122  		Init: s1,
  2123  		Cond: p.makeExpr(s2, "boolean or range expression"),
  2124  		Post: s3,
  2125  		Body: body,
  2126  	}
  2127  }
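
        // Editor's sketch of the for-statement shapes produced above:
        //
        //	for { ... }                    // *ast.ForStmt: Init, Cond, Post all nil
        //	for i < n { ... }              // *ast.ForStmt: Cond only
        //	for i := 0; i < n; i++ { ... } // *ast.ForStmt: Init, Cond, Post
        //	for k, v := range m { ... }    // *ast.RangeStmt: Key=k, Value=v, X=m
        //	for range ch { ... }           // *ast.RangeStmt with nil Key and Value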
  2128  
  2129  func (p *parser) parseStmt() (s ast.Stmt) {
  2130  	if p.trace {
  2131  		defer un(trace(p, "Statement"))
  2132  	}
  2133  
  2134  	switch p.tok {
  2135  	case token.CONST, token.TYPE, token.VAR:
  2136  		s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
  2137  	case
  2138  		// tokens that may start an expression
  2139  		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
  2140  		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
  2141  		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
  2142  		s, _ = p.parseSimpleStmt(labelOk)
  2143  		// because of the required look-ahead, labeled statements are
  2144  		// parsed by parseSimpleStmt - don't expect a semicolon after
  2145  		// them
  2146  		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
  2147  			p.expectSemi()
  2148  		}
  2149  	case token.GO:
  2150  		s = p.parseGoStmt()
  2151  	case token.DEFER:
  2152  		s = p.parseDeferStmt()
  2153  	case token.RETURN:
  2154  		s = p.parseReturnStmt()
  2155  	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
  2156  		s = p.parseBranchStmt(p.tok)
  2157  	case token.LBRACE:
  2158  		s = p.parseBlockStmt()
  2159  		p.expectSemi()
  2160  	case token.IF:
  2161  		s = p.parseIfStmt()
  2162  	case token.SWITCH:
  2163  		s = p.parseSwitchStmt()
  2164  	case token.SELECT:
  2165  		s = p.parseSelectStmt()
  2166  	case token.FOR:
  2167  		s = p.parseForStmt()
  2168  	case token.SEMICOLON:
  2169  		// Is it ever possible to have an implicit semicolon
  2170  		// producing an empty statement in a valid program?
  2171  		// (handle correctly anyway)
  2172  		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
  2173  		p.next()
  2174  	case token.RBRACE:
  2175  		// a semicolon may be omitted before a closing "}"
  2176  		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
  2177  	default:
  2178  		// no statement found
  2179  		pos := p.pos
  2180  		p.errorExpected(pos, "statement")
  2181  		syncStmt(p)
  2182  		s = &ast.BadStmt{From: pos, To: p.pos}
  2183  	}
  2184  
  2185  	return
  2186  }
  2187  
  2188  // ----------------------------------------------------------------------------
  2189  // Declarations
  2190  
  2191  type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
  2192  
  2193  func isValidImport(lit string) bool {
  2194  	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
  2195  	s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
  2196  	for _, r := range s {
  2197  		if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
  2198  			return false
  2199  		}
  2200  	}
  2201  	return s != ""
  2202  }
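
        // Editor's sketch: isValidImport rejects empty paths and paths containing
        // spaces, non-graphic characters, or any character from illegalChars:
        //
        //	import "fmt" // ok
        //	import "a b" // invalid: contains a space
        //	import ""    // invalid: empty path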
  2203  
  2204  func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2205  	if p.trace {
  2206  		defer un(trace(p, "ImportSpec"))
  2207  	}
  2208  
  2209  	var ident *ast.Ident
  2210  	switch p.tok {
  2211  	case token.PERIOD:
  2212  		ident = &ast.Ident{NamePos: p.pos, Name: "."}
  2213  		p.next()
  2214  	case token.IDENT:
  2215  		ident = p.parseIdent()
  2216  	}
  2217  
  2218  	pos := p.pos
  2219  	var path string
  2220  	if p.tok == token.STRING {
  2221  		path = p.lit
  2222  		if !isValidImport(path) {
  2223  			p.error(pos, "invalid import path: "+path)
  2224  		}
  2225  		p.next()
  2226  	} else {
  2227  		p.expect(token.STRING) // use expect() error handling
  2228  	}
  2229  	p.expectSemi() // call before accessing p.lineComment
  2230  
  2231  	// collect imports
  2232  	spec := &ast.ImportSpec{
  2233  		Doc:     doc,
  2234  		Name:    ident,
  2235  		Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
  2236  		Comment: p.lineComment,
  2237  	}
  2238  	p.imports = append(p.imports, spec)
  2239  
  2240  	return spec
  2241  }
  2242  
  2243  func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
  2244  	if p.trace {
  2245  		defer un(trace(p, keyword.String()+"Spec"))
  2246  	}
  2247  
  2248  	pos := p.pos
  2249  	idents := p.parseIdentList()
  2250  	typ := p.tryType()
  2251  	var values []ast.Expr
  2252  	// always permit optional initialization for more tolerant parsing
  2253  	if p.tok == token.ASSIGN {
  2254  		p.next()
  2255  		values = p.parseRhsList()
  2256  	}
  2257  	p.expectSemi() // call before accessing p.lineComment
  2258  
  2259  	switch keyword {
  2260  	case token.VAR:
  2261  		if typ == nil && values == nil {
  2262  			p.error(pos, "missing variable type or initialization")
  2263  		}
  2264  	case token.CONST:
  2265  		if values == nil && (iota == 0 || typ != nil) {
  2266  			p.error(pos, "missing constant value")
  2267  		}
  2268  	}
  2269  
  2270  	// Go spec: The scope of a constant or variable identifier declared inside
  2271  	// a function begins at the end of the ConstSpec or VarSpec and ends at
  2272  	// the end of the innermost containing block.
  2273  	// (Global identifiers are resolved in a separate phase after parsing.)
  2274  	spec := &ast.ValueSpec{
  2275  		Doc:     doc,
  2276  		Names:   idents,
  2277  		Type:    typ,
  2278  		Values:  values,
  2279  		Comment: p.lineComment,
  2280  	}
  2281  	kind := ast.Con
  2282  	if keyword == token.VAR {
  2283  		kind = ast.Var
  2284  	}
  2285  	p.declare(spec, iota, p.topScope, kind, idents...)
  2286  
  2287  	return spec
  2288  }
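
        // Editor's sketch of the value-spec checks above:
        //
        //	var x, y int // ok: type, no values
        //	var z = f()  // ok: values, no type
        //	var w        // error: "missing variable type or initialization"
        //
        //	const (
        //		a = iota // first spec (iota == 0) must carry a value
        //		b        // ok: no type, no value - previous expression list is repeated
        //		c int    // error: "missing constant value" (type given, value missing)
        //	)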
  2289  
  2290  func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
  2291  	if p.trace {
  2292  		defer un(trace(p, "TypeSpec"))
  2293  	}
  2294  
  2295  	ident := p.parseIdent()
  2296  
  2297  	// Go spec: The scope of a type identifier declared inside a function begins
  2298  	// at the identifier in the TypeSpec and ends at the end of the innermost
  2299  	// containing block.
  2300  	// (Global identifiers are resolved in a separate phase after parsing.)
  2301  	spec := &ast.TypeSpec{Doc: doc, Name: ident}
  2302  	p.declare(spec, nil, p.topScope, ast.Typ, ident)
  2303  
  2304  	spec.Type = p.parseType()
  2305  	p.expectSemi() // call before accessing p.lineComment
  2306  	spec.Comment = p.lineComment
  2307  
  2308  	return spec
  2309  }
  2310  
  2311  func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
  2312  	if p.trace {
  2313  		defer un(trace(p, "GenDecl("+keyword.String()+")"))
  2314  	}
  2315  
  2316  	doc := p.leadComment
  2317  	pos := p.expect(keyword)
  2318  	var lparen, rparen token.Pos
  2319  	var list []ast.Spec
  2320  	if p.tok == token.LPAREN {
  2321  		lparen = p.pos
  2322  		p.next()
  2323  		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
  2324  			list = append(list, f(p.leadComment, keyword, iota))
  2325  		}
  2326  		rparen = p.expect(token.RPAREN)
  2327  		p.expectSemi()
  2328  	} else {
  2329  		list = append(list, f(nil, keyword, 0))
  2330  	}
  2331  
  2332  	return &ast.GenDecl{
  2333  		Doc:    doc,
  2334  		TokPos: pos,
  2335  		Tok:    keyword,
  2336  		Lparen: lparen,
  2337  		Specs:  list,
  2338  		Rparen: rparen,
  2339  	}
  2340  }
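
        // Editor's sketch: parseGenDecl accepts both the single-spec and the grouped
        // (parenthesized) forms, calling f once per spec with a running iota index:
        //
        //	import "fmt" // single spec; Lparen and Rparen remain the zero position
        //
        //	var (        // grouped; iota counts the specs starting at 0
        //		a, b int
        //		c    = 1.5
        //	)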
  2341  
  2342  func (p *parser) parseFuncDecl() *ast.FuncDecl {
  2343  	if p.trace {
  2344  		defer un(trace(p, "FunctionDecl"))
  2345  	}
  2346  
  2347  	doc := p.leadComment
  2348  	pos := p.expect(token.FUNC)
  2349  	scope := ast.NewScope(p.topScope) // function scope
  2350  
  2351  	var recv *ast.FieldList
  2352  	if p.tok == token.LPAREN {
  2353  		recv = p.parseParameters(scope, false)
  2354  	}
  2355  
  2356  	ident := p.parseIdent()
  2357  
  2358  	params, results := p.parseSignature(scope)
  2359  
  2360  	var body *ast.BlockStmt
  2361  	if p.tok == token.LBRACE {
  2362  		body = p.parseBody(scope)
  2363  	}
  2364  	p.expectSemi()
  2365  
  2366  	decl := &ast.FuncDecl{
  2367  		Doc:  doc,
  2368  		Recv: recv,
  2369  		Name: ident,
  2370  		Type: &ast.FuncType{
  2371  			Func:    pos,
  2372  			Params:  params,
  2373  			Results: results,
  2374  		},
  2375  		Body: body,
  2376  	}
  2377  	if recv == nil {
  2378  		// Go spec: The scope of an identifier denoting a constant, type,
  2379  		// variable, or function (but not method) declared at top level
  2380  		// (outside any function) is the package block.
  2381  		//
  2382  		// init() functions cannot be referred to and there may
  2383  		// be more than one - don't put them in the pkgScope
  2384  		if ident.Name != "init" {
  2385  			p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
  2386  		}
  2387  	}
  2388  
  2389  	return decl
  2390  }
  2391  
  2392  func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
  2393  	if p.trace {
  2394  		defer un(trace(p, "Declaration"))
  2395  	}
  2396  
  2397  	var f parseSpecFunction
  2398  	switch p.tok {
  2399  	case token.CONST, token.VAR:
  2400  		f = p.parseValueSpec
  2401  
  2402  	case token.TYPE:
  2403  		f = p.parseTypeSpec
  2404  
  2405  	case token.FUNC:
  2406  		return p.parseFuncDecl()
  2407  
  2408  	default:
  2409  		pos := p.pos
  2410  		p.errorExpected(pos, "declaration")
  2411  		sync(p)
  2412  		return &ast.BadDecl{From: pos, To: p.pos}
  2413  	}
  2414  
  2415  	return p.parseGenDecl(p.tok, f)
  2416  }
  2417  
  2418  // ----------------------------------------------------------------------------
  2419  // Source files
  2420  
  2421  func (p *parser) parseFile() *ast.File {
  2422  	if p.trace {
  2423  		defer un(trace(p, "File"))
  2424  	}
  2425  
  2426  	// Don't bother parsing the rest if we had errors scanning the first token.
  2427  	// Likely not a Go source file at all.
  2428  	if p.errors.Len() != 0 {
  2429  		return nil
  2430  	}
  2431  
  2432  	// package clause
  2433  	doc := p.leadComment
  2434  	pos := p.expect(token.PACKAGE)
  2435  	// Go spec: The package clause is not a declaration;
  2436  	// the package name does not appear in any scope.
  2437  	ident := p.parseIdent()
  2438  	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
  2439  		p.error(p.pos, "invalid package name _")
  2440  	}
  2441  	p.expectSemi()
  2442  
  2443  	// Don't bother parsing the rest if we had errors parsing the package clause.
  2444  	// Likely not a Go source file at all.
  2445  	if p.errors.Len() != 0 {
  2446  		return nil
  2447  	}
  2448  
  2449  	p.openScope()
  2450  	p.pkgScope = p.topScope
  2451  	var decls []ast.Decl
  2452  	if p.mode&PackageClauseOnly == 0 {
  2453  		// import decls
  2454  		for p.tok == token.IMPORT {
  2455  			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
  2456  		}
  2457  
  2458  		if p.mode&ImportsOnly == 0 {
  2459  			// rest of package body
  2460  			for p.tok != token.EOF {
  2461  				decls = append(decls, p.parseDecl(syncDecl))
  2462  			}
  2463  		}
  2464  	}
  2465  	p.closeScope()
  2466  	assert(p.topScope == nil, "unbalanced scopes")
  2467  	assert(p.labelScope == nil, "unbalanced label scopes")
  2468  
  2469  	// resolve global identifiers within the same file
  2470  	i := 0
  2471  	for _, ident := range p.unresolved {
  2472  		// i <= index for current ident
  2473  		assert(ident.Obj == unresolved, "object already resolved")
  2474  		ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
  2475  		if ident.Obj == nil {
  2476  			p.unresolved[i] = ident
  2477  			i++
  2478  		}
  2479  	}
  2480  
  2481  	return &ast.File{
  2482  		Doc:        doc,
  2483  		Package:    pos,
  2484  		Name:       ident,
  2485  		Decls:      decls,
  2486  		Scope:      p.pkgScope,
  2487  		Imports:    p.imports,
  2488  		Unresolved: p.unresolved[0:i],
  2489  		Comments:   p.comments,
  2490  	}
  2491  }
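
        // Editor's sketch (not part of the original source): client code reaches
        // parseFile through the exported entry points in interface.go, e.g.
        //
        //	fset := token.NewFileSet()
        //	f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
        //	if err != nil {
        //		log.Fatal(err)
        //	}
        //	fmt.Println(f.Name.Name)        // package name
        //	for _, imp := range f.Imports { // collected by parseImportSpec above
        //		fmt.Println(imp.Path.Value)
        //	}
        //
        // where src may be a []byte, string, or io.Reader, and the caller is assumed
        // to import fmt, log, go/parser, and go/token.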