github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/go/parser/parser.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package parser implements a parser for Go source files. Input may be
// provided in a variety of forms (see the various Parse* functions); the
// output is an abstract syntax tree (AST) representing the Go source. The
// parser is invoked through one of the Parse* functions.
//
// The parser accepts a larger language than is syntactically permitted by
// the Go spec, for simplicity, and for improved robustness in the presence
// of syntax errors. For instance, in method declarations, the receiver is
// treated like an ordinary parameter list and thus may contain multiple
// entries where the spec permits exactly one. Consequently, the corresponding
// field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
//
package parser

import (
	"fmt"
	"go/ast"
	"go/scanner"
	"go/token"
	"strconv"
	"strings"
	"unicode"
)

// The parser structure holds the parser's internal state.
type parser struct {
	file    *token.File
	errors  scanner.ErrorList
	scanner scanner.Scanner

	// Tracing/debugging
	mode   Mode // parsing mode
	trace  bool // == (mode & Trace != 0)
	indent int  // indentation used for tracing output

	// Comments
	comments    []*ast.CommentGroup
	leadComment *ast.CommentGroup // last lead comment
	lineComment *ast.CommentGroup // last line comment

	// Next token
	pos token.Pos   // token position
	tok token.Token // one token look-ahead
	lit string      // token literal

	// Error recovery
	// (used to limit the number of calls to parser.advance
	// w/o making scanning progress - avoids potential endless
	// loops across multiple parser functions during error recovery)
	syncPos token.Pos // last synchronization position
	syncCnt int       // number of parser.advance calls without progress

	// Non-syntactic parser control
	exprLev int  // < 0: in control clause, >= 0: in expression
	inRhs   bool // if set, the parser is parsing a rhs expression

	// Ordinary identifier scopes
	pkgScope   *ast.Scope        // pkgScope.Outer == nil
	topScope   *ast.Scope        // top-most scope; may be pkgScope
	unresolved []*ast.Ident      // unresolved identifiers
	imports    []*ast.ImportSpec // list of imports

	// Label scopes
	// (maintained by open/close LabelScope)
	labelScope  *ast.Scope     // label scope for current function
	targetStack [][]*ast.Ident // stack of unresolved labels
}

func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
	p.file = fset.AddFile(filename, -1, len(src))
	var m scanner.Mode
	if mode&ParseComments != 0 {
		m = scanner.ScanComments
	}
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, m)

	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)

	p.next()
}

// ----------------------------------------------------------------------------
// Scoping support

func (p *parser) openScope() {
	p.topScope = ast.NewScope(p.topScope)
}

func (p *parser) closeScope() {
	p.topScope = p.topScope.Outer
}

func (p *parser) openLabelScope() {
	p.labelScope = ast.NewScope(p.labelScope)
	p.targetStack = append(p.targetStack, nil)
}

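// closeLabelScope resolves the labels recorded for the current function
// against its label scope, reporting undefined labels when the
// DeclarationErrors mode is set, and pops both the label scope and the
// corresponding targetStack entry.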
func (p *parser) closeLabelScope() {
	// resolve labels
	n := len(p.targetStack) - 1
	scope := p.labelScope
	for _, ident := range p.targetStack[n] {
		ident.Obj = scope.Lookup(ident.Name)
		if ident.Obj == nil && p.mode&DeclarationErrors != 0 {
			p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name))
		}
	}
	// pop label scope
	p.targetStack = p.targetStack[0:n]
	p.labelScope = p.labelScope.Outer
}

func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) {
	for _, ident := range idents {
		assert(ident.Obj == nil, "identifier already declared or resolved")
		obj := ast.NewObj(kind, ident.Name)
		// remember the corresponding declaration for redeclaration
		// errors and global variable resolution/typechecking phase
		obj.Decl = decl
		obj.Data = data
		ident.Obj = obj
		if ident.Name != "_" {
			if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 {
				prevDecl := ""
				if pos := alt.Pos(); pos.IsValid() {
					prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos))
				}
				p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl))
			}
		}
	}
}

func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) {
	// Go spec: A short variable declaration may redeclare variables
	// provided they were originally declared in the same block with
	// the same type, and at least one of the non-blank variables is new.
	n := 0 // number of new variables
	for _, x := range list {
		if ident, isIdent := x.(*ast.Ident); isIdent {
			assert(ident.Obj == nil, "identifier already declared or resolved")
			obj := ast.NewObj(ast.Var, ident.Name)
			// remember corresponding assignment for other tools
			obj.Decl = decl
			ident.Obj = obj
			if ident.Name != "_" {
				if alt := p.topScope.Insert(obj); alt != nil {
					ident.Obj = alt // redeclaration
				} else {
					n++ // new declaration
				}
			}
		} else {
			p.errorExpected(x.Pos(), "identifier on left side of :=")
		}
	}
	if n == 0 && p.mode&DeclarationErrors != 0 {
		p.error(list[0].Pos(), "no new variables on left side of :=")
	}
}

// The unresolved object is a sentinel to mark identifiers that have been added
// to the list of unresolved identifiers. The sentinel is only used for verifying
// internal consistency.
var unresolved = new(ast.Object)

// If x is an identifier, tryResolve attempts to resolve x by looking up
// the object it denotes. If no object is found and collectUnresolved is
// set, x is marked as unresolved and collected in the list of unresolved
// identifiers.
//
func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) {
	// nothing to do if x is not an identifier or the blank identifier
	ident, _ := x.(*ast.Ident)
	if ident == nil {
		return
	}
	assert(ident.Obj == nil, "identifier already declared or resolved")
	if ident.Name == "_" {
		return
	}
	// try to resolve the identifier
	for s := p.topScope; s != nil; s = s.Outer {
		if obj := s.Lookup(ident.Name); obj != nil {
			ident.Obj = obj
			return
		}
	}
	// all local scopes are known, so any unresolved identifier
	// must be found either in the file scope, package scope
	// (perhaps in another file), or universe scope --- collect
	// them so that they can be resolved later
	if collectUnresolved {
		ident.Obj = unresolved
		p.unresolved = append(p.unresolved, ident)
	}
}

func (p *parser) resolve(x ast.Expr) {
	p.tryResolve(x, true)
}

// ----------------------------------------------------------------------------
// Parsing support

func (p *parser) printTrace(a ...interface{}) {
	const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
	const n = len(dots)
	pos := p.file.Position(p.pos)
	fmt.Printf("%5d:%3d: ", pos.Line, pos.Column)
	i := 2 * p.indent
	for i > n {
		fmt.Print(dots)
		i -= n
	}
	// i <= n
	fmt.Print(dots[0:i])
	fmt.Println(a...)
}

func trace(p *parser, msg string) *parser {
	p.printTrace(msg, "(")
	p.indent++
	return p
}

// Usage pattern: defer un(trace(p, "..."))
func un(p *parser) {
	p.indent--
	p.printTrace(")")
}

// Advance to the next token.
func (p *parser) next0() {
	// Because of one-token look-ahead, print the previous token
	// when tracing as it provides a more readable output. The
	// very first token (!p.pos.IsValid()) is not initialized
	// (it is token.ILLEGAL), so don't print it.
	if p.trace && p.pos.IsValid() {
		s := p.tok.String()
		switch {
		case p.tok.IsLiteral():
			p.printTrace(s, p.lit)
		case p.tok.IsOperator(), p.tok.IsKeyword():
			p.printTrace("\"" + s + "\"")
		default:
			p.printTrace(s)
		}
	}

	p.pos, p.tok, p.lit = p.scanner.Scan()
}

// Consume a comment and return it and the line on which it ends.
func (p *parser) consumeComment() (comment *ast.Comment, endline int) {
	// /*-style comments may end on a different line than where they start.
	// Scan the comment for '\n' chars and adjust endline accordingly.
	endline = p.file.Line(p.pos)
	if p.lit[1] == '*' {
		// don't use range here - no need to decode Unicode code points
		for i := 0; i < len(p.lit); i++ {
			if p.lit[i] == '\n' {
				endline++
			}
		}
	}

	comment = &ast.Comment{Slash: p.pos, Text: p.lit}
	p.next0()

	return
}

// Consume a group of adjacent comments, add it to the parser's
// comments list, and return it together with the line at which
// the last comment in the group ends. A non-comment token or n
// empty lines terminate a comment group.
//
func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
	var list []*ast.Comment
	endline = p.file.Line(p.pos)
	for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list = append(list, comment)
	}

	// add comment group to the comments list
	comments = &ast.CommentGroup{List: list}
	p.comments = append(p.comments, comments)

	return
}

// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead
// and line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
//
func (p *parser) next() {
	p.leadComment = nil
	p.lineComment = nil
	prev := p.pos
	p.next0()

	if p.tok == token.COMMENT {
		var comment *ast.CommentGroup
		var endline int

		if p.file.Line(p.pos) == p.file.Line(prev) {
			// The comment is on same line as the previous token; it
			// cannot be a lead comment but may be a line comment.
			comment, endline = p.consumeCommentGroup(0)
			if p.file.Line(p.pos) != endline || p.tok == token.EOF {
				// The next token is on a different line, thus
				// the last comment group is a line comment.
				p.lineComment = comment
			}
		}

		// consume successor comments, if any
		endline = -1
		for p.tok == token.COMMENT {
			comment, endline = p.consumeCommentGroup(1)
		}

		if endline+1 == p.file.Line(p.pos) {
			// The next token is following on the line immediately after the
			// comment group, thus the last comment group is a lead comment.
			p.leadComment = comment
		}
	}
}

// A bailout panic is raised to indicate early termination.
type bailout struct{}

func (p *parser) error(pos token.Pos, msg string) {
	epos := p.file.Position(pos)

	// If AllErrors is not set, discard errors reported on the same line
	// as the last recorded error and stop parsing if there are more than
	// 10 errors.
	if p.mode&AllErrors == 0 {
		n := len(p.errors)
		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
			return // discard - likely a spurious error
		}
		if n > 10 {
			panic(bailout{})
		}
	}

	p.errors.Add(epos, msg)
}

func (p *parser) errorExpected(pos token.Pos, msg string) {
	msg = "expected " + msg
	if pos == p.pos {
		// the error happened at the current position;
		// make the error message more specific
		switch {
		case p.tok == token.SEMICOLON && p.lit == "\n":
			msg += ", found newline"
		case p.tok.IsLiteral():
			// print 123 rather than 'INT', etc.
383 msg += ", found " + p.lit 384 default: 385 msg += ", found '" + p.tok.String() + "'" 386 } 387 } 388 p.error(pos, msg) 389 } 390 391 func (p *parser) expect(tok token.Token) token.Pos { 392 pos := p.pos 393 if p.tok != tok { 394 p.errorExpected(pos, "'"+tok.String()+"'") 395 } 396 p.next() // make progress 397 return pos 398 } 399 400 // expectClosing is like expect but provides a better error message 401 // for the common case of a missing comma before a newline. 402 // 403 func (p *parser) expectClosing(tok token.Token, context string) token.Pos { 404 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" { 405 p.error(p.pos, "missing ',' before newline in "+context) 406 p.next() 407 } 408 return p.expect(tok) 409 } 410 411 func (p *parser) expectSemi() { 412 // semicolon is optional before a closing ')' or '}' 413 if p.tok != token.RPAREN && p.tok != token.RBRACE { 414 switch p.tok { 415 case token.COMMA: 416 // permit a ',' instead of a ';' but complain 417 p.errorExpected(p.pos, "';'") 418 fallthrough 419 case token.SEMICOLON: 420 p.next() 421 default: 422 p.errorExpected(p.pos, "';'") 423 p.advance(stmtStart) 424 } 425 } 426 } 427 428 func (p *parser) atComma(context string, follow token.Token) bool { 429 if p.tok == token.COMMA { 430 return true 431 } 432 if p.tok != follow { 433 msg := "missing ','" 434 if p.tok == token.SEMICOLON && p.lit == "\n" { 435 msg += " before newline" 436 } 437 p.error(p.pos, msg+" in "+context) 438 return true // "insert" comma and continue 439 } 440 return false 441 } 442 443 func assert(cond bool, msg string) { 444 if !cond { 445 panic("go/parser internal error: " + msg) 446 } 447 } 448 449 // advance consumes tokens until the current token p.tok 450 // is in the 'to' set, or token.EOF. For error recovery. 451 func (p *parser) advance(to map[token.Token]bool) { 452 for ; p.tok != token.EOF; p.next() { 453 if to[p.tok] { 454 // Return only if parser made some progress since last 455 // sync or if it has not reached 10 advance calls without 456 // progress. Otherwise consume at least one token to 457 // avoid an endless parser loop (it is possible that 458 // both parseOperand and parseStmt call advance and 459 // correctly do not advance, thus the need for the 460 // invocation limit p.syncCnt). 461 if p.pos == p.syncPos && p.syncCnt < 10 { 462 p.syncCnt++ 463 return 464 } 465 if p.pos > p.syncPos { 466 p.syncPos = p.pos 467 p.syncCnt = 0 468 return 469 } 470 // Reaching here indicates a parser bug, likely an 471 // incorrect token list in this function, but it only 472 // leads to skipping of possibly correct code if a 473 // previous error is present, and thus is preferred 474 // over a non-terminating parse. 475 } 476 } 477 } 478 479 var stmtStart = map[token.Token]bool{ 480 token.BREAK: true, 481 token.CONST: true, 482 token.CONTINUE: true, 483 token.DEFER: true, 484 token.FALLTHROUGH: true, 485 token.FOR: true, 486 token.GO: true, 487 token.GOTO: true, 488 token.IF: true, 489 token.RETURN: true, 490 token.SELECT: true, 491 token.SWITCH: true, 492 token.TYPE: true, 493 token.VAR: true, 494 } 495 496 var declStart = map[token.Token]bool{ 497 token.CONST: true, 498 token.TYPE: true, 499 token.VAR: true, 500 } 501 502 var exprEnd = map[token.Token]bool{ 503 token.COMMA: true, 504 token.COLON: true, 505 token.SEMICOLON: true, 506 token.RPAREN: true, 507 token.RBRACK: true, 508 token.RBRACE: true, 509 } 510 511 // safePos returns a valid file position for a given position: If pos 512 // is valid to begin with, safePos returns pos. 
If pos is out-of-range, 513 // safePos returns the EOF position. 514 // 515 // This is hack to work around "artificial" end positions in the AST which 516 // are computed by adding 1 to (presumably valid) token positions. If the 517 // token positions are invalid due to parse errors, the resulting end position 518 // may be past the file's EOF position, which would lead to panics if used 519 // later on. 520 // 521 func (p *parser) safePos(pos token.Pos) (res token.Pos) { 522 defer func() { 523 if recover() != nil { 524 res = token.Pos(p.file.Base() + p.file.Size()) // EOF position 525 } 526 }() 527 _ = p.file.Offset(pos) // trigger a panic if position is out-of-range 528 return pos 529 } 530 531 // ---------------------------------------------------------------------------- 532 // Identifiers 533 534 func (p *parser) parseIdent() *ast.Ident { 535 pos := p.pos 536 name := "_" 537 if p.tok == token.IDENT { 538 name = p.lit 539 p.next() 540 } else { 541 p.expect(token.IDENT) // use expect() error handling 542 } 543 return &ast.Ident{NamePos: pos, Name: name} 544 } 545 546 func (p *parser) parseIdentList() (list []*ast.Ident) { 547 if p.trace { 548 defer un(trace(p, "IdentList")) 549 } 550 551 list = append(list, p.parseIdent()) 552 for p.tok == token.COMMA { 553 p.next() 554 list = append(list, p.parseIdent()) 555 } 556 557 return 558 } 559 560 // ---------------------------------------------------------------------------- 561 // Common productions 562 563 // If lhs is set, result list elements which are identifiers are not resolved. 564 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) { 565 if p.trace { 566 defer un(trace(p, "ExpressionList")) 567 } 568 569 list = append(list, p.checkExpr(p.parseExpr(lhs))) 570 for p.tok == token.COMMA { 571 p.next() 572 list = append(list, p.checkExpr(p.parseExpr(lhs))) 573 } 574 575 return 576 } 577 578 func (p *parser) parseLhsList() []ast.Expr { 579 old := p.inRhs 580 p.inRhs = false 581 list := p.parseExprList(true) 582 switch p.tok { 583 case token.DEFINE: 584 // lhs of a short variable declaration 585 // but doesn't enter scope until later: 586 // caller must call p.shortVarDecl(p.makeIdentList(list)) 587 // at appropriate time. 588 case token.COLON: 589 // lhs of a label declaration or a communication clause of a select 590 // statement (parseLhsList is not called when parsing the case clause 591 // of a switch statement): 592 // - labels are declared by the caller of parseLhsList 593 // - for communication clauses, if there is a stand-alone identifier 594 // followed by a colon, we have a syntax error; there is no need 595 // to resolve the identifier in that case 596 default: 597 // identifiers must be declared elsewhere 598 for _, x := range list { 599 p.resolve(x) 600 } 601 } 602 p.inRhs = old 603 return list 604 } 605 606 func (p *parser) parseRhsList() []ast.Expr { 607 old := p.inRhs 608 p.inRhs = true 609 list := p.parseExprList(false) 610 p.inRhs = old 611 return list 612 } 613 614 // ---------------------------------------------------------------------------- 615 // Types 616 617 func (p *parser) parseType() ast.Expr { 618 if p.trace { 619 defer un(trace(p, "Type")) 620 } 621 622 typ := p.tryType() 623 624 if typ == nil { 625 pos := p.pos 626 p.errorExpected(pos, "type") 627 p.advance(exprEnd) 628 return &ast.BadExpr{From: pos, To: p.pos} 629 } 630 631 return typ 632 } 633 634 // If the result is an identifier, it is not resolved. 
635 func (p *parser) parseTypeName() ast.Expr { 636 if p.trace { 637 defer un(trace(p, "TypeName")) 638 } 639 640 ident := p.parseIdent() 641 // don't resolve ident yet - it may be a parameter or field name 642 643 if p.tok == token.PERIOD { 644 // ident is a package name 645 p.next() 646 p.resolve(ident) 647 sel := p.parseIdent() 648 return &ast.SelectorExpr{X: ident, Sel: sel} 649 } 650 651 return ident 652 } 653 654 func (p *parser) parseArrayType() ast.Expr { 655 if p.trace { 656 defer un(trace(p, "ArrayType")) 657 } 658 659 lbrack := p.expect(token.LBRACK) 660 p.exprLev++ 661 var len ast.Expr 662 // always permit ellipsis for more fault-tolerant parsing 663 if p.tok == token.ELLIPSIS { 664 len = &ast.Ellipsis{Ellipsis: p.pos} 665 p.next() 666 } else if p.tok != token.RBRACK { 667 len = p.parseRhs() 668 } 669 p.exprLev-- 670 p.expect(token.RBRACK) 671 elt := p.parseType() 672 673 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt} 674 } 675 676 func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident { 677 idents := make([]*ast.Ident, len(list)) 678 for i, x := range list { 679 ident, isIdent := x.(*ast.Ident) 680 if !isIdent { 681 if _, isBad := x.(*ast.BadExpr); !isBad { 682 // only report error if it's a new one 683 p.errorExpected(x.Pos(), "identifier") 684 } 685 ident = &ast.Ident{NamePos: x.Pos(), Name: "_"} 686 } 687 idents[i] = ident 688 } 689 return idents 690 } 691 692 func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field { 693 if p.trace { 694 defer un(trace(p, "FieldDecl")) 695 } 696 697 doc := p.leadComment 698 699 // 1st FieldDecl 700 // A type name used as an anonymous field looks like a field identifier. 701 var list []ast.Expr 702 for { 703 list = append(list, p.parseVarType(false)) 704 if p.tok != token.COMMA { 705 break 706 } 707 p.next() 708 } 709 710 typ := p.tryVarType(false) 711 712 // analyze case 713 var idents []*ast.Ident 714 if typ != nil { 715 // IdentifierList Type 716 idents = p.makeIdentList(list) 717 } else { 718 // ["*"] TypeName (AnonymousField) 719 typ = list[0] // we always have at least one element 720 if n := len(list); n > 1 { 721 p.errorExpected(p.pos, "type") 722 typ = &ast.BadExpr{From: p.pos, To: p.pos} 723 } else if !isTypeName(deref(typ)) { 724 p.errorExpected(typ.Pos(), "anonymous field") 725 typ = &ast.BadExpr{From: typ.Pos(), To: p.safePos(typ.End())} 726 } 727 } 728 729 // Tag 730 var tag *ast.BasicLit 731 if p.tok == token.STRING { 732 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 733 p.next() 734 } 735 736 p.expectSemi() // call before accessing p.linecomment 737 738 field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment} 739 p.declare(field, nil, scope, ast.Var, idents...) 
740 p.resolve(typ) 741 742 return field 743 } 744 745 func (p *parser) parseStructType() *ast.StructType { 746 if p.trace { 747 defer un(trace(p, "StructType")) 748 } 749 750 pos := p.expect(token.STRUCT) 751 lbrace := p.expect(token.LBRACE) 752 scope := ast.NewScope(nil) // struct scope 753 var list []*ast.Field 754 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN { 755 // a field declaration cannot start with a '(' but we accept 756 // it here for more robust parsing and better error messages 757 // (parseFieldDecl will check and complain if necessary) 758 list = append(list, p.parseFieldDecl(scope)) 759 } 760 rbrace := p.expect(token.RBRACE) 761 762 return &ast.StructType{ 763 Struct: pos, 764 Fields: &ast.FieldList{ 765 Opening: lbrace, 766 List: list, 767 Closing: rbrace, 768 }, 769 } 770 } 771 772 func (p *parser) parsePointerType() *ast.StarExpr { 773 if p.trace { 774 defer un(trace(p, "PointerType")) 775 } 776 777 star := p.expect(token.MUL) 778 base := p.parseType() 779 780 return &ast.StarExpr{Star: star, X: base} 781 } 782 783 // If the result is an identifier, it is not resolved. 784 func (p *parser) tryVarType(isParam bool) ast.Expr { 785 if isParam && p.tok == token.ELLIPSIS { 786 pos := p.pos 787 p.next() 788 typ := p.tryIdentOrType() // don't use parseType so we can provide better error message 789 if typ != nil { 790 p.resolve(typ) 791 } else { 792 p.error(pos, "'...' parameter is missing type") 793 typ = &ast.BadExpr{From: pos, To: p.pos} 794 } 795 return &ast.Ellipsis{Ellipsis: pos, Elt: typ} 796 } 797 return p.tryIdentOrType() 798 } 799 800 // If the result is an identifier, it is not resolved. 801 func (p *parser) parseVarType(isParam bool) ast.Expr { 802 typ := p.tryVarType(isParam) 803 if typ == nil { 804 pos := p.pos 805 p.errorExpected(pos, "type") 806 p.next() // make progress 807 typ = &ast.BadExpr{From: pos, To: p.pos} 808 } 809 return typ 810 } 811 812 func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) { 813 if p.trace { 814 defer un(trace(p, "ParameterList")) 815 } 816 817 // 1st ParameterDecl 818 // A list of identifiers looks like a list of type names. 819 var list []ast.Expr 820 for { 821 list = append(list, p.parseVarType(ellipsisOk)) 822 if p.tok != token.COMMA { 823 break 824 } 825 p.next() 826 if p.tok == token.RPAREN { 827 break 828 } 829 } 830 831 // analyze case 832 if typ := p.tryVarType(ellipsisOk); typ != nil { 833 // IdentifierList Type 834 idents := p.makeIdentList(list) 835 field := &ast.Field{Names: idents, Type: typ} 836 params = append(params, field) 837 // Go spec: The scope of an identifier denoting a function 838 // parameter or result variable is the function body. 839 p.declare(field, nil, scope, ast.Var, idents...) 840 p.resolve(typ) 841 if !p.atComma("parameter list", token.RPAREN) { 842 return 843 } 844 p.next() 845 for p.tok != token.RPAREN && p.tok != token.EOF { 846 idents := p.parseIdentList() 847 typ := p.parseVarType(ellipsisOk) 848 field := &ast.Field{Names: idents, Type: typ} 849 params = append(params, field) 850 // Go spec: The scope of an identifier denoting a function 851 // parameter or result variable is the function body. 852 p.declare(field, nil, scope, ast.Var, idents...) 
853 p.resolve(typ) 854 if !p.atComma("parameter list", token.RPAREN) { 855 break 856 } 857 p.next() 858 } 859 return 860 } 861 862 // Type { "," Type } (anonymous parameters) 863 params = make([]*ast.Field, len(list)) 864 for i, typ := range list { 865 p.resolve(typ) 866 params[i] = &ast.Field{Type: typ} 867 } 868 return 869 } 870 871 func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList { 872 if p.trace { 873 defer un(trace(p, "Parameters")) 874 } 875 876 var params []*ast.Field 877 lparen := p.expect(token.LPAREN) 878 if p.tok != token.RPAREN { 879 params = p.parseParameterList(scope, ellipsisOk) 880 } 881 rparen := p.expect(token.RPAREN) 882 883 return &ast.FieldList{Opening: lparen, List: params, Closing: rparen} 884 } 885 886 func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList { 887 if p.trace { 888 defer un(trace(p, "Result")) 889 } 890 891 if p.tok == token.LPAREN { 892 return p.parseParameters(scope, false) 893 } 894 895 typ := p.tryType() 896 if typ != nil { 897 list := make([]*ast.Field, 1) 898 list[0] = &ast.Field{Type: typ} 899 return &ast.FieldList{List: list} 900 } 901 902 return nil 903 } 904 905 func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) { 906 if p.trace { 907 defer un(trace(p, "Signature")) 908 } 909 910 params = p.parseParameters(scope, true) 911 results = p.parseResult(scope) 912 913 return 914 } 915 916 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) { 917 if p.trace { 918 defer un(trace(p, "FuncType")) 919 } 920 921 pos := p.expect(token.FUNC) 922 scope := ast.NewScope(p.topScope) // function scope 923 params, results := p.parseSignature(scope) 924 925 return &ast.FuncType{Func: pos, Params: params, Results: results}, scope 926 } 927 928 func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field { 929 if p.trace { 930 defer un(trace(p, "MethodSpec")) 931 } 932 933 doc := p.leadComment 934 var idents []*ast.Ident 935 var typ ast.Expr 936 x := p.parseTypeName() 937 if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN { 938 // method 939 idents = []*ast.Ident{ident} 940 scope := ast.NewScope(nil) // method scope 941 params, results := p.parseSignature(scope) 942 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results} 943 } else { 944 // embedded interface 945 typ = x 946 p.resolve(typ) 947 } 948 p.expectSemi() // call before accessing p.linecomment 949 950 spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment} 951 p.declare(spec, nil, scope, ast.Fun, idents...) 
952 953 return spec 954 } 955 956 func (p *parser) parseInterfaceType() *ast.InterfaceType { 957 if p.trace { 958 defer un(trace(p, "InterfaceType")) 959 } 960 961 pos := p.expect(token.INTERFACE) 962 lbrace := p.expect(token.LBRACE) 963 scope := ast.NewScope(nil) // interface scope 964 var list []*ast.Field 965 for p.tok == token.IDENT { 966 list = append(list, p.parseMethodSpec(scope)) 967 } 968 rbrace := p.expect(token.RBRACE) 969 970 return &ast.InterfaceType{ 971 Interface: pos, 972 Methods: &ast.FieldList{ 973 Opening: lbrace, 974 List: list, 975 Closing: rbrace, 976 }, 977 } 978 } 979 980 func (p *parser) parseMapType() *ast.MapType { 981 if p.trace { 982 defer un(trace(p, "MapType")) 983 } 984 985 pos := p.expect(token.MAP) 986 p.expect(token.LBRACK) 987 key := p.parseType() 988 p.expect(token.RBRACK) 989 value := p.parseType() 990 991 return &ast.MapType{Map: pos, Key: key, Value: value} 992 } 993 994 func (p *parser) parseChanType() *ast.ChanType { 995 if p.trace { 996 defer un(trace(p, "ChanType")) 997 } 998 999 pos := p.pos 1000 dir := ast.SEND | ast.RECV 1001 var arrow token.Pos 1002 if p.tok == token.CHAN { 1003 p.next() 1004 if p.tok == token.ARROW { 1005 arrow = p.pos 1006 p.next() 1007 dir = ast.SEND 1008 } 1009 } else { 1010 arrow = p.expect(token.ARROW) 1011 p.expect(token.CHAN) 1012 dir = ast.RECV 1013 } 1014 value := p.parseType() 1015 1016 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value} 1017 } 1018 1019 // If the result is an identifier, it is not resolved. 1020 func (p *parser) tryIdentOrType() ast.Expr { 1021 switch p.tok { 1022 case token.IDENT: 1023 return p.parseTypeName() 1024 case token.LBRACK: 1025 return p.parseArrayType() 1026 case token.STRUCT: 1027 return p.parseStructType() 1028 case token.MUL: 1029 return p.parsePointerType() 1030 case token.FUNC: 1031 typ, _ := p.parseFuncType() 1032 return typ 1033 case token.INTERFACE: 1034 return p.parseInterfaceType() 1035 case token.MAP: 1036 return p.parseMapType() 1037 case token.CHAN, token.ARROW: 1038 return p.parseChanType() 1039 case token.LPAREN: 1040 lparen := p.pos 1041 p.next() 1042 typ := p.parseType() 1043 rparen := p.expect(token.RPAREN) 1044 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen} 1045 } 1046 1047 // no type found 1048 return nil 1049 } 1050 1051 func (p *parser) tryType() ast.Expr { 1052 typ := p.tryIdentOrType() 1053 if typ != nil { 1054 p.resolve(typ) 1055 } 1056 return typ 1057 } 1058 1059 // ---------------------------------------------------------------------------- 1060 // Blocks 1061 1062 func (p *parser) parseStmtList() (list []ast.Stmt) { 1063 if p.trace { 1064 defer un(trace(p, "StatementList")) 1065 } 1066 1067 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF { 1068 list = append(list, p.parseStmt()) 1069 } 1070 1071 return 1072 } 1073 1074 func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt { 1075 if p.trace { 1076 defer un(trace(p, "Body")) 1077 } 1078 1079 lbrace := p.expect(token.LBRACE) 1080 p.topScope = scope // open function scope 1081 p.openLabelScope() 1082 list := p.parseStmtList() 1083 p.closeLabelScope() 1084 p.closeScope() 1085 rbrace := p.expect(token.RBRACE) 1086 1087 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1088 } 1089 1090 func (p *parser) parseBlockStmt() *ast.BlockStmt { 1091 if p.trace { 1092 defer un(trace(p, "BlockStmt")) 1093 } 1094 1095 lbrace := p.expect(token.LBRACE) 1096 p.openScope() 1097 list := p.parseStmtList() 1098 p.closeScope() 1099 
rbrace := p.expect(token.RBRACE) 1100 1101 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1102 } 1103 1104 // ---------------------------------------------------------------------------- 1105 // Expressions 1106 1107 func (p *parser) parseFuncTypeOrLit() ast.Expr { 1108 if p.trace { 1109 defer un(trace(p, "FuncTypeOrLit")) 1110 } 1111 1112 typ, scope := p.parseFuncType() 1113 if p.tok != token.LBRACE { 1114 // function type only 1115 return typ 1116 } 1117 1118 p.exprLev++ 1119 body := p.parseBody(scope) 1120 p.exprLev-- 1121 1122 return &ast.FuncLit{Type: typ, Body: body} 1123 } 1124 1125 // parseOperand may return an expression or a raw type (incl. array 1126 // types of the form [...]T. Callers must verify the result. 1127 // If lhs is set and the result is an identifier, it is not resolved. 1128 // 1129 func (p *parser) parseOperand(lhs bool) ast.Expr { 1130 if p.trace { 1131 defer un(trace(p, "Operand")) 1132 } 1133 1134 switch p.tok { 1135 case token.IDENT: 1136 x := p.parseIdent() 1137 if !lhs { 1138 p.resolve(x) 1139 } 1140 return x 1141 1142 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING: 1143 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 1144 p.next() 1145 return x 1146 1147 case token.LPAREN: 1148 lparen := p.pos 1149 p.next() 1150 p.exprLev++ 1151 x := p.parseRhsOrType() // types may be parenthesized: (some type) 1152 p.exprLev-- 1153 rparen := p.expect(token.RPAREN) 1154 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen} 1155 1156 case token.FUNC: 1157 return p.parseFuncTypeOrLit() 1158 } 1159 1160 if typ := p.tryIdentOrType(); typ != nil { 1161 // could be type for composite literal or conversion 1162 _, isIdent := typ.(*ast.Ident) 1163 assert(!isIdent, "type cannot be identifier") 1164 return typ 1165 } 1166 1167 // we have an error 1168 pos := p.pos 1169 p.errorExpected(pos, "operand") 1170 p.advance(stmtStart) 1171 return &ast.BadExpr{From: pos, To: p.pos} 1172 } 1173 1174 func (p *parser) parseSelector(x ast.Expr) ast.Expr { 1175 if p.trace { 1176 defer un(trace(p, "Selector")) 1177 } 1178 1179 sel := p.parseIdent() 1180 1181 return &ast.SelectorExpr{X: x, Sel: sel} 1182 } 1183 1184 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr { 1185 if p.trace { 1186 defer un(trace(p, "TypeAssertion")) 1187 } 1188 1189 lparen := p.expect(token.LPAREN) 1190 var typ ast.Expr 1191 if p.tok == token.TYPE { 1192 // type switch: typ == nil 1193 p.next() 1194 } else { 1195 typ = p.parseType() 1196 } 1197 rparen := p.expect(token.RPAREN) 1198 1199 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen} 1200 } 1201 1202 func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr { 1203 if p.trace { 1204 defer un(trace(p, "IndexOrSlice")) 1205 } 1206 1207 const N = 3 // change the 3 to 2 to disable 3-index slices 1208 lbrack := p.expect(token.LBRACK) 1209 p.exprLev++ 1210 var index [N]ast.Expr 1211 var colons [N - 1]token.Pos 1212 if p.tok != token.COLON { 1213 index[0] = p.parseRhs() 1214 } 1215 ncolons := 0 1216 for p.tok == token.COLON && ncolons < len(colons) { 1217 colons[ncolons] = p.pos 1218 ncolons++ 1219 p.next() 1220 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF { 1221 index[ncolons] = p.parseRhs() 1222 } 1223 } 1224 p.exprLev-- 1225 rbrack := p.expect(token.RBRACK) 1226 1227 if ncolons > 0 { 1228 // slice expression 1229 slice3 := false 1230 if ncolons == 2 { 1231 slice3 = true 1232 // Check presence of 2nd and 3rd index here rather than during type-checking 1233 // to 
prevent erroneous programs from passing through gofmt (was issue 7305). 1234 if index[1] == nil { 1235 p.error(colons[0], "2nd index required in 3-index slice") 1236 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]} 1237 } 1238 if index[2] == nil { 1239 p.error(colons[1], "3rd index required in 3-index slice") 1240 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack} 1241 } 1242 } 1243 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack} 1244 } 1245 1246 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack} 1247 } 1248 1249 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr { 1250 if p.trace { 1251 defer un(trace(p, "CallOrConversion")) 1252 } 1253 1254 lparen := p.expect(token.LPAREN) 1255 p.exprLev++ 1256 var list []ast.Expr 1257 var ellipsis token.Pos 1258 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() { 1259 list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...) 1260 if p.tok == token.ELLIPSIS { 1261 ellipsis = p.pos 1262 p.next() 1263 } 1264 if !p.atComma("argument list", token.RPAREN) { 1265 break 1266 } 1267 p.next() 1268 } 1269 p.exprLev-- 1270 rparen := p.expectClosing(token.RPAREN, "argument list") 1271 1272 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen} 1273 } 1274 1275 func (p *parser) parseValue(keyOk bool) ast.Expr { 1276 if p.trace { 1277 defer un(trace(p, "Element")) 1278 } 1279 1280 if p.tok == token.LBRACE { 1281 return p.parseLiteralValue(nil) 1282 } 1283 1284 // Because the parser doesn't know the composite literal type, it cannot 1285 // know if a key that's an identifier is a struct field name or a name 1286 // denoting a value. The former is not resolved by the parser or the 1287 // resolver. 1288 // 1289 // Instead, _try_ to resolve such a key if possible. If it resolves, 1290 // it a) has correctly resolved, or b) incorrectly resolved because 1291 // the key is a struct field with a name matching another identifier. 1292 // In the former case we are done, and in the latter case we don't 1293 // care because the type checker will do a separate field lookup. 1294 // 1295 // If the key does not resolve, it a) must be defined at the top 1296 // level in another file of the same package, the universe scope, or be 1297 // undeclared; or b) it is a struct field. In the former case, the type 1298 // checker can do a top-level lookup, and in the latter case it will do 1299 // a separate field lookup. 1300 x := p.checkExpr(p.parseExpr(keyOk)) 1301 if keyOk { 1302 if p.tok == token.COLON { 1303 // Try to resolve the key but don't collect it 1304 // as unresolved identifier if it fails so that 1305 // we don't get (possibly false) errors about 1306 // undeclared names. 
1307 p.tryResolve(x, false) 1308 } else { 1309 // not a key 1310 p.resolve(x) 1311 } 1312 } 1313 1314 return x 1315 } 1316 1317 func (p *parser) parseElement() ast.Expr { 1318 if p.trace { 1319 defer un(trace(p, "Element")) 1320 } 1321 1322 x := p.parseValue(true) 1323 if p.tok == token.COLON { 1324 colon := p.pos 1325 p.next() 1326 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)} 1327 } 1328 1329 return x 1330 } 1331 1332 func (p *parser) parseElementList() (list []ast.Expr) { 1333 if p.trace { 1334 defer un(trace(p, "ElementList")) 1335 } 1336 1337 for p.tok != token.RBRACE && p.tok != token.EOF { 1338 list = append(list, p.parseElement()) 1339 if !p.atComma("composite literal", token.RBRACE) { 1340 break 1341 } 1342 p.next() 1343 } 1344 1345 return 1346 } 1347 1348 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr { 1349 if p.trace { 1350 defer un(trace(p, "LiteralValue")) 1351 } 1352 1353 lbrace := p.expect(token.LBRACE) 1354 var elts []ast.Expr 1355 p.exprLev++ 1356 if p.tok != token.RBRACE { 1357 elts = p.parseElementList() 1358 } 1359 p.exprLev-- 1360 rbrace := p.expectClosing(token.RBRACE, "composite literal") 1361 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace} 1362 } 1363 1364 // checkExpr checks that x is an expression (and not a type). 1365 func (p *parser) checkExpr(x ast.Expr) ast.Expr { 1366 switch unparen(x).(type) { 1367 case *ast.BadExpr: 1368 case *ast.Ident: 1369 case *ast.BasicLit: 1370 case *ast.FuncLit: 1371 case *ast.CompositeLit: 1372 case *ast.ParenExpr: 1373 panic("unreachable") 1374 case *ast.SelectorExpr: 1375 case *ast.IndexExpr: 1376 case *ast.SliceExpr: 1377 case *ast.TypeAssertExpr: 1378 // If t.Type == nil we have a type assertion of the form 1379 // y.(type), which is only allowed in type switch expressions. 1380 // It's hard to exclude those but for the case where we are in 1381 // a type switch. Instead be lenient and test this in the type 1382 // checker. 1383 case *ast.CallExpr: 1384 case *ast.StarExpr: 1385 case *ast.UnaryExpr: 1386 case *ast.BinaryExpr: 1387 default: 1388 // all other nodes are not proper expressions 1389 p.errorExpected(x.Pos(), "expression") 1390 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1391 } 1392 return x 1393 } 1394 1395 // isTypeName reports whether x is a (qualified) TypeName. 1396 func isTypeName(x ast.Expr) bool { 1397 switch t := x.(type) { 1398 case *ast.BadExpr: 1399 case *ast.Ident: 1400 case *ast.SelectorExpr: 1401 _, isIdent := t.X.(*ast.Ident) 1402 return isIdent 1403 default: 1404 return false // all other nodes are not type names 1405 } 1406 return true 1407 } 1408 1409 // isLiteralType reports whether x is a legal composite literal type. 1410 func isLiteralType(x ast.Expr) bool { 1411 switch t := x.(type) { 1412 case *ast.BadExpr: 1413 case *ast.Ident: 1414 case *ast.SelectorExpr: 1415 _, isIdent := t.X.(*ast.Ident) 1416 return isIdent 1417 case *ast.ArrayType: 1418 case *ast.StructType: 1419 case *ast.MapType: 1420 default: 1421 return false // all other nodes are not legal composite literal types 1422 } 1423 return true 1424 } 1425 1426 // If x is of the form *T, deref returns T, otherwise it returns x. 1427 func deref(x ast.Expr) ast.Expr { 1428 if p, isPtr := x.(*ast.StarExpr); isPtr { 1429 x = p.X 1430 } 1431 return x 1432 } 1433 1434 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x. 
1435 func unparen(x ast.Expr) ast.Expr { 1436 if p, isParen := x.(*ast.ParenExpr); isParen { 1437 x = unparen(p.X) 1438 } 1439 return x 1440 } 1441 1442 // checkExprOrType checks that x is an expression or a type 1443 // (and not a raw type such as [...]T). 1444 // 1445 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr { 1446 switch t := unparen(x).(type) { 1447 case *ast.ParenExpr: 1448 panic("unreachable") 1449 case *ast.UnaryExpr: 1450 case *ast.ArrayType: 1451 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis { 1452 p.error(len.Pos(), "expected array length, found '...'") 1453 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1454 } 1455 } 1456 1457 // all other nodes are expressions or types 1458 return x 1459 } 1460 1461 // If lhs is set and the result is an identifier, it is not resolved. 1462 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr { 1463 if p.trace { 1464 defer un(trace(p, "PrimaryExpr")) 1465 } 1466 1467 x := p.parseOperand(lhs) 1468 L: 1469 for { 1470 switch p.tok { 1471 case token.PERIOD: 1472 p.next() 1473 if lhs { 1474 p.resolve(x) 1475 } 1476 switch p.tok { 1477 case token.IDENT: 1478 x = p.parseSelector(p.checkExprOrType(x)) 1479 case token.LPAREN: 1480 x = p.parseTypeAssertion(p.checkExpr(x)) 1481 default: 1482 pos := p.pos 1483 p.errorExpected(pos, "selector or type assertion") 1484 p.next() // make progress 1485 sel := &ast.Ident{NamePos: pos, Name: "_"} 1486 x = &ast.SelectorExpr{X: x, Sel: sel} 1487 } 1488 case token.LBRACK: 1489 if lhs { 1490 p.resolve(x) 1491 } 1492 x = p.parseIndexOrSlice(p.checkExpr(x)) 1493 case token.LPAREN: 1494 if lhs { 1495 p.resolve(x) 1496 } 1497 x = p.parseCallOrConversion(p.checkExprOrType(x)) 1498 case token.LBRACE: 1499 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) { 1500 if lhs { 1501 p.resolve(x) 1502 } 1503 x = p.parseLiteralValue(x) 1504 } else { 1505 break L 1506 } 1507 default: 1508 break L 1509 } 1510 lhs = false // no need to try to resolve again 1511 } 1512 1513 return x 1514 } 1515 1516 // If lhs is set and the result is an identifier, it is not resolved. 1517 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr { 1518 if p.trace { 1519 defer un(trace(p, "UnaryExpr")) 1520 } 1521 1522 switch p.tok { 1523 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND: 1524 pos, op := p.pos, p.tok 1525 p.next() 1526 x := p.parseUnaryExpr(false) 1527 return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)} 1528 1529 case token.ARROW: 1530 // channel type or receive expression 1531 arrow := p.pos 1532 p.next() 1533 1534 // If the next token is token.CHAN we still don't know if it 1535 // is a channel type or a receive operation - we only know 1536 // once we have found the end of the unary expression. 
There 1537 // are two cases: 1538 // 1539 // <- type => (<-type) must be channel type 1540 // <- expr => <-(expr) is a receive from an expression 1541 // 1542 // In the first case, the arrow must be re-associated with 1543 // the channel type parsed already: 1544 // 1545 // <- (chan type) => (<-chan type) 1546 // <- (chan<- type) => (<-chan (<-type)) 1547 1548 x := p.parseUnaryExpr(false) 1549 1550 // determine which case we have 1551 if typ, ok := x.(*ast.ChanType); ok { 1552 // (<-type) 1553 1554 // re-associate position info and <- 1555 dir := ast.SEND 1556 for ok && dir == ast.SEND { 1557 if typ.Dir == ast.RECV { 1558 // error: (<-type) is (<-(<-chan T)) 1559 p.errorExpected(typ.Arrow, "'chan'") 1560 } 1561 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow 1562 dir, typ.Dir = typ.Dir, ast.RECV 1563 typ, ok = typ.Value.(*ast.ChanType) 1564 } 1565 if dir == ast.SEND { 1566 p.errorExpected(arrow, "channel type") 1567 } 1568 1569 return x 1570 } 1571 1572 // <-(expr) 1573 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)} 1574 1575 case token.MUL: 1576 // pointer type or unary "*" expression 1577 pos := p.pos 1578 p.next() 1579 x := p.parseUnaryExpr(false) 1580 return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)} 1581 } 1582 1583 return p.parsePrimaryExpr(lhs) 1584 } 1585 1586 func (p *parser) tokPrec() (token.Token, int) { 1587 tok := p.tok 1588 if p.inRhs && tok == token.ASSIGN { 1589 tok = token.EQL 1590 } 1591 return tok, tok.Precedence() 1592 } 1593 1594 // If lhs is set and the result is an identifier, it is not resolved. 1595 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr { 1596 if p.trace { 1597 defer un(trace(p, "BinaryExpr")) 1598 } 1599 1600 x := p.parseUnaryExpr(lhs) 1601 for { 1602 op, oprec := p.tokPrec() 1603 if oprec < prec1 { 1604 return x 1605 } 1606 pos := p.expect(op) 1607 if lhs { 1608 p.resolve(x) 1609 lhs = false 1610 } 1611 y := p.parseBinaryExpr(false, oprec+1) 1612 x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)} 1613 } 1614 } 1615 1616 // If lhs is set and the result is an identifier, it is not resolved. 1617 // The result may be a type or even a raw type ([...]int). Callers must 1618 // check the result (using checkExpr or checkExprOrType), depending on 1619 // context. 1620 func (p *parser) parseExpr(lhs bool) ast.Expr { 1621 if p.trace { 1622 defer un(trace(p, "Expression")) 1623 } 1624 1625 return p.parseBinaryExpr(lhs, token.LowestPrec+1) 1626 } 1627 1628 func (p *parser) parseRhs() ast.Expr { 1629 old := p.inRhs 1630 p.inRhs = true 1631 x := p.checkExpr(p.parseExpr(false)) 1632 p.inRhs = old 1633 return x 1634 } 1635 1636 func (p *parser) parseRhsOrType() ast.Expr { 1637 old := p.inRhs 1638 p.inRhs = true 1639 x := p.checkExprOrType(p.parseExpr(false)) 1640 p.inRhs = old 1641 return x 1642 } 1643 1644 // ---------------------------------------------------------------------------- 1645 // Statements 1646 1647 // Parsing modes for parseSimpleStmt. 1648 const ( 1649 basic = iota 1650 labelOk 1651 rangeOk 1652 ) 1653 1654 // parseSimpleStmt returns true as 2nd result if it parsed the assignment 1655 // of a range clause (with mode == rangeOk). The returned statement is an 1656 // assignment with a right-hand side that is a single unary expression of 1657 // the form "range x". No guarantees are given for the left-hand side. 
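// For example, parsing a clause like "k, v := range m" with mode == rangeOk
// yields an *ast.AssignStmt whose Rhs is a single *ast.UnaryExpr with
// Op == token.RANGE and X holding the range operand.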
1658 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) { 1659 if p.trace { 1660 defer un(trace(p, "SimpleStmt")) 1661 } 1662 1663 x := p.parseLhsList() 1664 1665 switch p.tok { 1666 case 1667 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN, 1668 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN, 1669 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN, 1670 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN: 1671 // assignment statement, possibly part of a range clause 1672 pos, tok := p.pos, p.tok 1673 p.next() 1674 var y []ast.Expr 1675 isRange := false 1676 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) { 1677 pos := p.pos 1678 p.next() 1679 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}} 1680 isRange = true 1681 } else { 1682 y = p.parseRhsList() 1683 } 1684 as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y} 1685 if tok == token.DEFINE { 1686 p.shortVarDecl(as, x) 1687 } 1688 return as, isRange 1689 } 1690 1691 if len(x) > 1 { 1692 p.errorExpected(x[0].Pos(), "1 expression") 1693 // continue with first expression 1694 } 1695 1696 switch p.tok { 1697 case token.COLON: 1698 // labeled statement 1699 colon := p.pos 1700 p.next() 1701 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent { 1702 // Go spec: The scope of a label is the body of the function 1703 // in which it is declared and excludes the body of any nested 1704 // function. 1705 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()} 1706 p.declare(stmt, nil, p.labelScope, ast.Lbl, label) 1707 return stmt, false 1708 } 1709 // The label declaration typically starts at x[0].Pos(), but the label 1710 // declaration may be erroneous due to a token after that position (and 1711 // before the ':'). If SpuriousErrors is not set, the (only) error 1712 // reported for the line is the illegal label error instead of the token 1713 // before the ':' that caused the problem. Thus, use the (latest) colon 1714 // position for error reporting. 
1715 p.error(colon, "illegal label declaration") 1716 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false 1717 1718 case token.ARROW: 1719 // send statement 1720 arrow := p.pos 1721 p.next() 1722 y := p.parseRhs() 1723 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false 1724 1725 case token.INC, token.DEC: 1726 // increment or decrement 1727 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok} 1728 p.next() 1729 return s, false 1730 } 1731 1732 // expression 1733 return &ast.ExprStmt{X: x[0]}, false 1734 } 1735 1736 func (p *parser) parseCallExpr(callType string) *ast.CallExpr { 1737 x := p.parseRhsOrType() // could be a conversion: (some type)(x) 1738 if call, isCall := x.(*ast.CallExpr); isCall { 1739 return call 1740 } 1741 if _, isBad := x.(*ast.BadExpr); !isBad { 1742 // only report error if it's a new one 1743 p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType)) 1744 } 1745 return nil 1746 } 1747 1748 func (p *parser) parseGoStmt() ast.Stmt { 1749 if p.trace { 1750 defer un(trace(p, "GoStmt")) 1751 } 1752 1753 pos := p.expect(token.GO) 1754 call := p.parseCallExpr("go") 1755 p.expectSemi() 1756 if call == nil { 1757 return &ast.BadStmt{From: pos, To: pos + 2} // len("go") 1758 } 1759 1760 return &ast.GoStmt{Go: pos, Call: call} 1761 } 1762 1763 func (p *parser) parseDeferStmt() ast.Stmt { 1764 if p.trace { 1765 defer un(trace(p, "DeferStmt")) 1766 } 1767 1768 pos := p.expect(token.DEFER) 1769 call := p.parseCallExpr("defer") 1770 p.expectSemi() 1771 if call == nil { 1772 return &ast.BadStmt{From: pos, To: pos + 5} // len("defer") 1773 } 1774 1775 return &ast.DeferStmt{Defer: pos, Call: call} 1776 } 1777 1778 func (p *parser) parseReturnStmt() *ast.ReturnStmt { 1779 if p.trace { 1780 defer un(trace(p, "ReturnStmt")) 1781 } 1782 1783 pos := p.pos 1784 p.expect(token.RETURN) 1785 var x []ast.Expr 1786 if p.tok != token.SEMICOLON && p.tok != token.RBRACE { 1787 x = p.parseRhsList() 1788 } 1789 p.expectSemi() 1790 1791 return &ast.ReturnStmt{Return: pos, Results: x} 1792 } 1793 1794 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt { 1795 if p.trace { 1796 defer un(trace(p, "BranchStmt")) 1797 } 1798 1799 pos := p.expect(tok) 1800 var label *ast.Ident 1801 if tok != token.FALLTHROUGH && p.tok == token.IDENT { 1802 label = p.parseIdent() 1803 // add to list of unresolved targets 1804 n := len(p.targetStack) - 1 1805 p.targetStack[n] = append(p.targetStack[n], label) 1806 } 1807 p.expectSemi() 1808 1809 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label} 1810 } 1811 1812 func (p *parser) makeExpr(s ast.Stmt, want string) ast.Expr { 1813 if s == nil { 1814 return nil 1815 } 1816 if es, isExpr := s.(*ast.ExprStmt); isExpr { 1817 return p.checkExpr(es.X) 1818 } 1819 found := "simple statement" 1820 if _, isAss := s.(*ast.AssignStmt); isAss { 1821 found = "assignment" 1822 } 1823 p.error(s.Pos(), fmt.Sprintf("expected %s, found %s (missing parentheses around composite literal?)", want, found)) 1824 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())} 1825 } 1826 1827 // parseIfHeader is an adjusted version of parser.header 1828 // in cmd/compile/internal/syntax/parser.go, which has 1829 // been tuned for better error handling. 
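// It returns the init statement and the condition of the if header, reporting
// errors for a missing condition, for a var declaration in the initializer,
// and for an unexpected newline before the opening '{'.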
1830 func (p *parser) parseIfHeader() (init ast.Stmt, cond ast.Expr) { 1831 if p.tok == token.LBRACE { 1832 p.error(p.pos, "missing condition in if statement") 1833 return 1834 } 1835 // p.tok != token.LBRACE 1836 1837 outer := p.exprLev 1838 p.exprLev = -1 1839 1840 if p.tok != token.SEMICOLON { 1841 // accept potential variable declaration but complain 1842 if p.tok == token.VAR { 1843 p.next() 1844 p.error(p.pos, fmt.Sprintf("var declaration not allowed in 'IF' initializer")) 1845 } 1846 init, _ = p.parseSimpleStmt(basic) 1847 } 1848 1849 var condStmt ast.Stmt 1850 var semi struct { 1851 pos token.Pos 1852 lit string // ";" or "\n"; valid if pos.IsValid() 1853 } 1854 if p.tok != token.LBRACE { 1855 if p.tok == token.SEMICOLON { 1856 semi.pos = p.pos 1857 semi.lit = p.lit 1858 p.next() 1859 } else { 1860 p.expect(token.SEMICOLON) 1861 } 1862 if p.tok != token.LBRACE { 1863 condStmt, _ = p.parseSimpleStmt(basic) 1864 } 1865 } else { 1866 condStmt = init 1867 init = nil 1868 } 1869 1870 if condStmt != nil { 1871 cond = p.makeExpr(condStmt, "boolean expression") 1872 } else if semi.pos.IsValid() { 1873 if semi.lit == "\n" { 1874 p.error(semi.pos, "unexpected newline, expecting { after if clause") 1875 } else { 1876 p.error(semi.pos, "missing condition in if statement") 1877 } 1878 } 1879 1880 p.exprLev = outer 1881 return 1882 } 1883 1884 func (p *parser) parseIfStmt() *ast.IfStmt { 1885 if p.trace { 1886 defer un(trace(p, "IfStmt")) 1887 } 1888 1889 pos := p.expect(token.IF) 1890 p.openScope() 1891 defer p.closeScope() 1892 1893 init, cond := p.parseIfHeader() 1894 body := p.parseBlockStmt() 1895 1896 var else_ ast.Stmt 1897 if p.tok == token.ELSE { 1898 p.next() 1899 switch p.tok { 1900 case token.IF: 1901 else_ = p.parseIfStmt() 1902 case token.LBRACE: 1903 else_ = p.parseBlockStmt() 1904 p.expectSemi() 1905 default: 1906 p.errorExpected(p.pos, "if statement or block") 1907 else_ = &ast.BadStmt{From: p.pos, To: p.pos} 1908 } 1909 } else { 1910 p.expectSemi() 1911 } 1912 1913 return &ast.IfStmt{If: pos, Init: init, Cond: cond, Body: body, Else: else_} 1914 } 1915 1916 func (p *parser) parseTypeList() (list []ast.Expr) { 1917 if p.trace { 1918 defer un(trace(p, "TypeList")) 1919 } 1920 1921 list = append(list, p.parseType()) 1922 for p.tok == token.COMMA { 1923 p.next() 1924 list = append(list, p.parseType()) 1925 } 1926 1927 return 1928 } 1929 1930 func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause { 1931 if p.trace { 1932 defer un(trace(p, "CaseClause")) 1933 } 1934 1935 pos := p.pos 1936 var list []ast.Expr 1937 if p.tok == token.CASE { 1938 p.next() 1939 if typeSwitch { 1940 list = p.parseTypeList() 1941 } else { 1942 list = p.parseRhsList() 1943 } 1944 } else { 1945 p.expect(token.DEFAULT) 1946 } 1947 1948 colon := p.expect(token.COLON) 1949 p.openScope() 1950 body := p.parseStmtList() 1951 p.closeScope() 1952 1953 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body} 1954 } 1955 1956 func isTypeSwitchAssert(x ast.Expr) bool { 1957 a, ok := x.(*ast.TypeAssertExpr) 1958 return ok && a.Type == nil 1959 } 1960 1961 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool { 1962 switch t := s.(type) { 1963 case *ast.ExprStmt: 1964 // x.(type) 1965 return isTypeSwitchAssert(t.X) 1966 case *ast.AssignStmt: 1967 // v := x.(type) 1968 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) { 1969 switch t.Tok { 1970 case token.ASSIGN: 1971 // permit v = x.(type) but complain 1972 p.error(t.TokPos, "expected ':=', found '='") 1973 fallthrough 1974 case 
token.DEFINE: 1975 return true 1976 } 1977 } 1978 } 1979 return false 1980 } 1981 1982 func (p *parser) parseSwitchStmt() ast.Stmt { 1983 if p.trace { 1984 defer un(trace(p, "SwitchStmt")) 1985 } 1986 1987 pos := p.expect(token.SWITCH) 1988 p.openScope() 1989 defer p.closeScope() 1990 1991 var s1, s2 ast.Stmt 1992 if p.tok != token.LBRACE { 1993 prevLev := p.exprLev 1994 p.exprLev = -1 1995 if p.tok != token.SEMICOLON { 1996 s2, _ = p.parseSimpleStmt(basic) 1997 } 1998 if p.tok == token.SEMICOLON { 1999 p.next() 2000 s1 = s2 2001 s2 = nil 2002 if p.tok != token.LBRACE { 2003 // A TypeSwitchGuard may declare a variable in addition 2004 // to the variable declared in the initial SimpleStmt. 2005 // Introduce extra scope to avoid redeclaration errors: 2006 // 2007 // switch t := 0; t := x.(T) { ... } 2008 // 2009 // (this code is not valid Go because the first t 2010 // cannot be accessed and thus is never used, the extra 2011 // scope is needed for the correct error message). 2012 // 2013 // If we don't have a type switch, s2 must be an expression. 2014 // Having the extra nested but empty scope won't affect it. 2015 p.openScope() 2016 defer p.closeScope() 2017 s2, _ = p.parseSimpleStmt(basic) 2018 } 2019 } 2020 p.exprLev = prevLev 2021 } 2022 2023 typeSwitch := p.isTypeSwitchGuard(s2) 2024 lbrace := p.expect(token.LBRACE) 2025 var list []ast.Stmt 2026 for p.tok == token.CASE || p.tok == token.DEFAULT { 2027 list = append(list, p.parseCaseClause(typeSwitch)) 2028 } 2029 rbrace := p.expect(token.RBRACE) 2030 p.expectSemi() 2031 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 2032 2033 if typeSwitch { 2034 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body} 2035 } 2036 2037 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body} 2038 } 2039 2040 func (p *parser) parseCommClause() *ast.CommClause { 2041 if p.trace { 2042 defer un(trace(p, "CommClause")) 2043 } 2044 2045 p.openScope() 2046 pos := p.pos 2047 var comm ast.Stmt 2048 if p.tok == token.CASE { 2049 p.next() 2050 lhs := p.parseLhsList() 2051 if p.tok == token.ARROW { 2052 // SendStmt 2053 if len(lhs) > 1 { 2054 p.errorExpected(lhs[0].Pos(), "1 expression") 2055 // continue with first expression 2056 } 2057 arrow := p.pos 2058 p.next() 2059 rhs := p.parseRhs() 2060 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs} 2061 } else { 2062 // RecvStmt 2063 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE { 2064 // RecvStmt with assignment 2065 if len(lhs) > 2 { 2066 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions") 2067 // continue with first two expressions 2068 lhs = lhs[0:2] 2069 } 2070 pos := p.pos 2071 p.next() 2072 rhs := p.parseRhs() 2073 as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}} 2074 if tok == token.DEFINE { 2075 p.shortVarDecl(as, lhs) 2076 } 2077 comm = as 2078 } else { 2079 // lhs must be single receive operation 2080 if len(lhs) > 1 { 2081 p.errorExpected(lhs[0].Pos(), "1 expression") 2082 // continue with first expression 2083 } 2084 comm = &ast.ExprStmt{X: lhs[0]} 2085 } 2086 } 2087 } else { 2088 p.expect(token.DEFAULT) 2089 } 2090 2091 colon := p.expect(token.COLON) 2092 body := p.parseStmtList() 2093 p.closeScope() 2094 2095 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body} 2096 } 2097 2098 func (p *parser) parseSelectStmt() *ast.SelectStmt { 2099 if p.trace { 2100 defer un(trace(p, "SelectStmt")) 2101 } 2102 2103 pos := p.expect(token.SELECT) 2104 lbrace := 
func (p *parser) parseSelectStmt() *ast.SelectStmt {
	if p.trace {
		defer un(trace(p, "SelectStmt"))
	}

	pos := p.expect(token.SELECT)
	lbrace := p.expect(token.LBRACE)
	var list []ast.Stmt
	for p.tok == token.CASE || p.tok == token.DEFAULT {
		list = append(list, p.parseCommClause())
	}
	rbrace := p.expect(token.RBRACE)
	p.expectSemi()
	body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace}

	return &ast.SelectStmt{Select: pos, Body: body}
}

func (p *parser) parseForStmt() ast.Stmt {
	if p.trace {
		defer un(trace(p, "ForStmt"))
	}

	pos := p.expect(token.FOR)
	p.openScope()
	defer p.closeScope()

	var s1, s2, s3 ast.Stmt
	var isRange bool
	if p.tok != token.LBRACE {
		prevLev := p.exprLev
		p.exprLev = -1
		if p.tok != token.SEMICOLON {
			if p.tok == token.RANGE {
				// "for range x" (nil lhs in assignment)
				pos := p.pos
				p.next()
				y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}}
				s2 = &ast.AssignStmt{Rhs: y}
				isRange = true
			} else {
				s2, isRange = p.parseSimpleStmt(rangeOk)
			}
		}
		if !isRange && p.tok == token.SEMICOLON {
			p.next()
			s1 = s2
			s2 = nil
			if p.tok != token.SEMICOLON {
				s2, _ = p.parseSimpleStmt(basic)
			}
			p.expectSemi()
			if p.tok != token.LBRACE {
				s3, _ = p.parseSimpleStmt(basic)
			}
		}
		p.exprLev = prevLev
	}

	body := p.parseBlockStmt()
	p.expectSemi()

	if isRange {
		as := s2.(*ast.AssignStmt)
		// check lhs
		var key, value ast.Expr
		switch len(as.Lhs) {
		case 0:
			// nothing to do
		case 1:
			key = as.Lhs[0]
		case 2:
			key, value = as.Lhs[0], as.Lhs[1]
		default:
			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
			return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
		}
		// parseSimpleStmt returned a right-hand side that
		// is a single unary expression of the form "range x"
		x := as.Rhs[0].(*ast.UnaryExpr).X
		return &ast.RangeStmt{
			For:    pos,
			Key:    key,
			Value:  value,
			TokPos: as.TokPos,
			Tok:    as.Tok,
			X:      x,
			Body:   body,
		}
	}

	// regular for statement
	return &ast.ForStmt{
		For:  pos,
		Init: s1,
		Cond: p.makeExpr(s2, "boolean or range expression"),
		Post: s3,
		Body: body,
	}
}
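// Illustrative sketch (not part of the original source): the statement forms
// handled by parseForStmt above and the AST nodes they produce. The names
// cond, i, n, v, and xs are hypothetical.
//
//	for {}                     // *ast.ForStmt, no clauses
//	for cond {}                // *ast.ForStmt, condition only
//	for i := 0; i < n; i++ {}  // *ast.ForStmt with Init, Cond, Post
//	for range xs {}            // *ast.RangeStmt, no Key or Value
//	for i := range xs {}       // *ast.RangeStmt, Key only
//	for i, v := range xs {}    // *ast.RangeStmt, Key and Value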
func (p *parser) parseStmt() (s ast.Stmt) {
	if p.trace {
		defer un(trace(p, "Statement"))
	}

	switch p.tok {
	case token.CONST, token.TYPE, token.VAR:
		s = &ast.DeclStmt{Decl: p.parseDecl(stmtStart)}
	case
		// tokens that may start an expression
		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
		s, _ = p.parseSimpleStmt(labelOk)
		// because of the required look-ahead, labeled statements are
		// parsed by parseSimpleStmt - don't expect a semicolon after
		// them
		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
			p.expectSemi()
		}
	case token.GO:
		s = p.parseGoStmt()
	case token.DEFER:
		s = p.parseDeferStmt()
	case token.RETURN:
		s = p.parseReturnStmt()
	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
		s = p.parseBranchStmt(p.tok)
	case token.LBRACE:
		s = p.parseBlockStmt()
		p.expectSemi()
	case token.IF:
		s = p.parseIfStmt()
	case token.SWITCH:
		s = p.parseSwitchStmt()
	case token.SELECT:
		s = p.parseSelectStmt()
	case token.FOR:
		s = p.parseForStmt()
	case token.SEMICOLON:
		// Is it ever possible to have an implicit semicolon
		// producing an empty statement in a valid program?
		// (handle correctly anyway)
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
		p.next()
	case token.RBRACE:
		// a semicolon may be omitted before a closing "}"
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
	default:
		// no statement found
		pos := p.pos
		p.errorExpected(pos, "statement")
		p.advance(stmtStart)
		s = &ast.BadStmt{From: pos, To: p.pos}
	}

	return
}

// ----------------------------------------------------------------------------
// Declarations

type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec

func isValidImport(lit string) bool {
	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
	s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
	for _, r := range s {
		if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
			return false
		}
	}
	return s != ""
}

func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ImportSpec"))
	}

	var ident *ast.Ident
	switch p.tok {
	case token.PERIOD:
		ident = &ast.Ident{NamePos: p.pos, Name: "."}
		p.next()
	case token.IDENT:
		ident = p.parseIdent()
	}

	pos := p.pos
	var path string
	if p.tok == token.STRING {
		path = p.lit
		if !isValidImport(path) {
			p.error(pos, "invalid import path: "+path)
		}
		p.next()
	} else {
		p.expect(token.STRING) // use expect() error handling
	}
	p.expectSemi() // call before accessing p.lineComment

	// collect imports
	spec := &ast.ImportSpec{
		Doc:     doc,
		Name:    ident,
		Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
		Comment: p.lineComment,
	}
	p.imports = append(p.imports, spec)

	return spec
}
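// Illustrative sketch (not part of the original source): the import spec forms
// accepted by parseImportSpec above. The paths are hypothetical; the last one
// is rejected by isValidImport because it contains a space.
//
//	import (
//		"fmt"       // plain import
//		f "fmt"     // named import
//		. "fmt"     // dot import
//		"not valid" // error: invalid import path
//	)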
func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
	if p.trace {
		defer un(trace(p, keyword.String()+"Spec"))
	}

	pos := p.pos
	idents := p.parseIdentList()
	typ := p.tryType()
	var values []ast.Expr
	// always permit optional initialization for more tolerant parsing
	if p.tok == token.ASSIGN {
		p.next()
		values = p.parseRhsList()
	}
	p.expectSemi() // call before accessing p.lineComment

	switch keyword {
	case token.VAR:
		if typ == nil && values == nil {
			p.error(pos, "missing variable type or initialization")
		}
	case token.CONST:
		if values == nil && (iota == 0 || typ != nil) {
			p.error(pos, "missing constant value")
		}
	}

	// Go spec: The scope of a constant or variable identifier declared inside
	// a function begins at the end of the ConstSpec or VarSpec and ends at
	// the end of the innermost containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	spec := &ast.ValueSpec{
		Doc:     doc,
		Names:   idents,
		Type:    typ,
		Values:  values,
		Comment: p.lineComment,
	}
	kind := ast.Con
	if keyword == token.VAR {
		kind = ast.Var
	}
	p.declare(spec, iota, p.topScope, kind, idents...)

	return spec
}

func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "TypeSpec"))
	}

	ident := p.parseIdent()

	// Go spec: The scope of a type identifier declared inside a function begins
	// at the identifier in the TypeSpec and ends at the end of the innermost
	// containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	spec := &ast.TypeSpec{Doc: doc, Name: ident}
	p.declare(spec, nil, p.topScope, ast.Typ, ident)
	if p.tok == token.ASSIGN {
		spec.Assign = p.pos
		p.next()
	}
	spec.Type = p.parseType()
	p.expectSemi() // call before accessing p.lineComment
	spec.Comment = p.lineComment

	return spec
}

func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, "GenDecl("+keyword.String()+")"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Pos
	var list []ast.Spec
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
			list = append(list, f(p.leadComment, keyword, iota))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list = append(list, f(nil, keyword, 0))
	}

	return &ast.GenDecl{
		Doc:    doc,
		TokPos: pos,
		Tok:    keyword,
		Lparen: lparen,
		Specs:  list,
		Rparen: rparen,
	}
}
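// Illustrative sketch (not part of the original source): parseGenDecl above
// passes each spec's index within a parenthesized group to the spec parser as
// "iota", which is how parseValueSpec can accept constant specs without values
// after the first one. The constant names are hypothetical.
//
//	const (
//		A = iota // iota == 0: a value (or a type) is required here
//		B        // iota == 1: value and type may be omitted
//		C
//	)
//
//	const D // error: missing constant value (ungrouped spec, iota == 0)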
func (p *parser) parseFuncDecl() *ast.FuncDecl {
	if p.trace {
		defer un(trace(p, "FunctionDecl"))
	}

	doc := p.leadComment
	pos := p.expect(token.FUNC)
	scope := ast.NewScope(p.topScope) // function scope

	var recv *ast.FieldList
	if p.tok == token.LPAREN {
		recv = p.parseParameters(scope, false)
	}

	ident := p.parseIdent()

	params, results := p.parseSignature(scope)

	var body *ast.BlockStmt
	if p.tok == token.LBRACE {
		body = p.parseBody(scope)
	}
	p.expectSemi()

	decl := &ast.FuncDecl{
		Doc:  doc,
		Recv: recv,
		Name: ident,
		Type: &ast.FuncType{
			Func:    pos,
			Params:  params,
			Results: results,
		},
		Body: body,
	}
	if recv == nil {
		// Go spec: The scope of an identifier denoting a constant, type,
		// variable, or function (but not method) declared at top level
		// (outside any function) is the package block.
		//
		// init() functions cannot be referred to and there may
		// be more than one - don't put them in the pkgScope
		if ident.Name != "init" {
			p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
		}
	}

	return decl
}

func (p *parser) parseDecl(sync map[token.Token]bool) ast.Decl {
	if p.trace {
		defer un(trace(p, "Declaration"))
	}

	var f parseSpecFunction
	switch p.tok {
	case token.CONST, token.VAR:
		f = p.parseValueSpec

	case token.TYPE:
		f = p.parseTypeSpec

	case token.FUNC:
		return p.parseFuncDecl()

	default:
		pos := p.pos
		p.errorExpected(pos, "declaration")
		p.advance(sync)
		return &ast.BadDecl{From: pos, To: p.pos}
	}

	return p.parseGenDecl(p.tok, f)
}

// ----------------------------------------------------------------------------
// Source files

func (p *parser) parseFile() *ast.File {
	if p.trace {
		defer un(trace(p, "File"))
	}

	// Don't bother parsing the rest if we had errors scanning the first token.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	// package clause
	doc := p.leadComment
	pos := p.expect(token.PACKAGE)
	// Go spec: The package clause is not a declaration;
	// the package name does not appear in any scope.
	ident := p.parseIdent()
	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
		p.error(p.pos, "invalid package name _")
	}
	p.expectSemi()

	// Don't bother parsing the rest if we had errors parsing the package clause.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	p.openScope()
	p.pkgScope = p.topScope
	var decls []ast.Decl
	if p.mode&PackageClauseOnly == 0 {
		// import decls
		for p.tok == token.IMPORT {
			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
		}

		if p.mode&ImportsOnly == 0 {
			// rest of package body
			for p.tok != token.EOF {
				decls = append(decls, p.parseDecl(declStart))
			}
		}
	}
	p.closeScope()
	assert(p.topScope == nil, "unbalanced scopes")
	assert(p.labelScope == nil, "unbalanced label scopes")

	// resolve global identifiers within the same file
	i := 0
	for _, ident := range p.unresolved {
		// i <= index for current ident
		assert(ident.Obj == unresolved, "object already resolved")
		ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
		if ident.Obj == nil {
			p.unresolved[i] = ident
			i++
		}
	}

	return &ast.File{
		Doc:        doc,
		Package:    pos,
		Name:       ident,
		Decls:      decls,
		Scope:      p.pkgScope,
		Imports:    p.imports,
		Unresolved: p.unresolved[0:i],
		Comments:   p.comments,
	}
}
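// Illustrative sketch (not part of the original source): after parseFile has
// run, identifiers that could not be resolved against the file's package scope
// remain in ast.File.Unresolved (for example, names imported from other
// packages or declared in other files). The file name and source text are
// hypothetical; the comment on the loop states the expected, not guaranteed,
// output of this sketch.
//
//	fset := token.NewFileSet()
//	src := "package p\nvar x = fmt.Sprintf(\"%d\", y)\n"
//	f, err := parser.ParseFile(fset, "hello.go", src, 0)
//	if err == nil {
//		for _, id := range f.Unresolved {
//			fmt.Println(id.Name) // "fmt" and "y" should remain unresolved in this file
//		}
//	}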