github.com/d4l3k/go@v0.0.0-20151015000803-65fc379daeda/src/go/parser/parser.go (about) 1 // Copyright 2009 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 // Package parser implements a parser for Go source files. Input may be 6 // provided in a variety of forms (see the various Parse* functions); the 7 // output is an abstract syntax tree (AST) representing the Go source. The 8 // parser is invoked through one of the Parse* functions. 9 // 10 // The parser accepts a larger language than is syntactically permitted by 11 // the Go spec, for simplicity, and for improved robustness in the presence 12 // of syntax errors. For instance, in method declarations, the receiver is 13 // treated like an ordinary parameter list and thus may contain multiple 14 // entries where the spec permits exactly one. Consequently, the corresponding 15 // field in the AST (ast.FuncDecl.Recv) field is not restricted to one entry. 16 // 17 package parser 18 19 import ( 20 "fmt" 21 "go/ast" 22 "go/scanner" 23 "go/token" 24 "strconv" 25 "strings" 26 "unicode" 27 ) 28 29 // The parser structure holds the parser's internal state. 30 type parser struct { 31 file *token.File 32 errors scanner.ErrorList 33 scanner scanner.Scanner 34 35 // Tracing/debugging 36 mode Mode // parsing mode 37 trace bool // == (mode & Trace != 0) 38 indent int // indentation used for tracing output 39 40 // Comments 41 comments []*ast.CommentGroup 42 leadComment *ast.CommentGroup // last lead comment 43 lineComment *ast.CommentGroup // last line comment 44 45 // Next token 46 pos token.Pos // token position 47 tok token.Token // one token look-ahead 48 lit string // token literal 49 50 // Error recovery 51 // (used to limit the number of calls to syncXXX functions 52 // w/o making scanning progress - avoids potential endless 53 // loops across multiple parser functions during error recovery) 54 syncPos token.Pos // last synchronization position 55 syncCnt int // number of calls to syncXXX without progress 56 57 // Non-syntactic parser control 58 exprLev int // < 0: in control clause, >= 0: in expression 59 inRhs bool // if set, the parser is parsing a rhs expression 60 61 // Ordinary identifier scopes 62 pkgScope *ast.Scope // pkgScope.Outer == nil 63 topScope *ast.Scope // top-most scope; may be pkgScope 64 unresolved []*ast.Ident // unresolved identifiers 65 imports []*ast.ImportSpec // list of imports 66 67 // Label scopes 68 // (maintained by open/close LabelScope) 69 labelScope *ast.Scope // label scope for current function 70 targetStack [][]*ast.Ident // stack of unresolved labels 71 } 72 73 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) { 74 p.file = fset.AddFile(filename, -1, len(src)) 75 var m scanner.Mode 76 if mode&ParseComments != 0 { 77 m = scanner.ScanComments 78 } 79 eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) } 80 p.scanner.Init(p.file, src, eh, m) 81 82 p.mode = mode 83 p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently) 84 85 p.next() 86 } 87 88 // ---------------------------------------------------------------------------- 89 // Scoping support 90 91 func (p *parser) openScope() { 92 p.topScope = ast.NewScope(p.topScope) 93 } 94 95 func (p *parser) closeScope() { 96 p.topScope = p.topScope.Outer 97 } 98 99 func (p *parser) openLabelScope() { 100 p.labelScope = ast.NewScope(p.labelScope) 101 p.targetStack = append(p.targetStack, nil) 102 } 103 104 func (p *parser) closeLabelScope() { 105 // resolve labels 106 n := len(p.targetStack) - 1 107 scope := p.labelScope 108 for _, ident := range p.targetStack[n] { 109 ident.Obj = scope.Lookup(ident.Name) 110 if ident.Obj == nil && p.mode&DeclarationErrors != 0 { 111 p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name)) 112 } 113 } 114 // pop label scope 115 p.targetStack = p.targetStack[0:n] 116 p.labelScope = p.labelScope.Outer 117 } 118 119 func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) { 120 for _, ident := range idents { 121 assert(ident.Obj == nil, "identifier already declared or resolved") 122 obj := ast.NewObj(kind, ident.Name) 123 // remember the corresponding declaration for redeclaration 124 // errors and global variable resolution/typechecking phase 125 obj.Decl = decl 126 obj.Data = data 127 ident.Obj = obj 128 if ident.Name != "_" { 129 if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 { 130 prevDecl := "" 131 if pos := alt.Pos(); pos.IsValid() { 132 prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos)) 133 } 134 p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl)) 135 } 136 } 137 } 138 } 139 140 func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) { 141 // Go spec: A short variable declaration may redeclare variables 142 // provided they were originally declared in the same block with 143 // the same type, and at least one of the non-blank variables is new. 144 n := 0 // number of new variables 145 for _, x := range list { 146 if ident, isIdent := x.(*ast.Ident); isIdent { 147 assert(ident.Obj == nil, "identifier already declared or resolved") 148 obj := ast.NewObj(ast.Var, ident.Name) 149 // remember corresponding assignment for other tools 150 obj.Decl = decl 151 ident.Obj = obj 152 if ident.Name != "_" { 153 if alt := p.topScope.Insert(obj); alt != nil { 154 ident.Obj = alt // redeclaration 155 } else { 156 n++ // new declaration 157 } 158 } 159 } else { 160 p.errorExpected(x.Pos(), "identifier on left side of :=") 161 } 162 } 163 if n == 0 && p.mode&DeclarationErrors != 0 { 164 p.error(list[0].Pos(), "no new variables on left side of :=") 165 } 166 } 167 168 // The unresolved object is a sentinel to mark identifiers that have been added 169 // to the list of unresolved identifiers. The sentinel is only used for verifying 170 // internal consistency. 171 var unresolved = new(ast.Object) 172 173 // If x is an identifier, tryResolve attempts to resolve x by looking up 174 // the object it denotes. If no object is found and collectUnresolved is 175 // set, x is marked as unresolved and collected in the list of unresolved 176 // identifiers. 177 // 178 func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) { 179 // nothing to do if x is not an identifier or the blank identifier 180 ident, _ := x.(*ast.Ident) 181 if ident == nil { 182 return 183 } 184 assert(ident.Obj == nil, "identifier already declared or resolved") 185 if ident.Name == "_" { 186 return 187 } 188 // try to resolve the identifier 189 for s := p.topScope; s != nil; s = s.Outer { 190 if obj := s.Lookup(ident.Name); obj != nil { 191 ident.Obj = obj 192 return 193 } 194 } 195 // all local scopes are known, so any unresolved identifier 196 // must be found either in the file scope, package scope 197 // (perhaps in another file), or universe scope --- collect 198 // them so that they can be resolved later 199 if collectUnresolved { 200 ident.Obj = unresolved 201 p.unresolved = append(p.unresolved, ident) 202 } 203 } 204 205 func (p *parser) resolve(x ast.Expr) { 206 p.tryResolve(x, true) 207 } 208 209 // ---------------------------------------------------------------------------- 210 // Parsing support 211 212 func (p *parser) printTrace(a ...interface{}) { 213 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " 214 const n = len(dots) 215 pos := p.file.Position(p.pos) 216 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) 217 i := 2 * p.indent 218 for i > n { 219 fmt.Print(dots) 220 i -= n 221 } 222 // i <= n 223 fmt.Print(dots[0:i]) 224 fmt.Println(a...) 225 } 226 227 func trace(p *parser, msg string) *parser { 228 p.printTrace(msg, "(") 229 p.indent++ 230 return p 231 } 232 233 // Usage pattern: defer un(trace(p, "...")) 234 func un(p *parser) { 235 p.indent-- 236 p.printTrace(")") 237 } 238 239 // Advance to the next token. 240 func (p *parser) next0() { 241 // Because of one-token look-ahead, print the previous token 242 // when tracing as it provides a more readable output. The 243 // very first token (!p.pos.IsValid()) is not initialized 244 // (it is token.ILLEGAL), so don't print it . 245 if p.trace && p.pos.IsValid() { 246 s := p.tok.String() 247 switch { 248 case p.tok.IsLiteral(): 249 p.printTrace(s, p.lit) 250 case p.tok.IsOperator(), p.tok.IsKeyword(): 251 p.printTrace("\"" + s + "\"") 252 default: 253 p.printTrace(s) 254 } 255 } 256 257 p.pos, p.tok, p.lit = p.scanner.Scan() 258 } 259 260 // Consume a comment and return it and the line on which it ends. 261 func (p *parser) consumeComment() (comment *ast.Comment, endline int) { 262 // /*-style comments may end on a different line than where they start. 263 // Scan the comment for '\n' chars and adjust endline accordingly. 264 endline = p.file.Line(p.pos) 265 if p.lit[1] == '*' { 266 // don't use range here - no need to decode Unicode code points 267 for i := 0; i < len(p.lit); i++ { 268 if p.lit[i] == '\n' { 269 endline++ 270 } 271 } 272 } 273 274 comment = &ast.Comment{Slash: p.pos, Text: p.lit} 275 p.next0() 276 277 return 278 } 279 280 // Consume a group of adjacent comments, add it to the parser's 281 // comments list, and return it together with the line at which 282 // the last comment in the group ends. A non-comment token or n 283 // empty lines terminate a comment group. 284 // 285 func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) { 286 var list []*ast.Comment 287 endline = p.file.Line(p.pos) 288 for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n { 289 var comment *ast.Comment 290 comment, endline = p.consumeComment() 291 list = append(list, comment) 292 } 293 294 // add comment group to the comments list 295 comments = &ast.CommentGroup{List: list} 296 p.comments = append(p.comments, comments) 297 298 return 299 } 300 301 // Advance to the next non-comment token. In the process, collect 302 // any comment groups encountered, and remember the last lead and 303 // and line comments. 304 // 305 // A lead comment is a comment group that starts and ends in a 306 // line without any other tokens and that is followed by a non-comment 307 // token on the line immediately after the comment group. 308 // 309 // A line comment is a comment group that follows a non-comment 310 // token on the same line, and that has no tokens after it on the line 311 // where it ends. 312 // 313 // Lead and line comments may be considered documentation that is 314 // stored in the AST. 315 // 316 func (p *parser) next() { 317 p.leadComment = nil 318 p.lineComment = nil 319 prev := p.pos 320 p.next0() 321 322 if p.tok == token.COMMENT { 323 var comment *ast.CommentGroup 324 var endline int 325 326 if p.file.Line(p.pos) == p.file.Line(prev) { 327 // The comment is on same line as the previous token; it 328 // cannot be a lead comment but may be a line comment. 329 comment, endline = p.consumeCommentGroup(0) 330 if p.file.Line(p.pos) != endline { 331 // The next token is on a different line, thus 332 // the last comment group is a line comment. 333 p.lineComment = comment 334 } 335 } 336 337 // consume successor comments, if any 338 endline = -1 339 for p.tok == token.COMMENT { 340 comment, endline = p.consumeCommentGroup(1) 341 } 342 343 if endline+1 == p.file.Line(p.pos) { 344 // The next token is following on the line immediately after the 345 // comment group, thus the last comment group is a lead comment. 346 p.leadComment = comment 347 } 348 } 349 } 350 351 // A bailout panic is raised to indicate early termination. 352 type bailout struct{} 353 354 func (p *parser) error(pos token.Pos, msg string) { 355 epos := p.file.Position(pos) 356 357 // If AllErrors is not set, discard errors reported on the same line 358 // as the last recorded error and stop parsing if there are more than 359 // 10 errors. 360 if p.mode&AllErrors == 0 { 361 n := len(p.errors) 362 if n > 0 && p.errors[n-1].Pos.Line == epos.Line { 363 return // discard - likely a spurious error 364 } 365 if n > 10 { 366 panic(bailout{}) 367 } 368 } 369 370 p.errors.Add(epos, msg) 371 } 372 373 func (p *parser) errorExpected(pos token.Pos, msg string) { 374 msg = "expected " + msg 375 if pos == p.pos { 376 // the error happened at the current position; 377 // make the error message more specific 378 if p.tok == token.SEMICOLON && p.lit == "\n" { 379 msg += ", found newline" 380 } else { 381 msg += ", found '" + p.tok.String() + "'" 382 if p.tok.IsLiteral() { 383 msg += " " + p.lit 384 } 385 } 386 } 387 p.error(pos, msg) 388 } 389 390 func (p *parser) expect(tok token.Token) token.Pos { 391 pos := p.pos 392 if p.tok != tok { 393 p.errorExpected(pos, "'"+tok.String()+"'") 394 } 395 p.next() // make progress 396 return pos 397 } 398 399 // expectClosing is like expect but provides a better error message 400 // for the common case of a missing comma before a newline. 401 // 402 func (p *parser) expectClosing(tok token.Token, context string) token.Pos { 403 if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" { 404 p.error(p.pos, "missing ',' before newline in "+context) 405 p.next() 406 } 407 return p.expect(tok) 408 } 409 410 func (p *parser) expectSemi() { 411 // semicolon is optional before a closing ')' or '}' 412 if p.tok != token.RPAREN && p.tok != token.RBRACE { 413 switch p.tok { 414 case token.COMMA: 415 // permit a ',' instead of a ';' but complain 416 p.errorExpected(p.pos, "';'") 417 fallthrough 418 case token.SEMICOLON: 419 p.next() 420 default: 421 p.errorExpected(p.pos, "';'") 422 syncStmt(p) 423 } 424 } 425 } 426 427 func (p *parser) atComma(context string, follow token.Token) bool { 428 if p.tok == token.COMMA { 429 return true 430 } 431 if p.tok != follow { 432 msg := "missing ','" 433 if p.tok == token.SEMICOLON && p.lit == "\n" { 434 msg += " before newline" 435 } 436 p.error(p.pos, msg+" in "+context) 437 return true // "insert" comma and continue 438 } 439 return false 440 } 441 442 func assert(cond bool, msg string) { 443 if !cond { 444 panic("go/parser internal error: " + msg) 445 } 446 } 447 448 // syncStmt advances to the next statement. 449 // Used for synchronization after an error. 450 // 451 func syncStmt(p *parser) { 452 for { 453 switch p.tok { 454 case token.BREAK, token.CONST, token.CONTINUE, token.DEFER, 455 token.FALLTHROUGH, token.FOR, token.GO, token.GOTO, 456 token.IF, token.RETURN, token.SELECT, token.SWITCH, 457 token.TYPE, token.VAR: 458 // Return only if parser made some progress since last 459 // sync or if it has not reached 10 sync calls without 460 // progress. Otherwise consume at least one token to 461 // avoid an endless parser loop (it is possible that 462 // both parseOperand and parseStmt call syncStmt and 463 // correctly do not advance, thus the need for the 464 // invocation limit p.syncCnt). 465 if p.pos == p.syncPos && p.syncCnt < 10 { 466 p.syncCnt++ 467 return 468 } 469 if p.pos > p.syncPos { 470 p.syncPos = p.pos 471 p.syncCnt = 0 472 return 473 } 474 // Reaching here indicates a parser bug, likely an 475 // incorrect token list in this function, but it only 476 // leads to skipping of possibly correct code if a 477 // previous error is present, and thus is preferred 478 // over a non-terminating parse. 479 case token.EOF: 480 return 481 } 482 p.next() 483 } 484 } 485 486 // syncDecl advances to the next declaration. 487 // Used for synchronization after an error. 488 // 489 func syncDecl(p *parser) { 490 for { 491 switch p.tok { 492 case token.CONST, token.TYPE, token.VAR: 493 // see comments in syncStmt 494 if p.pos == p.syncPos && p.syncCnt < 10 { 495 p.syncCnt++ 496 return 497 } 498 if p.pos > p.syncPos { 499 p.syncPos = p.pos 500 p.syncCnt = 0 501 return 502 } 503 case token.EOF: 504 return 505 } 506 p.next() 507 } 508 } 509 510 // safePos returns a valid file position for a given position: If pos 511 // is valid to begin with, safePos returns pos. If pos is out-of-range, 512 // safePos returns the EOF position. 513 // 514 // This is hack to work around "artificial" end positions in the AST which 515 // are computed by adding 1 to (presumably valid) token positions. If the 516 // token positions are invalid due to parse errors, the resulting end position 517 // may be past the file's EOF position, which would lead to panics if used 518 // later on. 519 // 520 func (p *parser) safePos(pos token.Pos) (res token.Pos) { 521 defer func() { 522 if recover() != nil { 523 res = token.Pos(p.file.Base() + p.file.Size()) // EOF position 524 } 525 }() 526 _ = p.file.Offset(pos) // trigger a panic if position is out-of-range 527 return pos 528 } 529 530 // ---------------------------------------------------------------------------- 531 // Identifiers 532 533 func (p *parser) parseIdent() *ast.Ident { 534 pos := p.pos 535 name := "_" 536 if p.tok == token.IDENT { 537 name = p.lit 538 p.next() 539 } else { 540 p.expect(token.IDENT) // use expect() error handling 541 } 542 return &ast.Ident{NamePos: pos, Name: name} 543 } 544 545 func (p *parser) parseIdentList() (list []*ast.Ident) { 546 if p.trace { 547 defer un(trace(p, "IdentList")) 548 } 549 550 list = append(list, p.parseIdent()) 551 for p.tok == token.COMMA { 552 p.next() 553 list = append(list, p.parseIdent()) 554 } 555 556 return 557 } 558 559 // ---------------------------------------------------------------------------- 560 // Common productions 561 562 // If lhs is set, result list elements which are identifiers are not resolved. 563 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) { 564 if p.trace { 565 defer un(trace(p, "ExpressionList")) 566 } 567 568 list = append(list, p.checkExpr(p.parseExpr(lhs))) 569 for p.tok == token.COMMA { 570 p.next() 571 list = append(list, p.checkExpr(p.parseExpr(lhs))) 572 } 573 574 return 575 } 576 577 func (p *parser) parseLhsList() []ast.Expr { 578 old := p.inRhs 579 p.inRhs = false 580 list := p.parseExprList(true) 581 switch p.tok { 582 case token.DEFINE: 583 // lhs of a short variable declaration 584 // but doesn't enter scope until later: 585 // caller must call p.shortVarDecl(p.makeIdentList(list)) 586 // at appropriate time. 587 case token.COLON: 588 // lhs of a label declaration or a communication clause of a select 589 // statement (parseLhsList is not called when parsing the case clause 590 // of a switch statement): 591 // - labels are declared by the caller of parseLhsList 592 // - for communication clauses, if there is a stand-alone identifier 593 // followed by a colon, we have a syntax error; there is no need 594 // to resolve the identifier in that case 595 default: 596 // identifiers must be declared elsewhere 597 for _, x := range list { 598 p.resolve(x) 599 } 600 } 601 p.inRhs = old 602 return list 603 } 604 605 func (p *parser) parseRhsList() []ast.Expr { 606 old := p.inRhs 607 p.inRhs = true 608 list := p.parseExprList(false) 609 p.inRhs = old 610 return list 611 } 612 613 // ---------------------------------------------------------------------------- 614 // Types 615 616 func (p *parser) parseType() ast.Expr { 617 if p.trace { 618 defer un(trace(p, "Type")) 619 } 620 621 typ := p.tryType() 622 623 if typ == nil { 624 pos := p.pos 625 p.errorExpected(pos, "type") 626 p.next() // make progress 627 return &ast.BadExpr{From: pos, To: p.pos} 628 } 629 630 return typ 631 } 632 633 // If the result is an identifier, it is not resolved. 634 func (p *parser) parseTypeName() ast.Expr { 635 if p.trace { 636 defer un(trace(p, "TypeName")) 637 } 638 639 ident := p.parseIdent() 640 // don't resolve ident yet - it may be a parameter or field name 641 642 if p.tok == token.PERIOD { 643 // ident is a package name 644 p.next() 645 p.resolve(ident) 646 sel := p.parseIdent() 647 return &ast.SelectorExpr{X: ident, Sel: sel} 648 } 649 650 return ident 651 } 652 653 func (p *parser) parseArrayType() ast.Expr { 654 if p.trace { 655 defer un(trace(p, "ArrayType")) 656 } 657 658 lbrack := p.expect(token.LBRACK) 659 p.exprLev++ 660 var len ast.Expr 661 // always permit ellipsis for more fault-tolerant parsing 662 if p.tok == token.ELLIPSIS { 663 len = &ast.Ellipsis{Ellipsis: p.pos} 664 p.next() 665 } else if p.tok != token.RBRACK { 666 len = p.parseRhs() 667 } 668 p.exprLev-- 669 p.expect(token.RBRACK) 670 elt := p.parseType() 671 672 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt} 673 } 674 675 func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident { 676 idents := make([]*ast.Ident, len(list)) 677 for i, x := range list { 678 ident, isIdent := x.(*ast.Ident) 679 if !isIdent { 680 if _, isBad := x.(*ast.BadExpr); !isBad { 681 // only report error if it's a new one 682 p.errorExpected(x.Pos(), "identifier") 683 } 684 ident = &ast.Ident{NamePos: x.Pos(), Name: "_"} 685 } 686 idents[i] = ident 687 } 688 return idents 689 } 690 691 func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field { 692 if p.trace { 693 defer un(trace(p, "FieldDecl")) 694 } 695 696 doc := p.leadComment 697 698 // 1st FieldDecl 699 // A type name used as an anonymous field looks like a field identifier. 700 var list []ast.Expr 701 for { 702 list = append(list, p.parseVarType(false)) 703 if p.tok != token.COMMA { 704 break 705 } 706 p.next() 707 } 708 709 typ := p.tryVarType(false) 710 711 // analyze case 712 var idents []*ast.Ident 713 if typ != nil { 714 // IdentifierList Type 715 idents = p.makeIdentList(list) 716 } else { 717 // ["*"] TypeName (AnonymousField) 718 typ = list[0] // we always have at least one element 719 if n := len(list); n > 1 { 720 p.errorExpected(p.pos, "type") 721 typ = &ast.BadExpr{From: p.pos, To: p.pos} 722 } else if !isTypeName(deref(typ)) { 723 p.errorExpected(typ.Pos(), "anonymous field") 724 typ = &ast.BadExpr{From: typ.Pos(), To: p.safePos(typ.End())} 725 } 726 } 727 728 // Tag 729 var tag *ast.BasicLit 730 if p.tok == token.STRING { 731 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 732 p.next() 733 } 734 735 p.expectSemi() // call before accessing p.linecomment 736 737 field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment} 738 p.declare(field, nil, scope, ast.Var, idents...) 739 p.resolve(typ) 740 741 return field 742 } 743 744 func (p *parser) parseStructType() *ast.StructType { 745 if p.trace { 746 defer un(trace(p, "StructType")) 747 } 748 749 pos := p.expect(token.STRUCT) 750 lbrace := p.expect(token.LBRACE) 751 scope := ast.NewScope(nil) // struct scope 752 var list []*ast.Field 753 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN { 754 // a field declaration cannot start with a '(' but we accept 755 // it here for more robust parsing and better error messages 756 // (parseFieldDecl will check and complain if necessary) 757 list = append(list, p.parseFieldDecl(scope)) 758 } 759 rbrace := p.expect(token.RBRACE) 760 761 return &ast.StructType{ 762 Struct: pos, 763 Fields: &ast.FieldList{ 764 Opening: lbrace, 765 List: list, 766 Closing: rbrace, 767 }, 768 } 769 } 770 771 func (p *parser) parsePointerType() *ast.StarExpr { 772 if p.trace { 773 defer un(trace(p, "PointerType")) 774 } 775 776 star := p.expect(token.MUL) 777 base := p.parseType() 778 779 return &ast.StarExpr{Star: star, X: base} 780 } 781 782 // If the result is an identifier, it is not resolved. 783 func (p *parser) tryVarType(isParam bool) ast.Expr { 784 if isParam && p.tok == token.ELLIPSIS { 785 pos := p.pos 786 p.next() 787 typ := p.tryIdentOrType() // don't use parseType so we can provide better error message 788 if typ != nil { 789 p.resolve(typ) 790 } else { 791 p.error(pos, "'...' parameter is missing type") 792 typ = &ast.BadExpr{From: pos, To: p.pos} 793 } 794 return &ast.Ellipsis{Ellipsis: pos, Elt: typ} 795 } 796 return p.tryIdentOrType() 797 } 798 799 // If the result is an identifier, it is not resolved. 800 func (p *parser) parseVarType(isParam bool) ast.Expr { 801 typ := p.tryVarType(isParam) 802 if typ == nil { 803 pos := p.pos 804 p.errorExpected(pos, "type") 805 p.next() // make progress 806 typ = &ast.BadExpr{From: pos, To: p.pos} 807 } 808 return typ 809 } 810 811 func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) { 812 if p.trace { 813 defer un(trace(p, "ParameterList")) 814 } 815 816 // 1st ParameterDecl 817 // A list of identifiers looks like a list of type names. 818 var list []ast.Expr 819 for { 820 list = append(list, p.parseVarType(ellipsisOk)) 821 if p.tok != token.COMMA { 822 break 823 } 824 p.next() 825 if p.tok == token.RPAREN { 826 break 827 } 828 } 829 830 // analyze case 831 if typ := p.tryVarType(ellipsisOk); typ != nil { 832 // IdentifierList Type 833 idents := p.makeIdentList(list) 834 field := &ast.Field{Names: idents, Type: typ} 835 params = append(params, field) 836 // Go spec: The scope of an identifier denoting a function 837 // parameter or result variable is the function body. 838 p.declare(field, nil, scope, ast.Var, idents...) 839 p.resolve(typ) 840 if !p.atComma("parameter list", token.RPAREN) { 841 return 842 } 843 p.next() 844 for p.tok != token.RPAREN && p.tok != token.EOF { 845 idents := p.parseIdentList() 846 typ := p.parseVarType(ellipsisOk) 847 field := &ast.Field{Names: idents, Type: typ} 848 params = append(params, field) 849 // Go spec: The scope of an identifier denoting a function 850 // parameter or result variable is the function body. 851 p.declare(field, nil, scope, ast.Var, idents...) 852 p.resolve(typ) 853 if !p.atComma("parameter list", token.RPAREN) { 854 break 855 } 856 p.next() 857 } 858 return 859 } 860 861 // Type { "," Type } (anonymous parameters) 862 params = make([]*ast.Field, len(list)) 863 for i, typ := range list { 864 p.resolve(typ) 865 params[i] = &ast.Field{Type: typ} 866 } 867 return 868 } 869 870 func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList { 871 if p.trace { 872 defer un(trace(p, "Parameters")) 873 } 874 875 var params []*ast.Field 876 lparen := p.expect(token.LPAREN) 877 if p.tok != token.RPAREN { 878 params = p.parseParameterList(scope, ellipsisOk) 879 } 880 rparen := p.expect(token.RPAREN) 881 882 return &ast.FieldList{Opening: lparen, List: params, Closing: rparen} 883 } 884 885 func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList { 886 if p.trace { 887 defer un(trace(p, "Result")) 888 } 889 890 if p.tok == token.LPAREN { 891 return p.parseParameters(scope, false) 892 } 893 894 typ := p.tryType() 895 if typ != nil { 896 list := make([]*ast.Field, 1) 897 list[0] = &ast.Field{Type: typ} 898 return &ast.FieldList{List: list} 899 } 900 901 return nil 902 } 903 904 func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) { 905 if p.trace { 906 defer un(trace(p, "Signature")) 907 } 908 909 params = p.parseParameters(scope, true) 910 results = p.parseResult(scope) 911 912 return 913 } 914 915 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) { 916 if p.trace { 917 defer un(trace(p, "FuncType")) 918 } 919 920 pos := p.expect(token.FUNC) 921 scope := ast.NewScope(p.topScope) // function scope 922 params, results := p.parseSignature(scope) 923 924 return &ast.FuncType{Func: pos, Params: params, Results: results}, scope 925 } 926 927 func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field { 928 if p.trace { 929 defer un(trace(p, "MethodSpec")) 930 } 931 932 doc := p.leadComment 933 var idents []*ast.Ident 934 var typ ast.Expr 935 x := p.parseTypeName() 936 if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN { 937 // method 938 idents = []*ast.Ident{ident} 939 scope := ast.NewScope(nil) // method scope 940 params, results := p.parseSignature(scope) 941 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results} 942 } else { 943 // embedded interface 944 typ = x 945 p.resolve(typ) 946 } 947 p.expectSemi() // call before accessing p.linecomment 948 949 spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment} 950 p.declare(spec, nil, scope, ast.Fun, idents...) 951 952 return spec 953 } 954 955 func (p *parser) parseInterfaceType() *ast.InterfaceType { 956 if p.trace { 957 defer un(trace(p, "InterfaceType")) 958 } 959 960 pos := p.expect(token.INTERFACE) 961 lbrace := p.expect(token.LBRACE) 962 scope := ast.NewScope(nil) // interface scope 963 var list []*ast.Field 964 for p.tok == token.IDENT { 965 list = append(list, p.parseMethodSpec(scope)) 966 } 967 rbrace := p.expect(token.RBRACE) 968 969 return &ast.InterfaceType{ 970 Interface: pos, 971 Methods: &ast.FieldList{ 972 Opening: lbrace, 973 List: list, 974 Closing: rbrace, 975 }, 976 } 977 } 978 979 func (p *parser) parseMapType() *ast.MapType { 980 if p.trace { 981 defer un(trace(p, "MapType")) 982 } 983 984 pos := p.expect(token.MAP) 985 p.expect(token.LBRACK) 986 key := p.parseType() 987 p.expect(token.RBRACK) 988 value := p.parseType() 989 990 return &ast.MapType{Map: pos, Key: key, Value: value} 991 } 992 993 func (p *parser) parseChanType() *ast.ChanType { 994 if p.trace { 995 defer un(trace(p, "ChanType")) 996 } 997 998 pos := p.pos 999 dir := ast.SEND | ast.RECV 1000 var arrow token.Pos 1001 if p.tok == token.CHAN { 1002 p.next() 1003 if p.tok == token.ARROW { 1004 arrow = p.pos 1005 p.next() 1006 dir = ast.SEND 1007 } 1008 } else { 1009 arrow = p.expect(token.ARROW) 1010 p.expect(token.CHAN) 1011 dir = ast.RECV 1012 } 1013 value := p.parseType() 1014 1015 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value} 1016 } 1017 1018 // If the result is an identifier, it is not resolved. 1019 func (p *parser) tryIdentOrType() ast.Expr { 1020 switch p.tok { 1021 case token.IDENT: 1022 return p.parseTypeName() 1023 case token.LBRACK: 1024 return p.parseArrayType() 1025 case token.STRUCT: 1026 return p.parseStructType() 1027 case token.MUL: 1028 return p.parsePointerType() 1029 case token.FUNC: 1030 typ, _ := p.parseFuncType() 1031 return typ 1032 case token.INTERFACE: 1033 return p.parseInterfaceType() 1034 case token.MAP: 1035 return p.parseMapType() 1036 case token.CHAN, token.ARROW: 1037 return p.parseChanType() 1038 case token.LPAREN: 1039 lparen := p.pos 1040 p.next() 1041 typ := p.parseType() 1042 rparen := p.expect(token.RPAREN) 1043 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen} 1044 } 1045 1046 // no type found 1047 return nil 1048 } 1049 1050 func (p *parser) tryType() ast.Expr { 1051 typ := p.tryIdentOrType() 1052 if typ != nil { 1053 p.resolve(typ) 1054 } 1055 return typ 1056 } 1057 1058 // ---------------------------------------------------------------------------- 1059 // Blocks 1060 1061 func (p *parser) parseStmtList() (list []ast.Stmt) { 1062 if p.trace { 1063 defer un(trace(p, "StatementList")) 1064 } 1065 1066 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF { 1067 list = append(list, p.parseStmt()) 1068 } 1069 1070 return 1071 } 1072 1073 func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt { 1074 if p.trace { 1075 defer un(trace(p, "Body")) 1076 } 1077 1078 lbrace := p.expect(token.LBRACE) 1079 p.topScope = scope // open function scope 1080 p.openLabelScope() 1081 list := p.parseStmtList() 1082 p.closeLabelScope() 1083 p.closeScope() 1084 rbrace := p.expect(token.RBRACE) 1085 1086 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1087 } 1088 1089 func (p *parser) parseBlockStmt() *ast.BlockStmt { 1090 if p.trace { 1091 defer un(trace(p, "BlockStmt")) 1092 } 1093 1094 lbrace := p.expect(token.LBRACE) 1095 p.openScope() 1096 list := p.parseStmtList() 1097 p.closeScope() 1098 rbrace := p.expect(token.RBRACE) 1099 1100 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1101 } 1102 1103 // ---------------------------------------------------------------------------- 1104 // Expressions 1105 1106 func (p *parser) parseFuncTypeOrLit() ast.Expr { 1107 if p.trace { 1108 defer un(trace(p, "FuncTypeOrLit")) 1109 } 1110 1111 typ, scope := p.parseFuncType() 1112 if p.tok != token.LBRACE { 1113 // function type only 1114 return typ 1115 } 1116 1117 p.exprLev++ 1118 body := p.parseBody(scope) 1119 p.exprLev-- 1120 1121 return &ast.FuncLit{Type: typ, Body: body} 1122 } 1123 1124 // parseOperand may return an expression or a raw type (incl. array 1125 // types of the form [...]T. Callers must verify the result. 1126 // If lhs is set and the result is an identifier, it is not resolved. 1127 // 1128 func (p *parser) parseOperand(lhs bool) ast.Expr { 1129 if p.trace { 1130 defer un(trace(p, "Operand")) 1131 } 1132 1133 switch p.tok { 1134 case token.IDENT: 1135 x := p.parseIdent() 1136 if !lhs { 1137 p.resolve(x) 1138 } 1139 return x 1140 1141 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING: 1142 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 1143 p.next() 1144 return x 1145 1146 case token.LPAREN: 1147 lparen := p.pos 1148 p.next() 1149 p.exprLev++ 1150 x := p.parseRhsOrType() // types may be parenthesized: (some type) 1151 p.exprLev-- 1152 rparen := p.expect(token.RPAREN) 1153 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen} 1154 1155 case token.FUNC: 1156 return p.parseFuncTypeOrLit() 1157 } 1158 1159 if typ := p.tryIdentOrType(); typ != nil { 1160 // could be type for composite literal or conversion 1161 _, isIdent := typ.(*ast.Ident) 1162 assert(!isIdent, "type cannot be identifier") 1163 return typ 1164 } 1165 1166 // we have an error 1167 pos := p.pos 1168 p.errorExpected(pos, "operand") 1169 syncStmt(p) 1170 return &ast.BadExpr{From: pos, To: p.pos} 1171 } 1172 1173 func (p *parser) parseSelector(x ast.Expr) ast.Expr { 1174 if p.trace { 1175 defer un(trace(p, "Selector")) 1176 } 1177 1178 sel := p.parseIdent() 1179 1180 return &ast.SelectorExpr{X: x, Sel: sel} 1181 } 1182 1183 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr { 1184 if p.trace { 1185 defer un(trace(p, "TypeAssertion")) 1186 } 1187 1188 lparen := p.expect(token.LPAREN) 1189 var typ ast.Expr 1190 if p.tok == token.TYPE { 1191 // type switch: typ == nil 1192 p.next() 1193 } else { 1194 typ = p.parseType() 1195 } 1196 rparen := p.expect(token.RPAREN) 1197 1198 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen} 1199 } 1200 1201 func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr { 1202 if p.trace { 1203 defer un(trace(p, "IndexOrSlice")) 1204 } 1205 1206 const N = 3 // change the 3 to 2 to disable 3-index slices 1207 lbrack := p.expect(token.LBRACK) 1208 p.exprLev++ 1209 var index [N]ast.Expr 1210 var colons [N - 1]token.Pos 1211 if p.tok != token.COLON { 1212 index[0] = p.parseRhs() 1213 } 1214 ncolons := 0 1215 for p.tok == token.COLON && ncolons < len(colons) { 1216 colons[ncolons] = p.pos 1217 ncolons++ 1218 p.next() 1219 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF { 1220 index[ncolons] = p.parseRhs() 1221 } 1222 } 1223 p.exprLev-- 1224 rbrack := p.expect(token.RBRACK) 1225 1226 if ncolons > 0 { 1227 // slice expression 1228 slice3 := false 1229 if ncolons == 2 { 1230 slice3 = true 1231 // Check presence of 2nd and 3rd index here rather than during type-checking 1232 // to prevent erroneous programs from passing through gofmt (was issue 7305). 1233 if index[1] == nil { 1234 p.error(colons[0], "2nd index required in 3-index slice") 1235 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]} 1236 } 1237 if index[2] == nil { 1238 p.error(colons[1], "3rd index required in 3-index slice") 1239 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack} 1240 } 1241 } 1242 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack} 1243 } 1244 1245 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack} 1246 } 1247 1248 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr { 1249 if p.trace { 1250 defer un(trace(p, "CallOrConversion")) 1251 } 1252 1253 lparen := p.expect(token.LPAREN) 1254 p.exprLev++ 1255 var list []ast.Expr 1256 var ellipsis token.Pos 1257 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() { 1258 list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...) 1259 if p.tok == token.ELLIPSIS { 1260 ellipsis = p.pos 1261 p.next() 1262 } 1263 if !p.atComma("argument list", token.RPAREN) { 1264 break 1265 } 1266 p.next() 1267 } 1268 p.exprLev-- 1269 rparen := p.expectClosing(token.RPAREN, "argument list") 1270 1271 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen} 1272 } 1273 1274 func (p *parser) parseValue(keyOk bool) ast.Expr { 1275 if p.trace { 1276 defer un(trace(p, "Element")) 1277 } 1278 1279 if p.tok == token.LBRACE { 1280 return p.parseLiteralValue(nil) 1281 } 1282 1283 // Because the parser doesn't know the composite literal type, it cannot 1284 // know if a key that's an identifier is a struct field name or a name 1285 // denoting a value. The former is not resolved by the parser or the 1286 // resolver. 1287 // 1288 // Instead, _try_ to resolve such a key if possible. If it resolves, 1289 // it a) has correctly resolved, or b) incorrectly resolved because 1290 // the key is a struct field with a name matching another identifier. 1291 // In the former case we are done, and in the latter case we don't 1292 // care because the type checker will do a separate field lookup. 1293 // 1294 // If the key does not resolve, it a) must be defined at the top 1295 // level in another file of the same package, the universe scope, or be 1296 // undeclared; or b) it is a struct field. In the former case, the type 1297 // checker can do a top-level lookup, and in the latter case it will do 1298 // a separate field lookup. 1299 x := p.checkExpr(p.parseExpr(keyOk)) 1300 if keyOk { 1301 if p.tok == token.COLON { 1302 // Try to resolve the key but don't collect it 1303 // as unresolved identifier if it fails so that 1304 // we don't get (possibly false) errors about 1305 // undeclared names. 1306 p.tryResolve(x, false) 1307 } else { 1308 // not a key 1309 p.resolve(x) 1310 } 1311 } 1312 1313 return x 1314 } 1315 1316 func (p *parser) parseElement() ast.Expr { 1317 if p.trace { 1318 defer un(trace(p, "Element")) 1319 } 1320 1321 x := p.parseValue(true) 1322 if p.tok == token.COLON { 1323 colon := p.pos 1324 p.next() 1325 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)} 1326 } 1327 1328 return x 1329 } 1330 1331 func (p *parser) parseElementList() (list []ast.Expr) { 1332 if p.trace { 1333 defer un(trace(p, "ElementList")) 1334 } 1335 1336 for p.tok != token.RBRACE && p.tok != token.EOF { 1337 list = append(list, p.parseElement()) 1338 if !p.atComma("composite literal", token.RBRACE) { 1339 break 1340 } 1341 p.next() 1342 } 1343 1344 return 1345 } 1346 1347 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr { 1348 if p.trace { 1349 defer un(trace(p, "LiteralValue")) 1350 } 1351 1352 lbrace := p.expect(token.LBRACE) 1353 var elts []ast.Expr 1354 p.exprLev++ 1355 if p.tok != token.RBRACE { 1356 elts = p.parseElementList() 1357 } 1358 p.exprLev-- 1359 rbrace := p.expectClosing(token.RBRACE, "composite literal") 1360 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace} 1361 } 1362 1363 // checkExpr checks that x is an expression (and not a type). 1364 func (p *parser) checkExpr(x ast.Expr) ast.Expr { 1365 switch unparen(x).(type) { 1366 case *ast.BadExpr: 1367 case *ast.Ident: 1368 case *ast.BasicLit: 1369 case *ast.FuncLit: 1370 case *ast.CompositeLit: 1371 case *ast.ParenExpr: 1372 panic("unreachable") 1373 case *ast.SelectorExpr: 1374 case *ast.IndexExpr: 1375 case *ast.SliceExpr: 1376 case *ast.TypeAssertExpr: 1377 // If t.Type == nil we have a type assertion of the form 1378 // y.(type), which is only allowed in type switch expressions. 1379 // It's hard to exclude those but for the case where we are in 1380 // a type switch. Instead be lenient and test this in the type 1381 // checker. 1382 case *ast.CallExpr: 1383 case *ast.StarExpr: 1384 case *ast.UnaryExpr: 1385 case *ast.BinaryExpr: 1386 default: 1387 // all other nodes are not proper expressions 1388 p.errorExpected(x.Pos(), "expression") 1389 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1390 } 1391 return x 1392 } 1393 1394 // isTypeName reports whether x is a (qualified) TypeName. 1395 func isTypeName(x ast.Expr) bool { 1396 switch t := x.(type) { 1397 case *ast.BadExpr: 1398 case *ast.Ident: 1399 case *ast.SelectorExpr: 1400 _, isIdent := t.X.(*ast.Ident) 1401 return isIdent 1402 default: 1403 return false // all other nodes are not type names 1404 } 1405 return true 1406 } 1407 1408 // isLiteralType reports whether x is a legal composite literal type. 1409 func isLiteralType(x ast.Expr) bool { 1410 switch t := x.(type) { 1411 case *ast.BadExpr: 1412 case *ast.Ident: 1413 case *ast.SelectorExpr: 1414 _, isIdent := t.X.(*ast.Ident) 1415 return isIdent 1416 case *ast.ArrayType: 1417 case *ast.StructType: 1418 case *ast.MapType: 1419 default: 1420 return false // all other nodes are not legal composite literal types 1421 } 1422 return true 1423 } 1424 1425 // If x is of the form *T, deref returns T, otherwise it returns x. 1426 func deref(x ast.Expr) ast.Expr { 1427 if p, isPtr := x.(*ast.StarExpr); isPtr { 1428 x = p.X 1429 } 1430 return x 1431 } 1432 1433 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x. 1434 func unparen(x ast.Expr) ast.Expr { 1435 if p, isParen := x.(*ast.ParenExpr); isParen { 1436 x = unparen(p.X) 1437 } 1438 return x 1439 } 1440 1441 // checkExprOrType checks that x is an expression or a type 1442 // (and not a raw type such as [...]T). 1443 // 1444 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr { 1445 switch t := unparen(x).(type) { 1446 case *ast.ParenExpr: 1447 panic("unreachable") 1448 case *ast.UnaryExpr: 1449 case *ast.ArrayType: 1450 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis { 1451 p.error(len.Pos(), "expected array length, found '...'") 1452 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1453 } 1454 } 1455 1456 // all other nodes are expressions or types 1457 return x 1458 } 1459 1460 // If lhs is set and the result is an identifier, it is not resolved. 1461 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr { 1462 if p.trace { 1463 defer un(trace(p, "PrimaryExpr")) 1464 } 1465 1466 x := p.parseOperand(lhs) 1467 L: 1468 for { 1469 switch p.tok { 1470 case token.PERIOD: 1471 p.next() 1472 if lhs { 1473 p.resolve(x) 1474 } 1475 switch p.tok { 1476 case token.IDENT: 1477 x = p.parseSelector(p.checkExprOrType(x)) 1478 case token.LPAREN: 1479 x = p.parseTypeAssertion(p.checkExpr(x)) 1480 default: 1481 pos := p.pos 1482 p.errorExpected(pos, "selector or type assertion") 1483 p.next() // make progress 1484 sel := &ast.Ident{NamePos: pos, Name: "_"} 1485 x = &ast.SelectorExpr{X: x, Sel: sel} 1486 } 1487 case token.LBRACK: 1488 if lhs { 1489 p.resolve(x) 1490 } 1491 x = p.parseIndexOrSlice(p.checkExpr(x)) 1492 case token.LPAREN: 1493 if lhs { 1494 p.resolve(x) 1495 } 1496 x = p.parseCallOrConversion(p.checkExprOrType(x)) 1497 case token.LBRACE: 1498 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) { 1499 if lhs { 1500 p.resolve(x) 1501 } 1502 x = p.parseLiteralValue(x) 1503 } else { 1504 break L 1505 } 1506 default: 1507 break L 1508 } 1509 lhs = false // no need to try to resolve again 1510 } 1511 1512 return x 1513 } 1514 1515 // If lhs is set and the result is an identifier, it is not resolved. 1516 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr { 1517 if p.trace { 1518 defer un(trace(p, "UnaryExpr")) 1519 } 1520 1521 switch p.tok { 1522 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND: 1523 pos, op := p.pos, p.tok 1524 p.next() 1525 x := p.parseUnaryExpr(false) 1526 return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)} 1527 1528 case token.ARROW: 1529 // channel type or receive expression 1530 arrow := p.pos 1531 p.next() 1532 1533 // If the next token is token.CHAN we still don't know if it 1534 // is a channel type or a receive operation - we only know 1535 // once we have found the end of the unary expression. There 1536 // are two cases: 1537 // 1538 // <- type => (<-type) must be channel type 1539 // <- expr => <-(expr) is a receive from an expression 1540 // 1541 // In the first case, the arrow must be re-associated with 1542 // the channel type parsed already: 1543 // 1544 // <- (chan type) => (<-chan type) 1545 // <- (chan<- type) => (<-chan (<-type)) 1546 1547 x := p.parseUnaryExpr(false) 1548 1549 // determine which case we have 1550 if typ, ok := x.(*ast.ChanType); ok { 1551 // (<-type) 1552 1553 // re-associate position info and <- 1554 dir := ast.SEND 1555 for ok && dir == ast.SEND { 1556 if typ.Dir == ast.RECV { 1557 // error: (<-type) is (<-(<-chan T)) 1558 p.errorExpected(typ.Arrow, "'chan'") 1559 } 1560 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow 1561 dir, typ.Dir = typ.Dir, ast.RECV 1562 typ, ok = typ.Value.(*ast.ChanType) 1563 } 1564 if dir == ast.SEND { 1565 p.errorExpected(arrow, "channel type") 1566 } 1567 1568 return x 1569 } 1570 1571 // <-(expr) 1572 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)} 1573 1574 case token.MUL: 1575 // pointer type or unary "*" expression 1576 pos := p.pos 1577 p.next() 1578 x := p.parseUnaryExpr(false) 1579 return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)} 1580 } 1581 1582 return p.parsePrimaryExpr(lhs) 1583 } 1584 1585 func (p *parser) tokPrec() (token.Token, int) { 1586 tok := p.tok 1587 if p.inRhs && tok == token.ASSIGN { 1588 tok = token.EQL 1589 } 1590 return tok, tok.Precedence() 1591 } 1592 1593 // If lhs is set and the result is an identifier, it is not resolved. 1594 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr { 1595 if p.trace { 1596 defer un(trace(p, "BinaryExpr")) 1597 } 1598 1599 x := p.parseUnaryExpr(lhs) 1600 for _, prec := p.tokPrec(); prec >= prec1; prec-- { 1601 for { 1602 op, oprec := p.tokPrec() 1603 if oprec != prec { 1604 break 1605 } 1606 pos := p.expect(op) 1607 if lhs { 1608 p.resolve(x) 1609 lhs = false 1610 } 1611 y := p.parseBinaryExpr(false, prec+1) 1612 x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)} 1613 } 1614 } 1615 1616 return x 1617 } 1618 1619 // If lhs is set and the result is an identifier, it is not resolved. 1620 // The result may be a type or even a raw type ([...]int). Callers must 1621 // check the result (using checkExpr or checkExprOrType), depending on 1622 // context. 1623 func (p *parser) parseExpr(lhs bool) ast.Expr { 1624 if p.trace { 1625 defer un(trace(p, "Expression")) 1626 } 1627 1628 return p.parseBinaryExpr(lhs, token.LowestPrec+1) 1629 } 1630 1631 func (p *parser) parseRhs() ast.Expr { 1632 old := p.inRhs 1633 p.inRhs = true 1634 x := p.checkExpr(p.parseExpr(false)) 1635 p.inRhs = old 1636 return x 1637 } 1638 1639 func (p *parser) parseRhsOrType() ast.Expr { 1640 old := p.inRhs 1641 p.inRhs = true 1642 x := p.checkExprOrType(p.parseExpr(false)) 1643 p.inRhs = old 1644 return x 1645 } 1646 1647 // ---------------------------------------------------------------------------- 1648 // Statements 1649 1650 // Parsing modes for parseSimpleStmt. 1651 const ( 1652 basic = iota 1653 labelOk 1654 rangeOk 1655 ) 1656 1657 // parseSimpleStmt returns true as 2nd result if it parsed the assignment 1658 // of a range clause (with mode == rangeOk). The returned statement is an 1659 // assignment with a right-hand side that is a single unary expression of 1660 // the form "range x". No guarantees are given for the left-hand side. 1661 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) { 1662 if p.trace { 1663 defer un(trace(p, "SimpleStmt")) 1664 } 1665 1666 x := p.parseLhsList() 1667 1668 switch p.tok { 1669 case 1670 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN, 1671 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN, 1672 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN, 1673 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN: 1674 // assignment statement, possibly part of a range clause 1675 pos, tok := p.pos, p.tok 1676 p.next() 1677 var y []ast.Expr 1678 isRange := false 1679 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) { 1680 pos := p.pos 1681 p.next() 1682 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}} 1683 isRange = true 1684 } else { 1685 y = p.parseRhsList() 1686 } 1687 as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y} 1688 if tok == token.DEFINE { 1689 p.shortVarDecl(as, x) 1690 } 1691 return as, isRange 1692 } 1693 1694 if len(x) > 1 { 1695 p.errorExpected(x[0].Pos(), "1 expression") 1696 // continue with first expression 1697 } 1698 1699 switch p.tok { 1700 case token.COLON: 1701 // labeled statement 1702 colon := p.pos 1703 p.next() 1704 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent { 1705 // Go spec: The scope of a label is the body of the function 1706 // in which it is declared and excludes the body of any nested 1707 // function. 1708 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()} 1709 p.declare(stmt, nil, p.labelScope, ast.Lbl, label) 1710 return stmt, false 1711 } 1712 // The label declaration typically starts at x[0].Pos(), but the label 1713 // declaration may be erroneous due to a token after that position (and 1714 // before the ':'). If SpuriousErrors is not set, the (only) error re- 1715 // ported for the line is the illegal label error instead of the token 1716 // before the ':' that caused the problem. Thus, use the (latest) colon 1717 // position for error reporting. 1718 p.error(colon, "illegal label declaration") 1719 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false 1720 1721 case token.ARROW: 1722 // send statement 1723 arrow := p.pos 1724 p.next() 1725 y := p.parseRhs() 1726 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false 1727 1728 case token.INC, token.DEC: 1729 // increment or decrement 1730 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok} 1731 p.next() 1732 return s, false 1733 } 1734 1735 // expression 1736 return &ast.ExprStmt{X: x[0]}, false 1737 } 1738 1739 func (p *parser) parseCallExpr(callType string) *ast.CallExpr { 1740 x := p.parseRhsOrType() // could be a conversion: (some type)(x) 1741 if call, isCall := x.(*ast.CallExpr); isCall { 1742 return call 1743 } 1744 if _, isBad := x.(*ast.BadExpr); !isBad { 1745 // only report error if it's a new one 1746 p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType)) 1747 } 1748 return nil 1749 } 1750 1751 func (p *parser) parseGoStmt() ast.Stmt { 1752 if p.trace { 1753 defer un(trace(p, "GoStmt")) 1754 } 1755 1756 pos := p.expect(token.GO) 1757 call := p.parseCallExpr("go") 1758 p.expectSemi() 1759 if call == nil { 1760 return &ast.BadStmt{From: pos, To: pos + 2} // len("go") 1761 } 1762 1763 return &ast.GoStmt{Go: pos, Call: call} 1764 } 1765 1766 func (p *parser) parseDeferStmt() ast.Stmt { 1767 if p.trace { 1768 defer un(trace(p, "DeferStmt")) 1769 } 1770 1771 pos := p.expect(token.DEFER) 1772 call := p.parseCallExpr("defer") 1773 p.expectSemi() 1774 if call == nil { 1775 return &ast.BadStmt{From: pos, To: pos + 5} // len("defer") 1776 } 1777 1778 return &ast.DeferStmt{Defer: pos, Call: call} 1779 } 1780 1781 func (p *parser) parseReturnStmt() *ast.ReturnStmt { 1782 if p.trace { 1783 defer un(trace(p, "ReturnStmt")) 1784 } 1785 1786 pos := p.pos 1787 p.expect(token.RETURN) 1788 var x []ast.Expr 1789 if p.tok != token.SEMICOLON && p.tok != token.RBRACE { 1790 x = p.parseRhsList() 1791 } 1792 p.expectSemi() 1793 1794 return &ast.ReturnStmt{Return: pos, Results: x} 1795 } 1796 1797 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt { 1798 if p.trace { 1799 defer un(trace(p, "BranchStmt")) 1800 } 1801 1802 pos := p.expect(tok) 1803 var label *ast.Ident 1804 if tok != token.FALLTHROUGH && p.tok == token.IDENT { 1805 label = p.parseIdent() 1806 // add to list of unresolved targets 1807 n := len(p.targetStack) - 1 1808 p.targetStack[n] = append(p.targetStack[n], label) 1809 } 1810 p.expectSemi() 1811 1812 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label} 1813 } 1814 1815 func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr { 1816 if s == nil { 1817 return nil 1818 } 1819 if es, isExpr := s.(*ast.ExprStmt); isExpr { 1820 return p.checkExpr(es.X) 1821 } 1822 p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind)) 1823 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())} 1824 } 1825 1826 func (p *parser) parseIfStmt() *ast.IfStmt { 1827 if p.trace { 1828 defer un(trace(p, "IfStmt")) 1829 } 1830 1831 pos := p.expect(token.IF) 1832 p.openScope() 1833 defer p.closeScope() 1834 1835 var s ast.Stmt 1836 var x ast.Expr 1837 { 1838 prevLev := p.exprLev 1839 p.exprLev = -1 1840 if p.tok == token.SEMICOLON { 1841 p.next() 1842 x = p.parseRhs() 1843 } else { 1844 s, _ = p.parseSimpleStmt(basic) 1845 if p.tok == token.SEMICOLON { 1846 p.next() 1847 x = p.parseRhs() 1848 } else { 1849 x = p.makeExpr(s, "boolean expression") 1850 s = nil 1851 } 1852 } 1853 p.exprLev = prevLev 1854 } 1855 1856 body := p.parseBlockStmt() 1857 var else_ ast.Stmt 1858 if p.tok == token.ELSE { 1859 p.next() 1860 else_ = p.parseStmt() 1861 } else { 1862 p.expectSemi() 1863 } 1864 1865 return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_} 1866 } 1867 1868 func (p *parser) parseTypeList() (list []ast.Expr) { 1869 if p.trace { 1870 defer un(trace(p, "TypeList")) 1871 } 1872 1873 list = append(list, p.parseType()) 1874 for p.tok == token.COMMA { 1875 p.next() 1876 list = append(list, p.parseType()) 1877 } 1878 1879 return 1880 } 1881 1882 func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause { 1883 if p.trace { 1884 defer un(trace(p, "CaseClause")) 1885 } 1886 1887 pos := p.pos 1888 var list []ast.Expr 1889 if p.tok == token.CASE { 1890 p.next() 1891 if typeSwitch { 1892 list = p.parseTypeList() 1893 } else { 1894 list = p.parseRhsList() 1895 } 1896 } else { 1897 p.expect(token.DEFAULT) 1898 } 1899 1900 colon := p.expect(token.COLON) 1901 p.openScope() 1902 body := p.parseStmtList() 1903 p.closeScope() 1904 1905 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body} 1906 } 1907 1908 func isTypeSwitchAssert(x ast.Expr) bool { 1909 a, ok := x.(*ast.TypeAssertExpr) 1910 return ok && a.Type == nil 1911 } 1912 1913 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool { 1914 switch t := s.(type) { 1915 case *ast.ExprStmt: 1916 // x.(type) 1917 return isTypeSwitchAssert(t.X) 1918 case *ast.AssignStmt: 1919 // v := x.(type) 1920 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) { 1921 switch t.Tok { 1922 case token.ASSIGN: 1923 // permit v = x.(type) but complain 1924 p.error(t.TokPos, "expected ':=', found '='") 1925 fallthrough 1926 case token.DEFINE: 1927 return true 1928 } 1929 } 1930 } 1931 return false 1932 } 1933 1934 func (p *parser) parseSwitchStmt() ast.Stmt { 1935 if p.trace { 1936 defer un(trace(p, "SwitchStmt")) 1937 } 1938 1939 pos := p.expect(token.SWITCH) 1940 p.openScope() 1941 defer p.closeScope() 1942 1943 var s1, s2 ast.Stmt 1944 if p.tok != token.LBRACE { 1945 prevLev := p.exprLev 1946 p.exprLev = -1 1947 if p.tok != token.SEMICOLON { 1948 s2, _ = p.parseSimpleStmt(basic) 1949 } 1950 if p.tok == token.SEMICOLON { 1951 p.next() 1952 s1 = s2 1953 s2 = nil 1954 if p.tok != token.LBRACE { 1955 // A TypeSwitchGuard may declare a variable in addition 1956 // to the variable declared in the initial SimpleStmt. 1957 // Introduce extra scope to avoid redeclaration errors: 1958 // 1959 // switch t := 0; t := x.(T) { ... } 1960 // 1961 // (this code is not valid Go because the first t 1962 // cannot be accessed and thus is never used, the extra 1963 // scope is needed for the correct error message). 1964 // 1965 // If we don't have a type switch, s2 must be an expression. 1966 // Having the extra nested but empty scope won't affect it. 1967 p.openScope() 1968 defer p.closeScope() 1969 s2, _ = p.parseSimpleStmt(basic) 1970 } 1971 } 1972 p.exprLev = prevLev 1973 } 1974 1975 typeSwitch := p.isTypeSwitchGuard(s2) 1976 lbrace := p.expect(token.LBRACE) 1977 var list []ast.Stmt 1978 for p.tok == token.CASE || p.tok == token.DEFAULT { 1979 list = append(list, p.parseCaseClause(typeSwitch)) 1980 } 1981 rbrace := p.expect(token.RBRACE) 1982 p.expectSemi() 1983 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1984 1985 if typeSwitch { 1986 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body} 1987 } 1988 1989 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body} 1990 } 1991 1992 func (p *parser) parseCommClause() *ast.CommClause { 1993 if p.trace { 1994 defer un(trace(p, "CommClause")) 1995 } 1996 1997 p.openScope() 1998 pos := p.pos 1999 var comm ast.Stmt 2000 if p.tok == token.CASE { 2001 p.next() 2002 lhs := p.parseLhsList() 2003 if p.tok == token.ARROW { 2004 // SendStmt 2005 if len(lhs) > 1 { 2006 p.errorExpected(lhs[0].Pos(), "1 expression") 2007 // continue with first expression 2008 } 2009 arrow := p.pos 2010 p.next() 2011 rhs := p.parseRhs() 2012 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs} 2013 } else { 2014 // RecvStmt 2015 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE { 2016 // RecvStmt with assignment 2017 if len(lhs) > 2 { 2018 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions") 2019 // continue with first two expressions 2020 lhs = lhs[0:2] 2021 } 2022 pos := p.pos 2023 p.next() 2024 rhs := p.parseRhs() 2025 as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}} 2026 if tok == token.DEFINE { 2027 p.shortVarDecl(as, lhs) 2028 } 2029 comm = as 2030 } else { 2031 // lhs must be single receive operation 2032 if len(lhs) > 1 { 2033 p.errorExpected(lhs[0].Pos(), "1 expression") 2034 // continue with first expression 2035 } 2036 comm = &ast.ExprStmt{X: lhs[0]} 2037 } 2038 } 2039 } else { 2040 p.expect(token.DEFAULT) 2041 } 2042 2043 colon := p.expect(token.COLON) 2044 body := p.parseStmtList() 2045 p.closeScope() 2046 2047 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body} 2048 } 2049 2050 func (p *parser) parseSelectStmt() *ast.SelectStmt { 2051 if p.trace { 2052 defer un(trace(p, "SelectStmt")) 2053 } 2054 2055 pos := p.expect(token.SELECT) 2056 lbrace := p.expect(token.LBRACE) 2057 var list []ast.Stmt 2058 for p.tok == token.CASE || p.tok == token.DEFAULT { 2059 list = append(list, p.parseCommClause()) 2060 } 2061 rbrace := p.expect(token.RBRACE) 2062 p.expectSemi() 2063 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 2064 2065 return &ast.SelectStmt{Select: pos, Body: body} 2066 } 2067 2068 func (p *parser) parseForStmt() ast.Stmt { 2069 if p.trace { 2070 defer un(trace(p, "ForStmt")) 2071 } 2072 2073 pos := p.expect(token.FOR) 2074 p.openScope() 2075 defer p.closeScope() 2076 2077 var s1, s2, s3 ast.Stmt 2078 var isRange bool 2079 if p.tok != token.LBRACE { 2080 prevLev := p.exprLev 2081 p.exprLev = -1 2082 if p.tok != token.SEMICOLON { 2083 if p.tok == token.RANGE { 2084 // "for range x" (nil lhs in assignment) 2085 pos := p.pos 2086 p.next() 2087 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}} 2088 s2 = &ast.AssignStmt{Rhs: y} 2089 isRange = true 2090 } else { 2091 s2, isRange = p.parseSimpleStmt(rangeOk) 2092 } 2093 } 2094 if !isRange && p.tok == token.SEMICOLON { 2095 p.next() 2096 s1 = s2 2097 s2 = nil 2098 if p.tok != token.SEMICOLON { 2099 s2, _ = p.parseSimpleStmt(basic) 2100 } 2101 p.expectSemi() 2102 if p.tok != token.LBRACE { 2103 s3, _ = p.parseSimpleStmt(basic) 2104 } 2105 } 2106 p.exprLev = prevLev 2107 } 2108 2109 body := p.parseBlockStmt() 2110 p.expectSemi() 2111 2112 if isRange { 2113 as := s2.(*ast.AssignStmt) 2114 // check lhs 2115 var key, value ast.Expr 2116 switch len(as.Lhs) { 2117 case 0: 2118 // nothing to do 2119 case 1: 2120 key = as.Lhs[0] 2121 case 2: 2122 key, value = as.Lhs[0], as.Lhs[1] 2123 default: 2124 p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions") 2125 return &ast.BadStmt{From: pos, To: p.safePos(body.End())} 2126 } 2127 // parseSimpleStmt returned a right-hand side that 2128 // is a single unary expression of the form "range x" 2129 x := as.Rhs[0].(*ast.UnaryExpr).X 2130 return &ast.RangeStmt{ 2131 For: pos, 2132 Key: key, 2133 Value: value, 2134 TokPos: as.TokPos, 2135 Tok: as.Tok, 2136 X: x, 2137 Body: body, 2138 } 2139 } 2140 2141 // regular for statement 2142 return &ast.ForStmt{ 2143 For: pos, 2144 Init: s1, 2145 Cond: p.makeExpr(s2, "boolean or range expression"), 2146 Post: s3, 2147 Body: body, 2148 } 2149 } 2150 2151 func (p *parser) parseStmt() (s ast.Stmt) { 2152 if p.trace { 2153 defer un(trace(p, "Statement")) 2154 } 2155 2156 switch p.tok { 2157 case token.CONST, token.TYPE, token.VAR: 2158 s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)} 2159 case 2160 // tokens that may start an expression 2161 token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands 2162 token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types 2163 token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators 2164 s, _ = p.parseSimpleStmt(labelOk) 2165 // because of the required look-ahead, labeled statements are 2166 // parsed by parseSimpleStmt - don't expect a semicolon after 2167 // them 2168 if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt { 2169 p.expectSemi() 2170 } 2171 case token.GO: 2172 s = p.parseGoStmt() 2173 case token.DEFER: 2174 s = p.parseDeferStmt() 2175 case token.RETURN: 2176 s = p.parseReturnStmt() 2177 case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH: 2178 s = p.parseBranchStmt(p.tok) 2179 case token.LBRACE: 2180 s = p.parseBlockStmt() 2181 p.expectSemi() 2182 case token.IF: 2183 s = p.parseIfStmt() 2184 case token.SWITCH: 2185 s = p.parseSwitchStmt() 2186 case token.SELECT: 2187 s = p.parseSelectStmt() 2188 case token.FOR: 2189 s = p.parseForStmt() 2190 case token.SEMICOLON: 2191 // Is it ever possible to have an implicit semicolon 2192 // producing an empty statement in a valid program? 2193 // (handle correctly anyway) 2194 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"} 2195 p.next() 2196 case token.RBRACE: 2197 // a semicolon may be omitted before a closing "}" 2198 s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true} 2199 default: 2200 // no statement found 2201 pos := p.pos 2202 p.errorExpected(pos, "statement") 2203 syncStmt(p) 2204 s = &ast.BadStmt{From: pos, To: p.pos} 2205 } 2206 2207 return 2208 } 2209 2210 // ---------------------------------------------------------------------------- 2211 // Declarations 2212 2213 type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec 2214 2215 func isValidImport(lit string) bool { 2216 const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" 2217 s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal 2218 for _, r := range s { 2219 if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { 2220 return false 2221 } 2222 } 2223 return s != "" 2224 } 2225 2226 func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec { 2227 if p.trace { 2228 defer un(trace(p, "ImportSpec")) 2229 } 2230 2231 var ident *ast.Ident 2232 switch p.tok { 2233 case token.PERIOD: 2234 ident = &ast.Ident{NamePos: p.pos, Name: "."} 2235 p.next() 2236 case token.IDENT: 2237 ident = p.parseIdent() 2238 } 2239 2240 pos := p.pos 2241 var path string 2242 if p.tok == token.STRING { 2243 path = p.lit 2244 if !isValidImport(path) { 2245 p.error(pos, "invalid import path: "+path) 2246 } 2247 p.next() 2248 } else { 2249 p.expect(token.STRING) // use expect() error handling 2250 } 2251 p.expectSemi() // call before accessing p.linecomment 2252 2253 // collect imports 2254 spec := &ast.ImportSpec{ 2255 Doc: doc, 2256 Name: ident, 2257 Path: &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path}, 2258 Comment: p.lineComment, 2259 } 2260 p.imports = append(p.imports, spec) 2261 2262 return spec 2263 } 2264 2265 func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec { 2266 if p.trace { 2267 defer un(trace(p, keyword.String()+"Spec")) 2268 } 2269 2270 pos := p.pos 2271 idents := p.parseIdentList() 2272 typ := p.tryType() 2273 var values []ast.Expr 2274 // always permit optional initialization for more tolerant parsing 2275 if p.tok == token.ASSIGN { 2276 p.next() 2277 values = p.parseRhsList() 2278 } 2279 p.expectSemi() // call before accessing p.linecomment 2280 2281 switch keyword { 2282 case token.VAR: 2283 if typ == nil && values == nil { 2284 p.error(pos, "missing variable type or initialization") 2285 } 2286 case token.CONST: 2287 if values == nil && (iota == 0 || typ != nil) { 2288 p.error(pos, "missing constant value") 2289 } 2290 } 2291 2292 // Go spec: The scope of a constant or variable identifier declared inside 2293 // a function begins at the end of the ConstSpec or VarSpec and ends at 2294 // the end of the innermost containing block. 2295 // (Global identifiers are resolved in a separate phase after parsing.) 2296 spec := &ast.ValueSpec{ 2297 Doc: doc, 2298 Names: idents, 2299 Type: typ, 2300 Values: values, 2301 Comment: p.lineComment, 2302 } 2303 kind := ast.Con 2304 if keyword == token.VAR { 2305 kind = ast.Var 2306 } 2307 p.declare(spec, iota, p.topScope, kind, idents...) 2308 2309 return spec 2310 } 2311 2312 func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec { 2313 if p.trace { 2314 defer un(trace(p, "TypeSpec")) 2315 } 2316 2317 ident := p.parseIdent() 2318 2319 // Go spec: The scope of a type identifier declared inside a function begins 2320 // at the identifier in the TypeSpec and ends at the end of the innermost 2321 // containing block. 2322 // (Global identifiers are resolved in a separate phase after parsing.) 2323 spec := &ast.TypeSpec{Doc: doc, Name: ident} 2324 p.declare(spec, nil, p.topScope, ast.Typ, ident) 2325 2326 spec.Type = p.parseType() 2327 p.expectSemi() // call before accessing p.linecomment 2328 spec.Comment = p.lineComment 2329 2330 return spec 2331 } 2332 2333 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl { 2334 if p.trace { 2335 defer un(trace(p, "GenDecl("+keyword.String()+")")) 2336 } 2337 2338 doc := p.leadComment 2339 pos := p.expect(keyword) 2340 var lparen, rparen token.Pos 2341 var list []ast.Spec 2342 if p.tok == token.LPAREN { 2343 lparen = p.pos 2344 p.next() 2345 for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ { 2346 list = append(list, f(p.leadComment, keyword, iota)) 2347 } 2348 rparen = p.expect(token.RPAREN) 2349 p.expectSemi() 2350 } else { 2351 list = append(list, f(nil, keyword, 0)) 2352 } 2353 2354 return &ast.GenDecl{ 2355 Doc: doc, 2356 TokPos: pos, 2357 Tok: keyword, 2358 Lparen: lparen, 2359 Specs: list, 2360 Rparen: rparen, 2361 } 2362 } 2363 2364 func (p *parser) parseFuncDecl() *ast.FuncDecl { 2365 if p.trace { 2366 defer un(trace(p, "FunctionDecl")) 2367 } 2368 2369 doc := p.leadComment 2370 pos := p.expect(token.FUNC) 2371 scope := ast.NewScope(p.topScope) // function scope 2372 2373 var recv *ast.FieldList 2374 if p.tok == token.LPAREN { 2375 recv = p.parseParameters(scope, false) 2376 } 2377 2378 ident := p.parseIdent() 2379 2380 params, results := p.parseSignature(scope) 2381 2382 var body *ast.BlockStmt 2383 if p.tok == token.LBRACE { 2384 body = p.parseBody(scope) 2385 } 2386 p.expectSemi() 2387 2388 decl := &ast.FuncDecl{ 2389 Doc: doc, 2390 Recv: recv, 2391 Name: ident, 2392 Type: &ast.FuncType{ 2393 Func: pos, 2394 Params: params, 2395 Results: results, 2396 }, 2397 Body: body, 2398 } 2399 if recv == nil { 2400 // Go spec: The scope of an identifier denoting a constant, type, 2401 // variable, or function (but not method) declared at top level 2402 // (outside any function) is the package block. 2403 // 2404 // init() functions cannot be referred to and there may 2405 // be more than one - don't put them in the pkgScope 2406 if ident.Name != "init" { 2407 p.declare(decl, nil, p.pkgScope, ast.Fun, ident) 2408 } 2409 } 2410 2411 return decl 2412 } 2413 2414 func (p *parser) parseDecl(sync func(*parser)) ast.Decl { 2415 if p.trace { 2416 defer un(trace(p, "Declaration")) 2417 } 2418 2419 var f parseSpecFunction 2420 switch p.tok { 2421 case token.CONST, token.VAR: 2422 f = p.parseValueSpec 2423 2424 case token.TYPE: 2425 f = p.parseTypeSpec 2426 2427 case token.FUNC: 2428 return p.parseFuncDecl() 2429 2430 default: 2431 pos := p.pos 2432 p.errorExpected(pos, "declaration") 2433 sync(p) 2434 return &ast.BadDecl{From: pos, To: p.pos} 2435 } 2436 2437 return p.parseGenDecl(p.tok, f) 2438 } 2439 2440 // ---------------------------------------------------------------------------- 2441 // Source files 2442 2443 func (p *parser) parseFile() *ast.File { 2444 if p.trace { 2445 defer un(trace(p, "File")) 2446 } 2447 2448 // Don't bother parsing the rest if we had errors scanning the first token. 2449 // Likely not a Go source file at all. 2450 if p.errors.Len() != 0 { 2451 return nil 2452 } 2453 2454 // package clause 2455 doc := p.leadComment 2456 pos := p.expect(token.PACKAGE) 2457 // Go spec: The package clause is not a declaration; 2458 // the package name does not appear in any scope. 2459 ident := p.parseIdent() 2460 if ident.Name == "_" && p.mode&DeclarationErrors != 0 { 2461 p.error(p.pos, "invalid package name _") 2462 } 2463 p.expectSemi() 2464 2465 // Don't bother parsing the rest if we had errors parsing the package clause. 2466 // Likely not a Go source file at all. 2467 if p.errors.Len() != 0 { 2468 return nil 2469 } 2470 2471 p.openScope() 2472 p.pkgScope = p.topScope 2473 var decls []ast.Decl 2474 if p.mode&PackageClauseOnly == 0 { 2475 // import decls 2476 for p.tok == token.IMPORT { 2477 decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec)) 2478 } 2479 2480 if p.mode&ImportsOnly == 0 { 2481 // rest of package body 2482 for p.tok != token.EOF { 2483 decls = append(decls, p.parseDecl(syncDecl)) 2484 } 2485 } 2486 } 2487 p.closeScope() 2488 assert(p.topScope == nil, "unbalanced scopes") 2489 assert(p.labelScope == nil, "unbalanced label scopes") 2490 2491 // resolve global identifiers within the same file 2492 i := 0 2493 for _, ident := range p.unresolved { 2494 // i <= index for current ident 2495 assert(ident.Obj == unresolved, "object already resolved") 2496 ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel 2497 if ident.Obj == nil { 2498 p.unresolved[i] = ident 2499 i++ 2500 } 2501 } 2502 2503 return &ast.File{ 2504 Doc: doc, 2505 Package: pos, 2506 Name: ident, 2507 Decls: decls, 2508 Scope: p.pkgScope, 2509 Imports: p.imports, 2510 Unresolved: p.unresolved[0:i], 2511 Comments: p.comments, 2512 } 2513 }