github.com/dannin/go@v0.0.0-20161031215817-d35dfd405eaa/src/go/parser/parser.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package parser implements a parser for Go source files. Input may be
// provided in a variety of forms (see the various Parse* functions); the
// output is an abstract syntax tree (AST) representing the Go source. The
// parser is invoked through one of the Parse* functions.
//
// The parser accepts a larger language than is syntactically permitted by
// the Go spec, for simplicity, and for improved robustness in the presence
// of syntax errors. For instance, in method declarations, the receiver is
// treated like an ordinary parameter list and thus may contain multiple
// entries where the spec permits exactly one. Consequently, the corresponding
// field in the AST (ast.FuncDecl.Recv) is not restricted to one entry.
//
package parser

import (
	"fmt"
	"go/ast"
	"go/scanner"
	"go/token"
	"strconv"
	"strings"
	"unicode"
)

// The parser structure holds the parser's internal state.
type parser struct {
	file    *token.File
	errors  scanner.ErrorList
	scanner scanner.Scanner

	// Tracing/debugging
	mode   Mode // parsing mode
	trace  bool // == (mode & Trace != 0)
	indent int  // indentation used for tracing output

	// Comments
	comments    []*ast.CommentGroup
	leadComment *ast.CommentGroup // last lead comment
	lineComment *ast.CommentGroup // last line comment

	// Next token
	pos token.Pos   // token position
	tok token.Token // one token look-ahead
	lit string      // token literal

	// Error recovery
	// (used to limit the number of calls to syncXXX functions
	// w/o making scanning progress - avoids potential endless
	// loops across multiple parser functions during error recovery)
	syncPos token.Pos // last synchronization position
	syncCnt int       // number of calls to syncXXX without progress

	// Non-syntactic parser control
	exprLev int  // < 0: in control clause, >= 0: in expression
	inRhs   bool // if set, the parser is parsing a rhs expression

	// Ordinary identifier scopes
	pkgScope   *ast.Scope        // pkgScope.Outer == nil
	topScope   *ast.Scope        // top-most scope; may be pkgScope
	unresolved []*ast.Ident      // unresolved identifiers
	imports    []*ast.ImportSpec // list of imports

	// Label scopes
	// (maintained by open/close LabelScope)
	labelScope  *ast.Scope     // label scope for current function
	targetStack [][]*ast.Ident // stack of unresolved labels
}

func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode Mode) {
	p.file = fset.AddFile(filename, -1, len(src))
	var m scanner.Mode
	if mode&ParseComments != 0 {
		m = scanner.ScanComments
	}
	eh := func(pos token.Position, msg string) { p.errors.Add(pos, msg) }
	p.scanner.Init(p.file, src, eh, m)

	p.mode = mode
	p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently)

	p.next()
}

// ----------------------------------------------------------------------------
// Scoping support

func (p *parser) openScope() {
	p.topScope = ast.NewScope(p.topScope)
}

func (p *parser) closeScope() {
	p.topScope = p.topScope.Outer
}

func (p *parser) openLabelScope() {
	p.labelScope = ast.NewScope(p.labelScope)
	p.targetStack = append(p.targetStack, nil)
}

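// Illustrative usage sketch (not part of the original source): clients drive
// this package through the Parse* entry points rather than the parser struct
// above. The program below parses a made-up file "example.go" and shows that
// a method receiver is stored as an *ast.FieldList, as noted in the package
// comment.
//
//	package main
//
//	import (
//		"fmt"
//		"go/ast"
//		"go/parser"
//		"go/token"
//	)
//
//	func main() {
//		src := "package p\n\nfunc (t T) M() {}\n"
//		fset := token.NewFileSet()
//		f, err := parser.ParseFile(fset, "example.go", src, 0)
//		if err != nil {
//			panic(err)
//		}
//		m := f.Decls[0].(*ast.FuncDecl)
//		// Recv is an *ast.FieldList; a well-formed method has exactly one entry.
//		fmt.Println(len(m.Recv.List)) // 1
//	}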
104 func (p *parser) closeLabelScope() { 105 // resolve labels 106 n := len(p.targetStack) - 1 107 scope := p.labelScope 108 for _, ident := range p.targetStack[n] { 109 ident.Obj = scope.Lookup(ident.Name) 110 if ident.Obj == nil && p.mode&DeclarationErrors != 0 { 111 p.error(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name)) 112 } 113 } 114 // pop label scope 115 p.targetStack = p.targetStack[0:n] 116 p.labelScope = p.labelScope.Outer 117 } 118 119 func (p *parser) declare(decl, data interface{}, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) { 120 for _, ident := range idents { 121 assert(ident.Obj == nil, "identifier already declared or resolved") 122 obj := ast.NewObj(kind, ident.Name) 123 // remember the corresponding declaration for redeclaration 124 // errors and global variable resolution/typechecking phase 125 obj.Decl = decl 126 obj.Data = data 127 ident.Obj = obj 128 if ident.Name != "_" { 129 if alt := scope.Insert(obj); alt != nil && p.mode&DeclarationErrors != 0 { 130 prevDecl := "" 131 if pos := alt.Pos(); pos.IsValid() { 132 prevDecl = fmt.Sprintf("\n\tprevious declaration at %s", p.file.Position(pos)) 133 } 134 p.error(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl)) 135 } 136 } 137 } 138 } 139 140 func (p *parser) shortVarDecl(decl *ast.AssignStmt, list []ast.Expr) { 141 // Go spec: A short variable declaration may redeclare variables 142 // provided they were originally declared in the same block with 143 // the same type, and at least one of the non-blank variables is new. 144 n := 0 // number of new variables 145 for _, x := range list { 146 if ident, isIdent := x.(*ast.Ident); isIdent { 147 assert(ident.Obj == nil, "identifier already declared or resolved") 148 obj := ast.NewObj(ast.Var, ident.Name) 149 // remember corresponding assignment for other tools 150 obj.Decl = decl 151 ident.Obj = obj 152 if ident.Name != "_" { 153 if alt := p.topScope.Insert(obj); alt != nil { 154 ident.Obj = alt // redeclaration 155 } else { 156 n++ // new declaration 157 } 158 } 159 } else { 160 p.errorExpected(x.Pos(), "identifier on left side of :=") 161 } 162 } 163 if n == 0 && p.mode&DeclarationErrors != 0 { 164 p.error(list[0].Pos(), "no new variables on left side of :=") 165 } 166 } 167 168 // The unresolved object is a sentinel to mark identifiers that have been added 169 // to the list of unresolved identifiers. The sentinel is only used for verifying 170 // internal consistency. 171 var unresolved = new(ast.Object) 172 173 // If x is an identifier, tryResolve attempts to resolve x by looking up 174 // the object it denotes. If no object is found and collectUnresolved is 175 // set, x is marked as unresolved and collected in the list of unresolved 176 // identifiers. 
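// Illustrative sketch (not part of the original source): identifiers that the
// unresolved sentinel above marks via tryResolve (below) are handed to the
// caller through ast.File.Unresolved. The source text is made up; "T" and
// "fmt" are not declared in the file, so both appear in the list (assumes the
// standard fmt, go/parser, and go/token imports).
//
//	src := "package p\n\nimport \"fmt\"\n\nvar x T\n\nfunc f() { fmt.Println(x) }\n"
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "p.go", src, 0)
//	if err != nil {
//		panic(err)
//	}
//	for _, id := range f.Unresolved {
//		fmt.Println(id.Name) // prints "T" and "fmt"
//	}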
177 // 178 func (p *parser) tryResolve(x ast.Expr, collectUnresolved bool) { 179 // nothing to do if x is not an identifier or the blank identifier 180 ident, _ := x.(*ast.Ident) 181 if ident == nil { 182 return 183 } 184 assert(ident.Obj == nil, "identifier already declared or resolved") 185 if ident.Name == "_" { 186 return 187 } 188 // try to resolve the identifier 189 for s := p.topScope; s != nil; s = s.Outer { 190 if obj := s.Lookup(ident.Name); obj != nil { 191 ident.Obj = obj 192 return 193 } 194 } 195 // all local scopes are known, so any unresolved identifier 196 // must be found either in the file scope, package scope 197 // (perhaps in another file), or universe scope --- collect 198 // them so that they can be resolved later 199 if collectUnresolved { 200 ident.Obj = unresolved 201 p.unresolved = append(p.unresolved, ident) 202 } 203 } 204 205 func (p *parser) resolve(x ast.Expr) { 206 p.tryResolve(x, true) 207 } 208 209 // ---------------------------------------------------------------------------- 210 // Parsing support 211 212 func (p *parser) printTrace(a ...interface{}) { 213 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " 214 const n = len(dots) 215 pos := p.file.Position(p.pos) 216 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) 217 i := 2 * p.indent 218 for i > n { 219 fmt.Print(dots) 220 i -= n 221 } 222 // i <= n 223 fmt.Print(dots[0:i]) 224 fmt.Println(a...) 225 } 226 227 func trace(p *parser, msg string) *parser { 228 p.printTrace(msg, "(") 229 p.indent++ 230 return p 231 } 232 233 // Usage pattern: defer un(trace(p, "...")) 234 func un(p *parser) { 235 p.indent-- 236 p.printTrace(")") 237 } 238 239 // Advance to the next token. 240 func (p *parser) next0() { 241 // Because of one-token look-ahead, print the previous token 242 // when tracing as it provides a more readable output. The 243 // very first token (!p.pos.IsValid()) is not initialized 244 // (it is token.ILLEGAL), so don't print it . 245 if p.trace && p.pos.IsValid() { 246 s := p.tok.String() 247 switch { 248 case p.tok.IsLiteral(): 249 p.printTrace(s, p.lit) 250 case p.tok.IsOperator(), p.tok.IsKeyword(): 251 p.printTrace("\"" + s + "\"") 252 default: 253 p.printTrace(s) 254 } 255 } 256 257 p.pos, p.tok, p.lit = p.scanner.Scan() 258 } 259 260 // Consume a comment and return it and the line on which it ends. 261 func (p *parser) consumeComment() (comment *ast.Comment, endline int) { 262 // /*-style comments may end on a different line than where they start. 263 // Scan the comment for '\n' chars and adjust endline accordingly. 264 endline = p.file.Line(p.pos) 265 if p.lit[1] == '*' { 266 // don't use range here - no need to decode Unicode code points 267 for i := 0; i < len(p.lit); i++ { 268 if p.lit[i] == '\n' { 269 endline++ 270 } 271 } 272 } 273 274 comment = &ast.Comment{Slash: p.pos, Text: p.lit} 275 p.next0() 276 277 return 278 } 279 280 // Consume a group of adjacent comments, add it to the parser's 281 // comments list, and return it together with the line at which 282 // the last comment in the group ends. A non-comment token or n 283 // empty lines terminate a comment group. 
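// Illustrative sketch (not part of the original source): the trace/un pair
// and printTrace above produce the output seen when the Trace mode bit is
// set. A caller enables it like this (file name and source are made up):
//
//	fset := token.NewFileSet()
//	// Prints an indented trace of the productions entered while parsing.
//	_, err := parser.ParseFile(fset, "x.go", "package p\nvar x = 1 + 2\n", parser.Trace)
//	if err != nil {
//		panic(err)
//	}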
//
func (p *parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) {
	var list []*ast.Comment
	endline = p.file.Line(p.pos)
	for p.tok == token.COMMENT && p.file.Line(p.pos) <= endline+n {
		var comment *ast.Comment
		comment, endline = p.consumeComment()
		list = append(list, comment)
	}

	// add comment group to the comments list
	comments = &ast.CommentGroup{List: list}
	p.comments = append(p.comments, comments)

	return
}

// Advance to the next non-comment token. In the process, collect
// any comment groups encountered, and remember the last lead and
// line comments.
//
// A lead comment is a comment group that starts and ends in a
// line without any other tokens and that is followed by a non-comment
// token on the line immediately after the comment group.
//
// A line comment is a comment group that follows a non-comment
// token on the same line, and that has no tokens after it on the line
// where it ends.
//
// Lead and line comments may be considered documentation that is
// stored in the AST.
//
func (p *parser) next() {
	p.leadComment = nil
	p.lineComment = nil
	prev := p.pos
	p.next0()

	if p.tok == token.COMMENT {
		var comment *ast.CommentGroup
		var endline int

		if p.file.Line(p.pos) == p.file.Line(prev) {
			// The comment is on the same line as the previous token; it
			// cannot be a lead comment but may be a line comment.
			comment, endline = p.consumeCommentGroup(0)
			if p.file.Line(p.pos) != endline {
				// The next token is on a different line, thus
				// the last comment group is a line comment.
				p.lineComment = comment
			}
		}

		// consume successor comments, if any
		endline = -1
		for p.tok == token.COMMENT {
			comment, endline = p.consumeCommentGroup(1)
		}

		if endline+1 == p.file.Line(p.pos) {
			// The next token is following on the line immediately after the
			// comment group, thus the last comment group is a lead comment.
			p.leadComment = comment
		}
	}
}

// A bailout panic is raised to indicate early termination.
type bailout struct{}

func (p *parser) error(pos token.Pos, msg string) {
	epos := p.file.Position(pos)

	// If AllErrors is not set, discard errors reported on the same line
	// as the last recorded error and stop parsing if there are more than
	// 10 errors.
	if p.mode&AllErrors == 0 {
		n := len(p.errors)
		if n > 0 && p.errors[n-1].Pos.Line == epos.Line {
			return // discard - likely a spurious error
		}
		if n > 10 {
			panic(bailout{})
		}
	}

	p.errors.Add(epos, msg)
}

func (p *parser) errorExpected(pos token.Pos, msg string) {
	msg = "expected " + msg
	if pos == p.pos {
		// the error happened at the current position;
		// make the error message more specific
		if p.tok == token.SEMICOLON && p.lit == "\n" {
			msg += ", found newline"
		} else {
			msg += ", found '" + p.tok.String() + "'"
			if p.tok.IsLiteral() {
				msg += " " + p.lit
			}
		}
	}
	p.error(pos, msg)
}

func (p *parser) expect(tok token.Token) token.Pos {
	pos := p.pos
	if p.tok != tok {
		p.errorExpected(pos, "'"+tok.String()+"'")
	}
	p.next() // make progress
	return pos
}

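// Illustrative sketch (not part of the original source): the lead/line
// comment distinction made by next() above is what ends up in the Doc fields
// of the AST when the ParseComments mode bit is set. The source text is made
// up (assumes the standard fmt, go/ast, go/parser, and go/token imports).
//
//	src := "package p\n\n// add returns x+y.\nfunc add(x, y int) int { return x + y }\n"
//	fset := token.NewFileSet()
//	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
//	if err != nil {
//		panic(err)
//	}
//	fn := f.Decls[0].(*ast.FuncDecl)
//	fmt.Print(fn.Doc.Text())     // "add returns x+y.\n" - the lead comment
//	fmt.Println(len(f.Comments)) // all comment groups collected by the parser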
// expectClosing is like expect but provides a better error message
// for the common case of a missing comma before a newline.
//
func (p *parser) expectClosing(tok token.Token, context string) token.Pos {
	if p.tok != tok && p.tok == token.SEMICOLON && p.lit == "\n" {
		p.error(p.pos, "missing ',' before newline in "+context)
		p.next()
	}
	return p.expect(tok)
}

func (p *parser) expectSemi() {
	// semicolon is optional before a closing ')' or '}'
	if p.tok != token.RPAREN && p.tok != token.RBRACE {
		switch p.tok {
		case token.COMMA:
			// permit a ',' instead of a ';' but complain
			p.errorExpected(p.pos, "';'")
			fallthrough
		case token.SEMICOLON:
			p.next()
		default:
			p.errorExpected(p.pos, "';'")
			syncStmt(p)
		}
	}
}

func (p *parser) atComma(context string, follow token.Token) bool {
	if p.tok == token.COMMA {
		return true
	}
	if p.tok != follow {
		msg := "missing ','"
		if p.tok == token.SEMICOLON && p.lit == "\n" {
			msg += " before newline"
		}
		p.error(p.pos, msg+" in "+context)
		return true // "insert" comma and continue
	}
	return false
}

func assert(cond bool, msg string) {
	if !cond {
		panic("go/parser internal error: " + msg)
	}
}

// syncStmt advances to the next statement.
// Used for synchronization after an error.
//
func syncStmt(p *parser) {
	for {
		switch p.tok {
		case token.BREAK, token.CONST, token.CONTINUE, token.DEFER,
			token.FALLTHROUGH, token.FOR, token.GO, token.GOTO,
			token.IF, token.RETURN, token.SELECT, token.SWITCH,
			token.TYPE, token.VAR:
			// Return only if parser made some progress since last
			// sync or if it has not reached 10 sync calls without
			// progress. Otherwise consume at least one token to
			// avoid an endless parser loop (it is possible that
			// both parseOperand and parseStmt call syncStmt and
			// correctly do not advance, thus the need for the
			// invocation limit p.syncCnt).
			if p.pos == p.syncPos && p.syncCnt < 10 {
				p.syncCnt++
				return
			}
			if p.pos > p.syncPos {
				p.syncPos = p.pos
				p.syncCnt = 0
				return
			}
			// Reaching here indicates a parser bug, likely an
			// incorrect token list in this function, but it only
			// leads to skipping of possibly correct code if a
			// previous error is present, and thus is preferred
			// over a non-terminating parse.
		case token.EOF:
			return
		}
		p.next()
	}
}

// syncDecl advances to the next declaration.
// Used for synchronization after an error.
//
func syncDecl(p *parser) {
	for {
		switch p.tok {
		case token.CONST, token.TYPE, token.VAR:
			// see comments in syncStmt
			if p.pos == p.syncPos && p.syncCnt < 10 {
				p.syncCnt++
				return
			}
			if p.pos > p.syncPos {
				p.syncPos = p.pos
				p.syncCnt = 0
				return
			}
		case token.EOF:
			return
		}
		p.next()
	}
}

// safePos returns a valid file position for a given position: If pos
// is valid to begin with, safePos returns pos. If pos is out-of-range,
// safePos returns the EOF position.
//
// This is a hack to work around "artificial" end positions in the AST which
// are computed by adding 1 to (presumably valid) token positions. If the
// token positions are invalid due to parse errors, the resulting end position
// may be past the file's EOF position, which would lead to panics if used
// later on.
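// Illustrative sketch (not part of the original source): the error/bailout
// and syncStmt/syncDecl machinery above is what callers observe as a
// scanner.ErrorList. Without AllErrors, at most one error per line is kept
// and parsing bails out after more than ten errors. Source text is made up
// (assumes the standard fmt, go/parser, go/scanner, and go/token imports).
//
//	src := "package p\n\nfunc f() { if }\n"
//	fset := token.NewFileSet()
//	_, err := parser.ParseFile(fset, "bad.go", src, parser.AllErrors)
//	if list, ok := err.(scanner.ErrorList); ok {
//		for _, e := range list {
//			fmt.Println(e.Pos, e.Msg)
//		}
//	}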
519 // 520 func (p *parser) safePos(pos token.Pos) (res token.Pos) { 521 defer func() { 522 if recover() != nil { 523 res = token.Pos(p.file.Base() + p.file.Size()) // EOF position 524 } 525 }() 526 _ = p.file.Offset(pos) // trigger a panic if position is out-of-range 527 return pos 528 } 529 530 // ---------------------------------------------------------------------------- 531 // Identifiers 532 533 func (p *parser) parseIdent() *ast.Ident { 534 pos := p.pos 535 name := "_" 536 if p.tok == token.IDENT { 537 name = p.lit 538 p.next() 539 } else { 540 p.expect(token.IDENT) // use expect() error handling 541 } 542 return &ast.Ident{NamePos: pos, Name: name} 543 } 544 545 func (p *parser) parseIdentList(first *ast.Ident) []*ast.Ident { 546 if p.trace { 547 defer un(trace(p, "IdentList")) 548 } 549 550 list := []*ast.Ident{first} 551 for p.tok == token.COMMA { 552 p.next() 553 list = append(list, p.parseIdent()) 554 } 555 556 return list 557 } 558 559 // ---------------------------------------------------------------------------- 560 // Common productions 561 562 // If lhs is set, result list elements which are identifiers are not resolved. 563 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) { 564 if p.trace { 565 defer un(trace(p, "ExpressionList")) 566 } 567 568 list = append(list, p.checkExpr(p.parseExpr(lhs))) 569 for p.tok == token.COMMA { 570 p.next() 571 list = append(list, p.checkExpr(p.parseExpr(lhs))) 572 } 573 574 return 575 } 576 577 func (p *parser) parseLhsList() []ast.Expr { 578 old := p.inRhs 579 p.inRhs = false 580 list := p.parseExprList(true) 581 switch p.tok { 582 case token.DEFINE: 583 // lhs of a short variable declaration 584 // but doesn't enter scope until later: 585 // caller must call p.shortVarDecl(p.makeIdentList(list)) 586 // at appropriate time. 587 case token.COLON: 588 // lhs of a label declaration or a communication clause of a select 589 // statement (parseLhsList is not called when parsing the case clause 590 // of a switch statement): 591 // - labels are declared by the caller of parseLhsList 592 // - for communication clauses, if there is a stand-alone identifier 593 // followed by a colon, we have a syntax error; there is no need 594 // to resolve the identifier in that case 595 default: 596 // identifiers must be declared elsewhere 597 for _, x := range list { 598 p.resolve(x) 599 } 600 } 601 p.inRhs = old 602 return list 603 } 604 605 func (p *parser) parseRhsList() []ast.Expr { 606 old := p.inRhs 607 p.inRhs = true 608 list := p.parseExprList(false) 609 p.inRhs = old 610 return list 611 } 612 613 // ---------------------------------------------------------------------------- 614 // Types 615 616 func (p *parser) parseType() ast.Expr { 617 if p.trace { 618 defer un(trace(p, "Type")) 619 } 620 621 typ := p.tryType() 622 623 if typ == nil { 624 pos := p.pos 625 p.errorExpected(pos, "type") 626 p.next() // make progress 627 return &ast.BadExpr{From: pos, To: p.pos} 628 } 629 630 return typ 631 } 632 633 // If the result is an identifier, it is not resolved. 
634 func (p *parser) parseTypeName() ast.Expr { 635 if p.trace { 636 defer un(trace(p, "TypeName")) 637 } 638 639 ident := p.parseIdent() 640 // don't resolve ident yet - it may be a parameter or field name 641 642 if p.tok == token.PERIOD { 643 // ident must be a package name 644 p.next() 645 p.resolve(ident) 646 return &ast.SelectorExpr{X: ident, Sel: p.parseIdent()} 647 } 648 649 return ident 650 } 651 652 func (p *parser) parseArrayType() ast.Expr { 653 if p.trace { 654 defer un(trace(p, "ArrayType")) 655 } 656 657 lbrack := p.expect(token.LBRACK) 658 p.exprLev++ 659 var len ast.Expr 660 // always permit ellipsis for more fault-tolerant parsing 661 if p.tok == token.ELLIPSIS { 662 len = &ast.Ellipsis{Ellipsis: p.pos} 663 p.next() 664 } else if p.tok != token.RBRACK { 665 len = p.parseRhs() 666 } 667 p.exprLev-- 668 p.expect(token.RBRACK) 669 elt := p.parseType() 670 671 return &ast.ArrayType{Lbrack: lbrack, Len: len, Elt: elt} 672 } 673 674 func (p *parser) makeIdentList(list []ast.Expr) []*ast.Ident { 675 idents := make([]*ast.Ident, len(list)) 676 for i, x := range list { 677 ident, isIdent := x.(*ast.Ident) 678 if !isIdent { 679 if _, isBad := x.(*ast.BadExpr); !isBad { 680 // only report error if it's a new one 681 p.errorExpected(x.Pos(), "identifier") 682 } 683 ident = &ast.Ident{NamePos: x.Pos(), Name: "_"} 684 } 685 idents[i] = ident 686 } 687 return idents 688 } 689 690 func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field { 691 if p.trace { 692 defer un(trace(p, "FieldDecl")) 693 } 694 695 doc := p.leadComment 696 697 // 1st FieldDecl 698 // A type name used as an anonymous field looks like a field identifier. 699 var list []ast.Expr 700 for { 701 list = append(list, p.parseVarType(false)) 702 if p.tok != token.COMMA { 703 break 704 } 705 p.next() 706 } 707 708 typ := p.tryVarType(false) 709 710 // analyze case 711 var idents []*ast.Ident 712 if typ != nil { 713 // IdentifierList Type 714 idents = p.makeIdentList(list) 715 } else { 716 // ["*"] TypeName (AnonymousField) 717 typ = list[0] // we always have at least one element 718 if n := len(list); n > 1 { 719 p.errorExpected(p.pos, "type") 720 typ = &ast.BadExpr{From: p.pos, To: p.pos} 721 } else if !isTypeName(deref(typ)) { 722 p.errorExpected(typ.Pos(), "anonymous field") 723 typ = &ast.BadExpr{From: typ.Pos(), To: p.safePos(typ.End())} 724 } 725 } 726 727 // Tag 728 var tag *ast.BasicLit 729 if p.tok == token.STRING { 730 tag = &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 731 p.next() 732 } 733 734 p.expectSemi() // call before accessing p.linecomment 735 736 field := &ast.Field{Doc: doc, Names: idents, Type: typ, Tag: tag, Comment: p.lineComment} 737 p.declare(field, nil, scope, ast.Var, idents...) 
738 p.resolve(typ) 739 740 return field 741 } 742 743 func (p *parser) parseStructType() *ast.StructType { 744 if p.trace { 745 defer un(trace(p, "StructType")) 746 } 747 748 pos := p.expect(token.STRUCT) 749 lbrace := p.expect(token.LBRACE) 750 scope := ast.NewScope(nil) // struct scope 751 var list []*ast.Field 752 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN { 753 // a field declaration cannot start with a '(' but we accept 754 // it here for more robust parsing and better error messages 755 // (parseFieldDecl will check and complain if necessary) 756 list = append(list, p.parseFieldDecl(scope)) 757 } 758 rbrace := p.expect(token.RBRACE) 759 760 return &ast.StructType{ 761 Struct: pos, 762 Fields: &ast.FieldList{ 763 Opening: lbrace, 764 List: list, 765 Closing: rbrace, 766 }, 767 } 768 } 769 770 func (p *parser) parsePointerType() *ast.StarExpr { 771 if p.trace { 772 defer un(trace(p, "PointerType")) 773 } 774 775 star := p.expect(token.MUL) 776 base := p.parseType() 777 778 return &ast.StarExpr{Star: star, X: base} 779 } 780 781 // If the result is an identifier, it is not resolved. 782 func (p *parser) tryVarType(isParam bool) ast.Expr { 783 if isParam && p.tok == token.ELLIPSIS { 784 pos := p.pos 785 p.next() 786 typ := p.tryIdentOrType() // don't use parseType so we can provide better error message 787 if typ != nil { 788 p.resolve(typ) 789 } else { 790 p.error(pos, "'...' parameter is missing type") 791 typ = &ast.BadExpr{From: pos, To: p.pos} 792 } 793 return &ast.Ellipsis{Ellipsis: pos, Elt: typ} 794 } 795 return p.tryIdentOrType() 796 } 797 798 // If the result is an identifier, it is not resolved. 799 func (p *parser) parseVarType(isParam bool) ast.Expr { 800 typ := p.tryVarType(isParam) 801 if typ == nil { 802 pos := p.pos 803 p.errorExpected(pos, "type") 804 p.next() // make progress 805 typ = &ast.BadExpr{From: pos, To: p.pos} 806 } 807 return typ 808 } 809 810 func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) { 811 if p.trace { 812 defer un(trace(p, "ParameterList")) 813 } 814 815 // 1st ParameterDecl 816 // A list of identifiers looks like a list of type names. 817 var list []ast.Expr 818 for { 819 list = append(list, p.parseVarType(ellipsisOk)) 820 if p.tok != token.COMMA { 821 break 822 } 823 p.next() 824 if p.tok == token.RPAREN { 825 break 826 } 827 } 828 829 // analyze case 830 if typ := p.tryVarType(ellipsisOk); typ != nil { 831 // IdentifierList Type 832 idents := p.makeIdentList(list) 833 field := &ast.Field{Names: idents, Type: typ} 834 params = append(params, field) 835 // Go spec: The scope of an identifier denoting a function 836 // parameter or result variable is the function body. 837 p.declare(field, nil, scope, ast.Var, idents...) 838 p.resolve(typ) 839 if !p.atComma("parameter list", token.RPAREN) { 840 return 841 } 842 p.next() 843 for p.tok != token.RPAREN && p.tok != token.EOF { 844 idents := p.parseIdentList(p.parseIdent()) 845 typ := p.parseVarType(ellipsisOk) 846 field := &ast.Field{Names: idents, Type: typ} 847 params = append(params, field) 848 // Go spec: The scope of an identifier denoting a function 849 // parameter or result variable is the function body. 850 p.declare(field, nil, scope, ast.Var, idents...) 
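// Illustrative sketch (not part of the original source): parseFieldDecl and
// parseStructType above yield an *ast.StructType whose field list
// distinguishes named fields, embedded (anonymous) fields, and tags. Source
// text is made up (assumes the standard fmt, go/ast, go/parser, and go/token
// imports).
//
//	src := "package p\n\ntype T struct {\n\tName string `json:\"name\"`\n\tio.Reader // embedded field\n}\n"
//	fset := token.NewFileSet()
//	f, _ := parser.ParseFile(fset, "t.go", src, 0)
//	ast.Inspect(f, func(n ast.Node) bool {
//		if st, ok := n.(*ast.StructType); ok {
//			for _, field := range st.Fields.List {
//				// Embedded fields have no names; Tag is nil when absent.
//				fmt.Println(len(field.Names), field.Tag != nil)
//			}
//		}
//		return true
//	})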
851 p.resolve(typ) 852 if !p.atComma("parameter list", token.RPAREN) { 853 break 854 } 855 p.next() 856 } 857 return 858 } 859 860 // Type { "," Type } (anonymous parameters) 861 params = make([]*ast.Field, len(list)) 862 for i, typ := range list { 863 p.resolve(typ) 864 params[i] = &ast.Field{Type: typ} 865 } 866 return 867 } 868 869 func (p *parser) parseParameters(scope *ast.Scope, ellipsisOk bool) *ast.FieldList { 870 if p.trace { 871 defer un(trace(p, "Parameters")) 872 } 873 874 var params []*ast.Field 875 lparen := p.expect(token.LPAREN) 876 if p.tok != token.RPAREN { 877 params = p.parseParameterList(scope, ellipsisOk) 878 } 879 rparen := p.expect(token.RPAREN) 880 881 return &ast.FieldList{Opening: lparen, List: params, Closing: rparen} 882 } 883 884 func (p *parser) parseResult(scope *ast.Scope) *ast.FieldList { 885 if p.trace { 886 defer un(trace(p, "Result")) 887 } 888 889 if p.tok == token.LPAREN { 890 return p.parseParameters(scope, false) 891 } 892 893 typ := p.tryType() 894 if typ != nil { 895 list := make([]*ast.Field, 1) 896 list[0] = &ast.Field{Type: typ} 897 return &ast.FieldList{List: list} 898 } 899 900 return nil 901 } 902 903 func (p *parser) parseSignature(scope *ast.Scope) (params, results *ast.FieldList) { 904 if p.trace { 905 defer un(trace(p, "Signature")) 906 } 907 908 params = p.parseParameters(scope, true) 909 results = p.parseResult(scope) 910 911 return 912 } 913 914 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) { 915 if p.trace { 916 defer un(trace(p, "FuncType")) 917 } 918 919 pos := p.expect(token.FUNC) 920 scope := ast.NewScope(p.topScope) // function scope 921 params, results := p.parseSignature(scope) 922 923 return &ast.FuncType{Func: pos, Params: params, Results: results}, scope 924 } 925 926 func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field { 927 if p.trace { 928 defer un(trace(p, "MethodSpec")) 929 } 930 931 doc := p.leadComment 932 var idents []*ast.Ident 933 var typ ast.Expr 934 x := p.parseTypeName() 935 if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN { 936 // method 937 idents = []*ast.Ident{ident} 938 scope := ast.NewScope(nil) // method scope 939 params, results := p.parseSignature(scope) 940 typ = &ast.FuncType{Func: token.NoPos, Params: params, Results: results} 941 } else { 942 // embedded interface 943 typ = x 944 p.resolve(typ) 945 } 946 p.expectSemi() // call before accessing p.linecomment 947 948 spec := &ast.Field{Doc: doc, Names: idents, Type: typ, Comment: p.lineComment} 949 p.declare(spec, nil, scope, ast.Fun, idents...) 
950 951 return spec 952 } 953 954 func (p *parser) parseInterfaceType() *ast.InterfaceType { 955 if p.trace { 956 defer un(trace(p, "InterfaceType")) 957 } 958 959 pos := p.expect(token.INTERFACE) 960 lbrace := p.expect(token.LBRACE) 961 scope := ast.NewScope(nil) // interface scope 962 var list []*ast.Field 963 for p.tok == token.IDENT { 964 list = append(list, p.parseMethodSpec(scope)) 965 } 966 rbrace := p.expect(token.RBRACE) 967 968 return &ast.InterfaceType{ 969 Interface: pos, 970 Methods: &ast.FieldList{ 971 Opening: lbrace, 972 List: list, 973 Closing: rbrace, 974 }, 975 } 976 } 977 978 func (p *parser) parseMapType() *ast.MapType { 979 if p.trace { 980 defer un(trace(p, "MapType")) 981 } 982 983 pos := p.expect(token.MAP) 984 p.expect(token.LBRACK) 985 key := p.parseType() 986 p.expect(token.RBRACK) 987 value := p.parseType() 988 989 return &ast.MapType{Map: pos, Key: key, Value: value} 990 } 991 992 func (p *parser) parseChanType() *ast.ChanType { 993 if p.trace { 994 defer un(trace(p, "ChanType")) 995 } 996 997 pos := p.pos 998 dir := ast.SEND | ast.RECV 999 var arrow token.Pos 1000 if p.tok == token.CHAN { 1001 p.next() 1002 if p.tok == token.ARROW { 1003 arrow = p.pos 1004 p.next() 1005 dir = ast.SEND 1006 } 1007 } else { 1008 arrow = p.expect(token.ARROW) 1009 p.expect(token.CHAN) 1010 dir = ast.RECV 1011 } 1012 value := p.parseType() 1013 1014 return &ast.ChanType{Begin: pos, Arrow: arrow, Dir: dir, Value: value} 1015 } 1016 1017 // If the result is an identifier, it is not resolved. 1018 func (p *parser) tryIdentOrType() ast.Expr { 1019 switch p.tok { 1020 case token.IDENT: 1021 return p.parseTypeName() 1022 case token.LBRACK: 1023 return p.parseArrayType() 1024 case token.STRUCT: 1025 return p.parseStructType() 1026 case token.MUL: 1027 return p.parsePointerType() 1028 case token.FUNC: 1029 typ, _ := p.parseFuncType() 1030 return typ 1031 case token.INTERFACE: 1032 return p.parseInterfaceType() 1033 case token.MAP: 1034 return p.parseMapType() 1035 case token.CHAN, token.ARROW: 1036 return p.parseChanType() 1037 case token.LPAREN: 1038 lparen := p.pos 1039 p.next() 1040 typ := p.parseType() 1041 rparen := p.expect(token.RPAREN) 1042 return &ast.ParenExpr{Lparen: lparen, X: typ, Rparen: rparen} 1043 } 1044 1045 // no type found 1046 return nil 1047 } 1048 1049 func (p *parser) tryType() ast.Expr { 1050 typ := p.tryIdentOrType() 1051 if typ != nil { 1052 p.resolve(typ) 1053 } 1054 return typ 1055 } 1056 1057 // ---------------------------------------------------------------------------- 1058 // Blocks 1059 1060 func (p *parser) parseStmtList() (list []ast.Stmt) { 1061 if p.trace { 1062 defer un(trace(p, "StatementList")) 1063 } 1064 1065 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRACE && p.tok != token.EOF { 1066 list = append(list, p.parseStmt()) 1067 } 1068 1069 return 1070 } 1071 1072 func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt { 1073 if p.trace { 1074 defer un(trace(p, "Body")) 1075 } 1076 1077 lbrace := p.expect(token.LBRACE) 1078 p.topScope = scope // open function scope 1079 p.openLabelScope() 1080 list := p.parseStmtList() 1081 p.closeLabelScope() 1082 p.closeScope() 1083 rbrace := p.expect(token.RBRACE) 1084 1085 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1086 } 1087 1088 func (p *parser) parseBlockStmt() *ast.BlockStmt { 1089 if p.trace { 1090 defer un(trace(p, "BlockStmt")) 1091 } 1092 1093 lbrace := p.expect(token.LBRACE) 1094 p.openScope() 1095 list := p.parseStmtList() 1096 p.closeScope() 1097 
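// Illustrative sketch (not part of the original source): parseMethodSpec and
// parseInterfaceType above represent both explicit methods and embedded
// interfaces as *ast.Field entries. The interface text is made up (assumes
// the standard fmt, go/ast, and go/parser imports).
//
//	expr, err := parser.ParseExpr("interface {\n\tRead(p []byte) (n int, err error)\n\tio.Closer\n}")
//	if err != nil {
//		panic(err)
//	}
//	it := expr.(*ast.InterfaceType)
//	for _, m := range it.Methods.List {
//		// A method has a name and an *ast.FuncType; an embedded
//		// interface has no names and a (qualified) type name.
//		fmt.Println(len(m.Names))
//	}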
rbrace := p.expect(token.RBRACE) 1098 1099 return &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1100 } 1101 1102 // ---------------------------------------------------------------------------- 1103 // Expressions 1104 1105 func (p *parser) parseFuncTypeOrLit() ast.Expr { 1106 if p.trace { 1107 defer un(trace(p, "FuncTypeOrLit")) 1108 } 1109 1110 typ, scope := p.parseFuncType() 1111 if p.tok != token.LBRACE { 1112 // function type only 1113 return typ 1114 } 1115 1116 p.exprLev++ 1117 body := p.parseBody(scope) 1118 p.exprLev-- 1119 1120 return &ast.FuncLit{Type: typ, Body: body} 1121 } 1122 1123 // parseOperand may return an expression or a raw type (incl. array 1124 // types of the form [...]T. Callers must verify the result. 1125 // If lhs is set and the result is an identifier, it is not resolved. 1126 // 1127 func (p *parser) parseOperand(lhs bool) ast.Expr { 1128 if p.trace { 1129 defer un(trace(p, "Operand")) 1130 } 1131 1132 switch p.tok { 1133 case token.IDENT: 1134 x := p.parseIdent() 1135 if !lhs { 1136 p.resolve(x) 1137 } 1138 return x 1139 1140 case token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING: 1141 x := &ast.BasicLit{ValuePos: p.pos, Kind: p.tok, Value: p.lit} 1142 p.next() 1143 return x 1144 1145 case token.LPAREN: 1146 lparen := p.pos 1147 p.next() 1148 p.exprLev++ 1149 x := p.parseRhsOrType() // types may be parenthesized: (some type) 1150 p.exprLev-- 1151 rparen := p.expect(token.RPAREN) 1152 return &ast.ParenExpr{Lparen: lparen, X: x, Rparen: rparen} 1153 1154 case token.FUNC: 1155 return p.parseFuncTypeOrLit() 1156 } 1157 1158 if typ := p.tryIdentOrType(); typ != nil { 1159 // could be type for composite literal or conversion 1160 _, isIdent := typ.(*ast.Ident) 1161 assert(!isIdent, "type cannot be identifier") 1162 return typ 1163 } 1164 1165 // we have an error 1166 pos := p.pos 1167 p.errorExpected(pos, "operand") 1168 syncStmt(p) 1169 return &ast.BadExpr{From: pos, To: p.pos} 1170 } 1171 1172 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr { 1173 if p.trace { 1174 defer un(trace(p, "TypeAssertion")) 1175 } 1176 1177 lparen := p.expect(token.LPAREN) 1178 var typ ast.Expr 1179 if p.tok == token.TYPE { 1180 // type switch: typ == nil 1181 p.next() 1182 } else { 1183 typ = p.parseType() 1184 } 1185 rparen := p.expect(token.RPAREN) 1186 1187 return &ast.TypeAssertExpr{X: x, Type: typ, Lparen: lparen, Rparen: rparen} 1188 } 1189 1190 func (p *parser) parseIndexOrSlice(x ast.Expr) ast.Expr { 1191 if p.trace { 1192 defer un(trace(p, "IndexOrSlice")) 1193 } 1194 1195 const N = 3 // change the 3 to 2 to disable 3-index slices 1196 lbrack := p.expect(token.LBRACK) 1197 p.exprLev++ 1198 var index [N]ast.Expr 1199 var colons [N - 1]token.Pos 1200 if p.tok != token.COLON { 1201 index[0] = p.parseRhs() 1202 } 1203 ncolons := 0 1204 for p.tok == token.COLON && ncolons < len(colons) { 1205 colons[ncolons] = p.pos 1206 ncolons++ 1207 p.next() 1208 if p.tok != token.COLON && p.tok != token.RBRACK && p.tok != token.EOF { 1209 index[ncolons] = p.parseRhs() 1210 } 1211 } 1212 p.exprLev-- 1213 rbrack := p.expect(token.RBRACK) 1214 1215 if ncolons > 0 { 1216 // slice expression 1217 slice3 := false 1218 if ncolons == 2 { 1219 slice3 = true 1220 // Check presence of 2nd and 3rd index here rather than during type-checking 1221 // to prevent erroneous programs from passing through gofmt (was issue 7305). 
1222 if index[1] == nil { 1223 p.error(colons[0], "2nd index required in 3-index slice") 1224 index[1] = &ast.BadExpr{From: colons[0] + 1, To: colons[1]} 1225 } 1226 if index[2] == nil { 1227 p.error(colons[1], "3rd index required in 3-index slice") 1228 index[2] = &ast.BadExpr{From: colons[1] + 1, To: rbrack} 1229 } 1230 } 1231 return &ast.SliceExpr{X: x, Lbrack: lbrack, Low: index[0], High: index[1], Max: index[2], Slice3: slice3, Rbrack: rbrack} 1232 } 1233 1234 return &ast.IndexExpr{X: x, Lbrack: lbrack, Index: index[0], Rbrack: rbrack} 1235 } 1236 1237 func (p *parser) parseCallOrConversion(fun ast.Expr) *ast.CallExpr { 1238 if p.trace { 1239 defer un(trace(p, "CallOrConversion")) 1240 } 1241 1242 lparen := p.expect(token.LPAREN) 1243 p.exprLev++ 1244 var list []ast.Expr 1245 var ellipsis token.Pos 1246 for p.tok != token.RPAREN && p.tok != token.EOF && !ellipsis.IsValid() { 1247 list = append(list, p.parseRhsOrType()) // builtins may expect a type: make(some type, ...) 1248 if p.tok == token.ELLIPSIS { 1249 ellipsis = p.pos 1250 p.next() 1251 } 1252 if !p.atComma("argument list", token.RPAREN) { 1253 break 1254 } 1255 p.next() 1256 } 1257 p.exprLev-- 1258 rparen := p.expectClosing(token.RPAREN, "argument list") 1259 1260 return &ast.CallExpr{Fun: fun, Lparen: lparen, Args: list, Ellipsis: ellipsis, Rparen: rparen} 1261 } 1262 1263 func (p *parser) parseValue(keyOk bool) ast.Expr { 1264 if p.trace { 1265 defer un(trace(p, "Element")) 1266 } 1267 1268 if p.tok == token.LBRACE { 1269 return p.parseLiteralValue(nil) 1270 } 1271 1272 // Because the parser doesn't know the composite literal type, it cannot 1273 // know if a key that's an identifier is a struct field name or a name 1274 // denoting a value. The former is not resolved by the parser or the 1275 // resolver. 1276 // 1277 // Instead, _try_ to resolve such a key if possible. If it resolves, 1278 // it a) has correctly resolved, or b) incorrectly resolved because 1279 // the key is a struct field with a name matching another identifier. 1280 // In the former case we are done, and in the latter case we don't 1281 // care because the type checker will do a separate field lookup. 1282 // 1283 // If the key does not resolve, it a) must be defined at the top 1284 // level in another file of the same package, the universe scope, or be 1285 // undeclared; or b) it is a struct field. In the former case, the type 1286 // checker can do a top-level lookup, and in the latter case it will do 1287 // a separate field lookup. 1288 x := p.checkExpr(p.parseExpr(keyOk)) 1289 if keyOk { 1290 if p.tok == token.COLON { 1291 // Try to resolve the key but don't collect it 1292 // as unresolved identifier if it fails so that 1293 // we don't get (possibly false) errors about 1294 // undeclared names. 
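// Illustrative sketch (not part of the original source): parseIndexOrSlice
// above returns either an *ast.IndexExpr or an *ast.SliceExpr; for a 3-index
// slice both the 2nd and 3rd index are required, as checked a few lines up
// (assumes the standard fmt, go/ast, and go/parser imports).
//
//	expr, err := parser.ParseExpr("a[1:2:3]")
//	if err != nil {
//		panic(err)
//	}
//	s := expr.(*ast.SliceExpr)
//	fmt.Println(s.Slice3, s.Low != nil, s.High != nil, s.Max != nil) // true true true true
//
//	// "a[1::3]" is rejected: "2nd index required in 3-index slice".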
1295 p.tryResolve(x, false) 1296 } else { 1297 // not a key 1298 p.resolve(x) 1299 } 1300 } 1301 1302 return x 1303 } 1304 1305 func (p *parser) parseElement() ast.Expr { 1306 if p.trace { 1307 defer un(trace(p, "Element")) 1308 } 1309 1310 x := p.parseValue(true) 1311 if p.tok == token.COLON { 1312 colon := p.pos 1313 p.next() 1314 x = &ast.KeyValueExpr{Key: x, Colon: colon, Value: p.parseValue(false)} 1315 } 1316 1317 return x 1318 } 1319 1320 func (p *parser) parseElementList() (list []ast.Expr) { 1321 if p.trace { 1322 defer un(trace(p, "ElementList")) 1323 } 1324 1325 for p.tok != token.RBRACE && p.tok != token.EOF { 1326 list = append(list, p.parseElement()) 1327 if !p.atComma("composite literal", token.RBRACE) { 1328 break 1329 } 1330 p.next() 1331 } 1332 1333 return 1334 } 1335 1336 func (p *parser) parseLiteralValue(typ ast.Expr) ast.Expr { 1337 if p.trace { 1338 defer un(trace(p, "LiteralValue")) 1339 } 1340 1341 lbrace := p.expect(token.LBRACE) 1342 var elts []ast.Expr 1343 p.exprLev++ 1344 if p.tok != token.RBRACE { 1345 elts = p.parseElementList() 1346 } 1347 p.exprLev-- 1348 rbrace := p.expectClosing(token.RBRACE, "composite literal") 1349 return &ast.CompositeLit{Type: typ, Lbrace: lbrace, Elts: elts, Rbrace: rbrace} 1350 } 1351 1352 // checkExpr checks that x is an expression (and not a type). 1353 func (p *parser) checkExpr(x ast.Expr) ast.Expr { 1354 switch unparen(x).(type) { 1355 case *ast.BadExpr: 1356 case *ast.Ident: 1357 case *ast.BasicLit: 1358 case *ast.FuncLit: 1359 case *ast.CompositeLit: 1360 case *ast.ParenExpr: 1361 panic("unreachable") 1362 case *ast.SelectorExpr: 1363 case *ast.IndexExpr: 1364 case *ast.SliceExpr: 1365 case *ast.TypeAssertExpr: 1366 // If t.Type == nil we have a type assertion of the form 1367 // y.(type), which is only allowed in type switch expressions. 1368 // It's hard to exclude those but for the case where we are in 1369 // a type switch. Instead be lenient and test this in the type 1370 // checker. 1371 case *ast.CallExpr: 1372 case *ast.StarExpr: 1373 case *ast.UnaryExpr: 1374 case *ast.BinaryExpr: 1375 default: 1376 // all other nodes are not proper expressions 1377 p.errorExpected(x.Pos(), "expression") 1378 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1379 } 1380 return x 1381 } 1382 1383 // isTypeName reports whether x is a (qualified) TypeName. 1384 func isTypeName(x ast.Expr) bool { 1385 switch t := x.(type) { 1386 case *ast.BadExpr: 1387 case *ast.Ident: 1388 case *ast.SelectorExpr: 1389 _, isIdent := t.X.(*ast.Ident) 1390 return isIdent 1391 default: 1392 return false // all other nodes are not type names 1393 } 1394 return true 1395 } 1396 1397 // isLiteralType reports whether x is a legal composite literal type. 1398 func isLiteralType(x ast.Expr) bool { 1399 switch t := x.(type) { 1400 case *ast.BadExpr: 1401 case *ast.Ident: 1402 case *ast.SelectorExpr: 1403 _, isIdent := t.X.(*ast.Ident) 1404 return isIdent 1405 case *ast.ArrayType: 1406 case *ast.StructType: 1407 case *ast.MapType: 1408 default: 1409 return false // all other nodes are not legal composite literal types 1410 } 1411 return true 1412 } 1413 1414 // If x is of the form *T, deref returns T, otherwise it returns x. 1415 func deref(x ast.Expr) ast.Expr { 1416 if p, isPtr := x.(*ast.StarExpr); isPtr { 1417 x = p.X 1418 } 1419 return x 1420 } 1421 1422 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x. 
1423 func unparen(x ast.Expr) ast.Expr { 1424 if p, isParen := x.(*ast.ParenExpr); isParen { 1425 x = unparen(p.X) 1426 } 1427 return x 1428 } 1429 1430 // checkExprOrType checks that x is an expression or a type 1431 // (and not a raw type such as [...]T). 1432 // 1433 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr { 1434 switch t := unparen(x).(type) { 1435 case *ast.ParenExpr: 1436 panic("unreachable") 1437 case *ast.UnaryExpr: 1438 case *ast.ArrayType: 1439 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis { 1440 p.error(len.Pos(), "expected array length, found '...'") 1441 x = &ast.BadExpr{From: x.Pos(), To: p.safePos(x.End())} 1442 } 1443 } 1444 1445 // all other nodes are expressions or types 1446 return x 1447 } 1448 1449 // If lhs is set and the result is an identifier, it is not resolved. 1450 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr { 1451 if p.trace { 1452 defer un(trace(p, "PrimaryExpr")) 1453 } 1454 1455 x := p.parseOperand(lhs) 1456 L: 1457 for { 1458 switch p.tok { 1459 case token.PERIOD: 1460 p.next() 1461 if lhs { 1462 p.resolve(x) 1463 } 1464 switch p.tok { 1465 case token.IDENT: 1466 x = &ast.SelectorExpr{X: p.checkExprOrType(x), Sel: p.parseIdent()} 1467 case token.LPAREN: 1468 x = p.parseTypeAssertion(p.checkExpr(x)) 1469 default: 1470 pos := p.pos 1471 p.errorExpected(pos, "selector or type assertion") 1472 p.next() // make progress 1473 sel := &ast.Ident{NamePos: pos, Name: "_"} 1474 x = &ast.SelectorExpr{X: x, Sel: sel} 1475 } 1476 case token.LBRACK: 1477 if lhs { 1478 p.resolve(x) 1479 } 1480 x = p.parseIndexOrSlice(p.checkExpr(x)) 1481 case token.LPAREN: 1482 if lhs { 1483 p.resolve(x) 1484 } 1485 x = p.parseCallOrConversion(p.checkExprOrType(x)) 1486 case token.LBRACE: 1487 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)) { 1488 if lhs { 1489 p.resolve(x) 1490 } 1491 x = p.parseLiteralValue(x) 1492 } else { 1493 break L 1494 } 1495 default: 1496 break L 1497 } 1498 lhs = false // no need to try to resolve again 1499 } 1500 1501 return x 1502 } 1503 1504 // If lhs is set and the result is an identifier, it is not resolved. 1505 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr { 1506 if p.trace { 1507 defer un(trace(p, "UnaryExpr")) 1508 } 1509 1510 switch p.tok { 1511 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND: 1512 pos, op := p.pos, p.tok 1513 p.next() 1514 x := p.parseUnaryExpr(false) 1515 return &ast.UnaryExpr{OpPos: pos, Op: op, X: p.checkExpr(x)} 1516 1517 case token.ARROW: 1518 // channel type or receive expression 1519 arrow := p.pos 1520 p.next() 1521 1522 // If the next token is token.CHAN we still don't know if it 1523 // is a channel type or a receive operation - we only know 1524 // once we have found the end of the unary expression. 
There 1525 // are two cases: 1526 // 1527 // <- type => (<-type) must be channel type 1528 // <- expr => <-(expr) is a receive from an expression 1529 // 1530 // In the first case, the arrow must be re-associated with 1531 // the channel type parsed already: 1532 // 1533 // <- (chan type) => (<-chan type) 1534 // <- (chan<- type) => (<-chan (<-type)) 1535 1536 x := p.parseUnaryExpr(false) 1537 1538 // determine which case we have 1539 if typ, ok := x.(*ast.ChanType); ok { 1540 // (<-type) 1541 1542 // re-associate position info and <- 1543 dir := ast.SEND 1544 for ok && dir == ast.SEND { 1545 if typ.Dir == ast.RECV { 1546 // error: (<-type) is (<-(<-chan T)) 1547 p.errorExpected(typ.Arrow, "'chan'") 1548 } 1549 arrow, typ.Begin, typ.Arrow = typ.Arrow, arrow, arrow 1550 dir, typ.Dir = typ.Dir, ast.RECV 1551 typ, ok = typ.Value.(*ast.ChanType) 1552 } 1553 if dir == ast.SEND { 1554 p.errorExpected(arrow, "channel type") 1555 } 1556 1557 return x 1558 } 1559 1560 // <-(expr) 1561 return &ast.UnaryExpr{OpPos: arrow, Op: token.ARROW, X: p.checkExpr(x)} 1562 1563 case token.MUL: 1564 // pointer type or unary "*" expression 1565 pos := p.pos 1566 p.next() 1567 x := p.parseUnaryExpr(false) 1568 return &ast.StarExpr{Star: pos, X: p.checkExprOrType(x)} 1569 } 1570 1571 return p.parsePrimaryExpr(lhs) 1572 } 1573 1574 func (p *parser) tokPrec() (token.Token, int) { 1575 tok := p.tok 1576 if p.inRhs && tok == token.ASSIGN { 1577 tok = token.EQL 1578 } 1579 return tok, tok.Precedence() 1580 } 1581 1582 // If lhs is set and the result is an identifier, it is not resolved. 1583 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr { 1584 if p.trace { 1585 defer un(trace(p, "BinaryExpr")) 1586 } 1587 1588 x := p.parseUnaryExpr(lhs) 1589 for { 1590 op, oprec := p.tokPrec() 1591 if oprec < prec1 { 1592 return x 1593 } 1594 pos := p.expect(op) 1595 if lhs { 1596 p.resolve(x) 1597 lhs = false 1598 } 1599 y := p.parseBinaryExpr(false, oprec+1) 1600 x = &ast.BinaryExpr{X: p.checkExpr(x), OpPos: pos, Op: op, Y: p.checkExpr(y)} 1601 } 1602 } 1603 1604 // If lhs is set and the result is an identifier, it is not resolved. 1605 // The result may be a type or even a raw type ([...]int). Callers must 1606 // check the result (using checkExpr or checkExprOrType), depending on 1607 // context. 1608 func (p *parser) parseExpr(lhs bool) ast.Expr { 1609 if p.trace { 1610 defer un(trace(p, "Expression")) 1611 } 1612 1613 return p.parseBinaryExpr(lhs, token.LowestPrec+1) 1614 } 1615 1616 func (p *parser) parseRhs() ast.Expr { 1617 old := p.inRhs 1618 p.inRhs = true 1619 x := p.checkExpr(p.parseExpr(false)) 1620 p.inRhs = old 1621 return x 1622 } 1623 1624 func (p *parser) parseRhsOrType() ast.Expr { 1625 old := p.inRhs 1626 p.inRhs = true 1627 x := p.checkExprOrType(p.parseExpr(false)) 1628 p.inRhs = old 1629 return x 1630 } 1631 1632 // ---------------------------------------------------------------------------- 1633 // Statements 1634 1635 // Parsing modes for parseSimpleStmt. 1636 const ( 1637 basic = iota 1638 labelOk 1639 rangeOk 1640 ) 1641 1642 // parseSimpleStmt returns true as 2nd result if it parsed the assignment 1643 // of a range clause (with mode == rangeOk). The returned statement is an 1644 // assignment with a right-hand side that is a single unary expression of 1645 // the form "range x". No guarantees are given for the left-hand side. 
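// Illustrative sketch (not part of the original source): the arrow handling
// in parseUnaryExpr above distinguishes a receive operation from a
// receive-only channel type and re-associates the '<-' accordingly (assumes
// the standard fmt, go/ast, go/parser, and go/token imports).
//
//	t, _ := parser.ParseExpr("<-chan int") // channel type
//	fmt.Println(t.(*ast.ChanType).Dir == ast.RECV) // true
//
//	r, _ := parser.ParseExpr("<-c") // receive expression
//	fmt.Println(r.(*ast.UnaryExpr).Op == token.ARROW) // true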
1646 func (p *parser) parseSimpleStmt(mode int) (ast.Stmt, bool) { 1647 if p.trace { 1648 defer un(trace(p, "SimpleStmt")) 1649 } 1650 1651 x := p.parseLhsList() 1652 1653 switch p.tok { 1654 case 1655 token.DEFINE, token.ASSIGN, token.ADD_ASSIGN, 1656 token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN, 1657 token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN, 1658 token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN: 1659 // assignment statement, possibly part of a range clause 1660 pos, tok := p.pos, p.tok 1661 p.next() 1662 var y []ast.Expr 1663 isRange := false 1664 if mode == rangeOk && p.tok == token.RANGE && (tok == token.DEFINE || tok == token.ASSIGN) { 1665 pos := p.pos 1666 p.next() 1667 y = []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}} 1668 isRange = true 1669 } else { 1670 y = p.parseRhsList() 1671 } 1672 as := &ast.AssignStmt{Lhs: x, TokPos: pos, Tok: tok, Rhs: y} 1673 if tok == token.DEFINE { 1674 p.shortVarDecl(as, x) 1675 } 1676 return as, isRange 1677 } 1678 1679 if len(x) > 1 { 1680 p.errorExpected(x[0].Pos(), "1 expression") 1681 // continue with first expression 1682 } 1683 1684 switch p.tok { 1685 case token.COLON: 1686 // labeled statement 1687 colon := p.pos 1688 p.next() 1689 if label, isIdent := x[0].(*ast.Ident); mode == labelOk && isIdent { 1690 // Go spec: The scope of a label is the body of the function 1691 // in which it is declared and excludes the body of any nested 1692 // function. 1693 stmt := &ast.LabeledStmt{Label: label, Colon: colon, Stmt: p.parseStmt()} 1694 p.declare(stmt, nil, p.labelScope, ast.Lbl, label) 1695 return stmt, false 1696 } 1697 // The label declaration typically starts at x[0].Pos(), but the label 1698 // declaration may be erroneous due to a token after that position (and 1699 // before the ':'). If SpuriousErrors is not set, the (only) error re- 1700 // ported for the line is the illegal label error instead of the token 1701 // before the ':' that caused the problem. Thus, use the (latest) colon 1702 // position for error reporting. 
1703 p.error(colon, "illegal label declaration") 1704 return &ast.BadStmt{From: x[0].Pos(), To: colon + 1}, false 1705 1706 case token.ARROW: 1707 // send statement 1708 arrow := p.pos 1709 p.next() 1710 y := p.parseRhs() 1711 return &ast.SendStmt{Chan: x[0], Arrow: arrow, Value: y}, false 1712 1713 case token.INC, token.DEC: 1714 // increment or decrement 1715 s := &ast.IncDecStmt{X: x[0], TokPos: p.pos, Tok: p.tok} 1716 p.next() 1717 return s, false 1718 } 1719 1720 // expression 1721 return &ast.ExprStmt{X: x[0]}, false 1722 } 1723 1724 func (p *parser) parseCallExpr(callType string) *ast.CallExpr { 1725 x := p.parseRhsOrType() // could be a conversion: (some type)(x) 1726 if call, isCall := x.(*ast.CallExpr); isCall { 1727 return call 1728 } 1729 if _, isBad := x.(*ast.BadExpr); !isBad { 1730 // only report error if it's a new one 1731 p.error(p.safePos(x.End()), fmt.Sprintf("function must be invoked in %s statement", callType)) 1732 } 1733 return nil 1734 } 1735 1736 func (p *parser) parseGoStmt() ast.Stmt { 1737 if p.trace { 1738 defer un(trace(p, "GoStmt")) 1739 } 1740 1741 pos := p.expect(token.GO) 1742 call := p.parseCallExpr("go") 1743 p.expectSemi() 1744 if call == nil { 1745 return &ast.BadStmt{From: pos, To: pos + 2} // len("go") 1746 } 1747 1748 return &ast.GoStmt{Go: pos, Call: call} 1749 } 1750 1751 func (p *parser) parseDeferStmt() ast.Stmt { 1752 if p.trace { 1753 defer un(trace(p, "DeferStmt")) 1754 } 1755 1756 pos := p.expect(token.DEFER) 1757 call := p.parseCallExpr("defer") 1758 p.expectSemi() 1759 if call == nil { 1760 return &ast.BadStmt{From: pos, To: pos + 5} // len("defer") 1761 } 1762 1763 return &ast.DeferStmt{Defer: pos, Call: call} 1764 } 1765 1766 func (p *parser) parseReturnStmt() *ast.ReturnStmt { 1767 if p.trace { 1768 defer un(trace(p, "ReturnStmt")) 1769 } 1770 1771 pos := p.pos 1772 p.expect(token.RETURN) 1773 var x []ast.Expr 1774 if p.tok != token.SEMICOLON && p.tok != token.RBRACE { 1775 x = p.parseRhsList() 1776 } 1777 p.expectSemi() 1778 1779 return &ast.ReturnStmt{Return: pos, Results: x} 1780 } 1781 1782 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt { 1783 if p.trace { 1784 defer un(trace(p, "BranchStmt")) 1785 } 1786 1787 pos := p.expect(tok) 1788 var label *ast.Ident 1789 if tok != token.FALLTHROUGH && p.tok == token.IDENT { 1790 label = p.parseIdent() 1791 // add to list of unresolved targets 1792 n := len(p.targetStack) - 1 1793 p.targetStack[n] = append(p.targetStack[n], label) 1794 } 1795 p.expectSemi() 1796 1797 return &ast.BranchStmt{TokPos: pos, Tok: tok, Label: label} 1798 } 1799 1800 func (p *parser) makeExpr(s ast.Stmt, kind string) ast.Expr { 1801 if s == nil { 1802 return nil 1803 } 1804 if es, isExpr := s.(*ast.ExprStmt); isExpr { 1805 return p.checkExpr(es.X) 1806 } 1807 p.error(s.Pos(), fmt.Sprintf("expected %s, found simple statement (missing parentheses around composite literal?)", kind)) 1808 return &ast.BadExpr{From: s.Pos(), To: p.safePos(s.End())} 1809 } 1810 1811 func (p *parser) parseIfStmt() *ast.IfStmt { 1812 if p.trace { 1813 defer un(trace(p, "IfStmt")) 1814 } 1815 1816 pos := p.expect(token.IF) 1817 p.openScope() 1818 defer p.closeScope() 1819 1820 var s ast.Stmt 1821 var x ast.Expr 1822 { 1823 prevLev := p.exprLev 1824 p.exprLev = -1 1825 if p.tok == token.SEMICOLON { 1826 p.next() 1827 x = p.parseRhs() 1828 } else { 1829 s, _ = p.parseSimpleStmt(basic) 1830 if p.tok == token.SEMICOLON { 1831 p.next() 1832 x = p.parseRhs() 1833 } else { 1834 x = p.makeExpr(s, "boolean expression") 1835 s = nil 
1836 } 1837 } 1838 p.exprLev = prevLev 1839 } 1840 1841 body := p.parseBlockStmt() 1842 var else_ ast.Stmt 1843 if p.tok == token.ELSE { 1844 p.next() 1845 switch p.tok { 1846 case token.IF: 1847 else_ = p.parseIfStmt() 1848 case token.LBRACE: 1849 else_ = p.parseBlockStmt() 1850 p.expectSemi() 1851 default: 1852 p.errorExpected(p.pos, "if statement or block") 1853 else_ = &ast.BadStmt{From: p.pos, To: p.pos} 1854 } 1855 } else { 1856 p.expectSemi() 1857 } 1858 1859 return &ast.IfStmt{If: pos, Init: s, Cond: x, Body: body, Else: else_} 1860 } 1861 1862 func (p *parser) parseTypeList() (list []ast.Expr) { 1863 if p.trace { 1864 defer un(trace(p, "TypeList")) 1865 } 1866 1867 list = append(list, p.parseType()) 1868 for p.tok == token.COMMA { 1869 p.next() 1870 list = append(list, p.parseType()) 1871 } 1872 1873 return 1874 } 1875 1876 func (p *parser) parseCaseClause(typeSwitch bool) *ast.CaseClause { 1877 if p.trace { 1878 defer un(trace(p, "CaseClause")) 1879 } 1880 1881 pos := p.pos 1882 var list []ast.Expr 1883 if p.tok == token.CASE { 1884 p.next() 1885 if typeSwitch { 1886 list = p.parseTypeList() 1887 } else { 1888 list = p.parseRhsList() 1889 } 1890 } else { 1891 p.expect(token.DEFAULT) 1892 } 1893 1894 colon := p.expect(token.COLON) 1895 p.openScope() 1896 body := p.parseStmtList() 1897 p.closeScope() 1898 1899 return &ast.CaseClause{Case: pos, List: list, Colon: colon, Body: body} 1900 } 1901 1902 func isTypeSwitchAssert(x ast.Expr) bool { 1903 a, ok := x.(*ast.TypeAssertExpr) 1904 return ok && a.Type == nil 1905 } 1906 1907 func (p *parser) isTypeSwitchGuard(s ast.Stmt) bool { 1908 switch t := s.(type) { 1909 case *ast.ExprStmt: 1910 // x.(type) 1911 return isTypeSwitchAssert(t.X) 1912 case *ast.AssignStmt: 1913 // v := x.(type) 1914 if len(t.Lhs) == 1 && len(t.Rhs) == 1 && isTypeSwitchAssert(t.Rhs[0]) { 1915 switch t.Tok { 1916 case token.ASSIGN: 1917 // permit v = x.(type) but complain 1918 p.error(t.TokPos, "expected ':=', found '='") 1919 fallthrough 1920 case token.DEFINE: 1921 return true 1922 } 1923 } 1924 } 1925 return false 1926 } 1927 1928 func (p *parser) parseSwitchStmt() ast.Stmt { 1929 if p.trace { 1930 defer un(trace(p, "SwitchStmt")) 1931 } 1932 1933 pos := p.expect(token.SWITCH) 1934 p.openScope() 1935 defer p.closeScope() 1936 1937 var s1, s2 ast.Stmt 1938 if p.tok != token.LBRACE { 1939 prevLev := p.exprLev 1940 p.exprLev = -1 1941 if p.tok != token.SEMICOLON { 1942 s2, _ = p.parseSimpleStmt(basic) 1943 } 1944 if p.tok == token.SEMICOLON { 1945 p.next() 1946 s1 = s2 1947 s2 = nil 1948 if p.tok != token.LBRACE { 1949 // A TypeSwitchGuard may declare a variable in addition 1950 // to the variable declared in the initial SimpleStmt. 1951 // Introduce extra scope to avoid redeclaration errors: 1952 // 1953 // switch t := 0; t := x.(T) { ... } 1954 // 1955 // (this code is not valid Go because the first t 1956 // cannot be accessed and thus is never used, the extra 1957 // scope is needed for the correct error message). 1958 // 1959 // If we don't have a type switch, s2 must be an expression. 1960 // Having the extra nested but empty scope won't affect it. 
1961 p.openScope() 1962 defer p.closeScope() 1963 s2, _ = p.parseSimpleStmt(basic) 1964 } 1965 } 1966 p.exprLev = prevLev 1967 } 1968 1969 typeSwitch := p.isTypeSwitchGuard(s2) 1970 lbrace := p.expect(token.LBRACE) 1971 var list []ast.Stmt 1972 for p.tok == token.CASE || p.tok == token.DEFAULT { 1973 list = append(list, p.parseCaseClause(typeSwitch)) 1974 } 1975 rbrace := p.expect(token.RBRACE) 1976 p.expectSemi() 1977 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 1978 1979 if typeSwitch { 1980 return &ast.TypeSwitchStmt{Switch: pos, Init: s1, Assign: s2, Body: body} 1981 } 1982 1983 return &ast.SwitchStmt{Switch: pos, Init: s1, Tag: p.makeExpr(s2, "switch expression"), Body: body} 1984 } 1985 1986 func (p *parser) parseCommClause() *ast.CommClause { 1987 if p.trace { 1988 defer un(trace(p, "CommClause")) 1989 } 1990 1991 p.openScope() 1992 pos := p.pos 1993 var comm ast.Stmt 1994 if p.tok == token.CASE { 1995 p.next() 1996 lhs := p.parseLhsList() 1997 if p.tok == token.ARROW { 1998 // SendStmt 1999 if len(lhs) > 1 { 2000 p.errorExpected(lhs[0].Pos(), "1 expression") 2001 // continue with first expression 2002 } 2003 arrow := p.pos 2004 p.next() 2005 rhs := p.parseRhs() 2006 comm = &ast.SendStmt{Chan: lhs[0], Arrow: arrow, Value: rhs} 2007 } else { 2008 // RecvStmt 2009 if tok := p.tok; tok == token.ASSIGN || tok == token.DEFINE { 2010 // RecvStmt with assignment 2011 if len(lhs) > 2 { 2012 p.errorExpected(lhs[0].Pos(), "1 or 2 expressions") 2013 // continue with first two expressions 2014 lhs = lhs[0:2] 2015 } 2016 pos := p.pos 2017 p.next() 2018 rhs := p.parseRhs() 2019 as := &ast.AssignStmt{Lhs: lhs, TokPos: pos, Tok: tok, Rhs: []ast.Expr{rhs}} 2020 if tok == token.DEFINE { 2021 p.shortVarDecl(as, lhs) 2022 } 2023 comm = as 2024 } else { 2025 // lhs must be single receive operation 2026 if len(lhs) > 1 { 2027 p.errorExpected(lhs[0].Pos(), "1 expression") 2028 // continue with first expression 2029 } 2030 comm = &ast.ExprStmt{X: lhs[0]} 2031 } 2032 } 2033 } else { 2034 p.expect(token.DEFAULT) 2035 } 2036 2037 colon := p.expect(token.COLON) 2038 body := p.parseStmtList() 2039 p.closeScope() 2040 2041 return &ast.CommClause{Case: pos, Comm: comm, Colon: colon, Body: body} 2042 } 2043 2044 func (p *parser) parseSelectStmt() *ast.SelectStmt { 2045 if p.trace { 2046 defer un(trace(p, "SelectStmt")) 2047 } 2048 2049 pos := p.expect(token.SELECT) 2050 lbrace := p.expect(token.LBRACE) 2051 var list []ast.Stmt 2052 for p.tok == token.CASE || p.tok == token.DEFAULT { 2053 list = append(list, p.parseCommClause()) 2054 } 2055 rbrace := p.expect(token.RBRACE) 2056 p.expectSemi() 2057 body := &ast.BlockStmt{Lbrace: lbrace, List: list, Rbrace: rbrace} 2058 2059 return &ast.SelectStmt{Select: pos, Body: body} 2060 } 2061 2062 func (p *parser) parseForStmt() ast.Stmt { 2063 if p.trace { 2064 defer un(trace(p, "ForStmt")) 2065 } 2066 2067 pos := p.expect(token.FOR) 2068 p.openScope() 2069 defer p.closeScope() 2070 2071 var s1, s2, s3 ast.Stmt 2072 var isRange bool 2073 if p.tok != token.LBRACE { 2074 prevLev := p.exprLev 2075 p.exprLev = -1 2076 if p.tok != token.SEMICOLON { 2077 if p.tok == token.RANGE { 2078 // "for range x" (nil lhs in assignment) 2079 pos := p.pos 2080 p.next() 2081 y := []ast.Expr{&ast.UnaryExpr{OpPos: pos, Op: token.RANGE, X: p.parseRhs()}} 2082 s2 = &ast.AssignStmt{Rhs: y} 2083 isRange = true 2084 } else { 2085 s2, isRange = p.parseSimpleStmt(rangeOk) 2086 } 2087 } 2088 if !isRange && p.tok == token.SEMICOLON { 2089 p.next() 2090 s1 = s2 2091 s2 = nil 2092 if 
			if p.tok != token.SEMICOLON {
				s2, _ = p.parseSimpleStmt(basic)
			}
			p.expectSemi()
			if p.tok != token.LBRACE {
				s3, _ = p.parseSimpleStmt(basic)
			}
		}
		p.exprLev = prevLev
	}

	body := p.parseBlockStmt()
	p.expectSemi()

	if isRange {
		as := s2.(*ast.AssignStmt)
		// check lhs
		var key, value ast.Expr
		switch len(as.Lhs) {
		case 0:
			// nothing to do
		case 1:
			key = as.Lhs[0]
		case 2:
			key, value = as.Lhs[0], as.Lhs[1]
		default:
			p.errorExpected(as.Lhs[len(as.Lhs)-1].Pos(), "at most 2 expressions")
			return &ast.BadStmt{From: pos, To: p.safePos(body.End())}
		}
		// parseSimpleStmt returned a right-hand side that
		// is a single unary expression of the form "range x"
		x := as.Rhs[0].(*ast.UnaryExpr).X
		return &ast.RangeStmt{
			For:    pos,
			Key:    key,
			Value:  value,
			TokPos: as.TokPos,
			Tok:    as.Tok,
			X:      x,
			Body:   body,
		}
	}

	// regular for statement
	return &ast.ForStmt{
		For:  pos,
		Init: s1,
		Cond: p.makeExpr(s2, "boolean or range expression"),
		Post: s3,
		Body: body,
	}
}

func (p *parser) parseStmt() (s ast.Stmt) {
	if p.trace {
		defer un(trace(p, "Statement"))
	}

	switch p.tok {
	case token.CONST, token.TYPE, token.VAR:
		s = &ast.DeclStmt{Decl: p.parseDecl(syncStmt)}
	case
		// tokens that may start an expression
		token.IDENT, token.INT, token.FLOAT, token.IMAG, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operands
		token.LBRACK, token.STRUCT, token.MAP, token.CHAN, token.INTERFACE, // composite types
		token.ADD, token.SUB, token.MUL, token.AND, token.XOR, token.ARROW, token.NOT: // unary operators
		s, _ = p.parseSimpleStmt(labelOk)
		// because of the required look-ahead, labeled statements are
		// parsed by parseSimpleStmt - don't expect a semicolon after
		// them
		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
			p.expectSemi()
		}
	case token.GO:
		s = p.parseGoStmt()
	case token.DEFER:
		s = p.parseDeferStmt()
	case token.RETURN:
		s = p.parseReturnStmt()
	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
		s = p.parseBranchStmt(p.tok)
	case token.LBRACE:
		s = p.parseBlockStmt()
		p.expectSemi()
	case token.IF:
		s = p.parseIfStmt()
	case token.SWITCH:
		s = p.parseSwitchStmt()
	case token.SELECT:
		s = p.parseSelectStmt()
	case token.FOR:
		s = p.parseForStmt()
	case token.SEMICOLON:
		// Is it ever possible to have an implicit semicolon
		// producing an empty statement in a valid program?
		// (handle correctly anyway)
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: p.lit == "\n"}
		p.next()
	case token.RBRACE:
		// a semicolon may be omitted before a closing "}"
		s = &ast.EmptyStmt{Semicolon: p.pos, Implicit: true}
	default:
		// no statement found
		pos := p.pos
		p.errorExpected(pos, "statement")
		syncStmt(p)
		s = &ast.BadStmt{From: pos, To: p.pos}
	}

	return
}

// ----------------------------------------------------------------------------
// Declarations

type parseSpecFunction func(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec
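// isValidImport reports whether lit is a valid import path: a non-empty
// string whose characters are graphic, non-space, and none of the
// characters !"#$%&'()*,:;<=>?[\]^`{|} or the Unicode replacement
// character. For example, "fmt" and "golang.org/x/tools" are accepted,
// while "", "a b", and "foo|bar" are rejected.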
func isValidImport(lit string) bool {
	const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
	s, _ := strconv.Unquote(lit) // go/scanner returns a legal string literal
	for _, r := range s {
		if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
			return false
		}
	}
	return s != ""
}

func (p *parser) parseImportSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ImportSpec"))
	}

	var ident *ast.Ident
	switch p.tok {
	case token.PERIOD:
		ident = &ast.Ident{NamePos: p.pos, Name: "."}
		p.next()
	case token.IDENT:
		ident = p.parseIdent()
	}

	pos := p.pos
	var path string
	if p.tok == token.STRING {
		path = p.lit
		if !isValidImport(path) {
			p.error(pos, "invalid import path: "+path)
		}
		p.next()
	} else {
		p.expect(token.STRING) // use expect() error handling
	}
	p.expectSemi() // call before accessing p.lineComment

	// collect imports
	spec := &ast.ImportSpec{
		Doc:     doc,
		Name:    ident,
		Path:    &ast.BasicLit{ValuePos: pos, Kind: token.STRING, Value: path},
		Comment: p.lineComment,
	}
	p.imports = append(p.imports, spec)

	return spec
}

// AliasSpec = identifier "=>" [ PackageName "." ] identifier .
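//
// For example, the following alias declarations match this production
// (the keyword comes from the enclosing declaration):
//
//	const c => math.Pi
//	type  R => io.Reader
//	var   w => os.Stdout
//	func  p => fmt.Println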
func (p *parser) parseAliasSpec(doc *ast.CommentGroup, kind ast.ObjKind, ident *ast.Ident) ast.Spec {
	// no tracing since this is already called from parseValueSpec,
	// parseTypeSpec, or parseFuncDecl

	// lhs identifier and "=>" have been consumed already

	var orig ast.Expr = p.parseIdent()
	if p.tok == token.PERIOD {
		// orig must be a package name
		p.next()
		p.resolve(orig)
		orig = &ast.SelectorExpr{X: orig, Sel: p.parseIdent()}
	}

	p.expectSemi() // call before accessing p.lineComment

	spec := &ast.AliasSpec{
		Doc:     doc,
		Name:    ident,
		Orig:    orig,
		Comment: p.lineComment,
	}
	p.declare(spec, nil, p.topScope, kind, ident)

	return spec
}

func (p *parser) parseValueSpec(doc *ast.CommentGroup, keyword token.Token, iota int) ast.Spec {
	if p.trace {
		defer un(trace(p, keyword.String()+"Spec"))
	}

	kind := ast.Con
	if keyword == token.VAR {
		kind = ast.Var
	}

	pos := p.pos
	ident := p.parseIdent()
	if p.tok == token.ALIAS {
		p.next()
		return p.parseAliasSpec(doc, kind, ident)
	}

	idents := p.parseIdentList(ident)
	typ := p.tryType()
	var values []ast.Expr
	// always permit optional initialization for more tolerant parsing
	if p.tok == token.ASSIGN {
		p.next()
		values = p.parseRhsList()
	}
	p.expectSemi() // call before accessing p.lineComment

	switch keyword {
	case token.VAR:
		if typ == nil && values == nil {
			p.error(pos, "missing variable type or initialization")
		}
	case token.CONST:
		if values == nil && (iota == 0 || typ != nil) {
			p.error(pos, "missing constant value")
		}
	}

	// Go spec: The scope of a constant or variable identifier declared inside
	// a function begins at the end of the ConstSpec or VarSpec and ends at
	// the end of the innermost containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	spec := &ast.ValueSpec{
		Doc:     doc,
		Names:   idents,
		Type:    typ,
		Values:  values,
		Comment: p.lineComment,
	}
	p.declare(spec, iota, p.topScope, kind, idents...)

	return spec
}

func (p *parser) parseTypeSpec(doc *ast.CommentGroup, _ token.Token, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "TypeSpec"))
	}

	ident := p.parseIdent()
	if p.tok == token.ALIAS {
		p.next()
		return p.parseAliasSpec(doc, ast.Typ, ident)
	}

	// Go spec: The scope of a type identifier declared inside a function begins
	// at the identifier in the TypeSpec and ends at the end of the innermost
	// containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
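	// Declaring the name before its type is parsed makes the identifier
	// visible inside its own declaration, so self-referential types such as
	//
	//	type List struct{ next *List }
	//
	// resolve correctly.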
	spec := &ast.TypeSpec{Doc: doc, Name: ident}
	p.declare(spec, nil, p.topScope, ast.Typ, ident)

	spec.Type = p.parseType()
	p.expectSemi() // call before accessing p.lineComment
	spec.Comment = p.lineComment

	return spec
}

func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, "GenDecl("+keyword.String()+")"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Pos
	var list []ast.Spec
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for iota := 0; p.tok != token.RPAREN && p.tok != token.EOF; iota++ {
			list = append(list, f(p.leadComment, keyword, iota))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list = append(list, f(nil, keyword, 0))
	}

	return &ast.GenDecl{
		Doc:    doc,
		TokPos: pos,
		Tok:    keyword,
		Lparen: lparen,
		Specs:  list,
		Rparen: rparen,
	}
}

func (p *parser) parseFuncDecl() ast.Decl {
	if p.trace {
		defer un(trace(p, "FunctionDecl"))
	}

	doc := p.leadComment
	pos := p.expect(token.FUNC)
	scope := ast.NewScope(p.topScope) // function scope

	var recv *ast.FieldList
	if p.tok == token.LPAREN {
		recv = p.parseParameters(scope, false)
	}

	ident := p.parseIdent()
	if recv == nil && p.tok == token.ALIAS {
		p.next()
		return &ast.GenDecl{
			Doc:    doc,
			TokPos: pos,
			Tok:    token.FUNC,
			Specs:  []ast.Spec{p.parseAliasSpec(nil, ast.Fun, ident)},
		}
	}

	params, results := p.parseSignature(scope)

	var body *ast.BlockStmt
	if p.tok == token.LBRACE {
		body = p.parseBody(scope)
	}
	p.expectSemi()

	decl := &ast.FuncDecl{
		Doc:  doc,
		Recv: recv,
		Name: ident,
		Type: &ast.FuncType{
			Func:    pos,
			Params:  params,
			Results: results,
		},
		Body: body,
	}
	if recv == nil {
		// Go spec: The scope of an identifier denoting a constant, type,
		// variable, or function (but not method) declared at top level
		// (outside any function) is the package block.
		//
		// init() functions cannot be referred to and there may
		// be more than one - don't put them in the pkgScope
		if ident.Name != "init" {
			p.declare(decl, nil, p.pkgScope, ast.Fun, ident)
		}
	}

	return decl
}

func (p *parser) parseDecl(sync func(*parser)) ast.Decl {
	if p.trace {
		defer un(trace(p, "Declaration"))
	}

	var f parseSpecFunction
	switch p.tok {
	case token.CONST, token.VAR:
		f = p.parseValueSpec

	case token.TYPE:
		f = p.parseTypeSpec

	case token.FUNC:
		return p.parseFuncDecl()

	default:
		pos := p.pos
		p.errorExpected(pos, "declaration")
		sync(p)
		return &ast.BadDecl{From: pos, To: p.pos}
	}

	return p.parseGenDecl(p.tok, f)
}

// ----------------------------------------------------------------------------
// Source files

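// parseFile parses a complete source file: the package clause, any import
// declarations, and (unless restricted by the parse mode) the remaining
// top-level declarations. File-level identifiers are then resolved against
// the package scope; identifiers that remain unresolved are recorded in
// ast.File.Unresolved.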
func (p *parser) parseFile() *ast.File {
	if p.trace {
		defer un(trace(p, "File"))
	}

	// Don't bother parsing the rest if we had errors scanning the first token.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	// package clause
	doc := p.leadComment
	pos := p.expect(token.PACKAGE)
	// Go spec: The package clause is not a declaration;
	// the package name does not appear in any scope.
	ident := p.parseIdent()
	if ident.Name == "_" && p.mode&DeclarationErrors != 0 {
		p.error(p.pos, "invalid package name _")
	}
	p.expectSemi()

	// Don't bother parsing the rest if we had errors parsing the package clause.
	// Likely not a Go source file at all.
	if p.errors.Len() != 0 {
		return nil
	}

	p.openScope()
	p.pkgScope = p.topScope
	var decls []ast.Decl
	if p.mode&PackageClauseOnly == 0 {
		// import decls
		for p.tok == token.IMPORT {
			decls = append(decls, p.parseGenDecl(token.IMPORT, p.parseImportSpec))
		}

		if p.mode&ImportsOnly == 0 {
			// rest of package body
			for p.tok != token.EOF {
				decls = append(decls, p.parseDecl(syncDecl))
			}
		}
	}
	p.closeScope()
	assert(p.topScope == nil, "unbalanced scopes")
	assert(p.labelScope == nil, "unbalanced label scopes")

	// resolve global identifiers within the same file
	i := 0
	for _, ident := range p.unresolved {
		// i <= index of the current ident, so writing to p.unresolved[i]
		// below never overwrites an entry that has not been visited yet
		assert(ident.Obj == unresolved, "object already resolved")
		ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel
		if ident.Obj == nil {
			p.unresolved[i] = ident
			i++
		}
	}

	return &ast.File{
		Doc:        doc,
		Package:    pos,
		Name:       ident,
		Decls:      decls,
		Scope:      p.pkgScope,
		Imports:    p.imports,
		Unresolved: p.unresolved[0:i],
		Comments:   p.comments,
	}
}