golang.org/x/tools/gopls@v0.15.3/internal/cache/parsego/parse.go

     1  // Copyright 2023 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package parsego
     6  
     7  import (
     8  	"bytes"
     9  	"context"
    10  	"fmt"
    11  	"go/ast"
    12  	"go/parser"
    13  	"go/scanner"
    14  	"go/token"
    15  	"reflect"
    16  
    17  	"golang.org/x/tools/gopls/internal/protocol"
    18  	"golang.org/x/tools/gopls/internal/util/astutil"
    19  	"golang.org/x/tools/gopls/internal/util/safetoken"
    20  	"golang.org/x/tools/internal/diff"
    21  	"golang.org/x/tools/internal/event"
    22  	"golang.org/x/tools/internal/event/tag"
    23  )
    24  
    25  // Common parse modes; these should be reused wherever possible to increase
    26  // cache hits.
    27  const (
    28  	// ParseHeader specifies that the main package declaration and imports are needed.
    29  	// This is the mode used when attempting to examine the package graph structure.
    30  	ParseHeader = parser.AllErrors | parser.ParseComments | parser.ImportsOnly | parser.SkipObjectResolution
    31  
    32  	// ParseFull specifies the full AST is needed.
    33  	// This is used for files of direct interest where the entire contents must
    34  	// be considered.
    35  	ParseFull = parser.AllErrors | parser.ParseComments | parser.SkipObjectResolution
    36  )
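
// The function below is an illustrative sketch, not part of the original file:
// it shows the practical difference between the two modes on a small made-up
// buffer. ParseHeader includes parser.ImportsOnly, so parsing stops after the
// import declarations; ParseFull parses the whole file.
func sketchParseModes() {
	src := []byte("package p\n\nimport \"fmt\"\n\nfunc f() { fmt.Println(1) }\n")
	hdr, _ := parser.ParseFile(token.NewFileSet(), "hdr.go", src, ParseHeader)
	full, _ := parser.ParseFile(token.NewFileSet(), "full.go", src, ParseFull)
	// The header-only AST contains just the import declaration; the full AST
	// also contains the function declaration.
	fmt.Println(len(hdr.Decls), len(full.Decls)) // 1 2
}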
    37  
    38  // Parse parses a buffer of Go source, repairing the tree if necessary.
    39  //
    40  // The provided ctx is used only for logging.
    41  func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *File, fixes []fixType) {
    42  	if purgeFuncBodies {
    43  		src = astutil.PurgeFuncBodies(src)
    44  	}
    45  	ctx, done := event.Start(ctx, "cache.ParseGoSrc", tag.File.Of(uri.Path()))
    46  	defer done()
    47  
    48  	file, err := parser.ParseFile(fset, uri.Path(), src, mode)
    49  	var parseErr scanner.ErrorList
    50  	if err != nil {
    51  		// We passed a byte slice, so the only possible error is a parse error.
    52  		parseErr = err.(scanner.ErrorList)
    53  	}
    54  
    55  	tok := fset.File(file.Pos())
    56  	if tok == nil {
    57  		// file.Pos is the location of the package declaration (issue #53202). If there was
    58  		// none, we can't find the token.File that ParseFile created, and we
    59  		// have no choice but to recreate it.
    60  		tok = fset.AddFile(uri.Path(), -1, len(src))
    61  		tok.SetLinesForContent(src)
    62  	}
    63  
    64  	fixedSrc := false
    65  	fixedAST := false
    66  	// If there were parse errors, attempt to fix them up.
    67  	if parseErr != nil {
    68  		// Fix any badly parsed parts of the AST.
    69  		astFixes := fixAST(file, tok, src)
    70  		fixedAST = len(astFixes) > 0
    71  		if fixedAST {
    72  			fixes = append(fixes, astFixes...)
    73  		}
    74  
    75  		for i := 0; i < 10; i++ {
    76  			// Fix certain syntax errors that render the file unparseable.
    77  			newSrc, srcFix := fixSrc(file, tok, src)
    78  			if newSrc == nil {
    79  				break
    80  			}
    81  
    82  			// If we thought there was something to fix 10 times in a row,
    83  			// it is likely we got stuck in a loop somehow. Log out a diff
    84  			// of the last changes we made to aid in debugging.
    85  			if i == 9 {
    86  				unified := diff.Unified("before", "after", string(src), string(newSrc))
    87  				event.Log(ctx, fmt.Sprintf("fixSrc loop - last diff:\n%v", unified), tag.File.Of(tok.Name()))
    88  			}
    89  
    90  			newFile, newErr := parser.ParseFile(fset, uri.Path(), newSrc, mode)
    91  			if newFile == nil {
    92  				break // no progress
    93  			}
    94  
     95  			// Maintain the original parseErr so we don't try formatting the
     96  			// doctored file.
    97  			file = newFile
    98  			src = newSrc
    99  			tok = fset.File(file.Pos())
   100  
   101  			// Only now that we accept the fix do we record the src fix from above.
   102  			fixes = append(fixes, srcFix)
   103  			fixedSrc = true
   104  
   105  			if newErr == nil {
   106  				break // nothing to fix
   107  			}
   108  
   109  			// Note that fixedAST is reset after we fix src.
   110  			astFixes = fixAST(file, tok, src)
   111  			fixedAST = len(astFixes) > 0
   112  			if fixedAST {
   113  				fixes = append(fixes, astFixes...)
   114  			}
   115  		}
   116  	}
   117  
   118  	return &File{
   119  		URI:      uri,
   120  		Mode:     mode,
   121  		Src:      src,
   122  		fixedSrc: fixedSrc,
   123  		fixedAST: fixedAST,
   124  		File:     file,
   125  		Tok:      tok,
   126  		Mapper:   protocol.NewMapper(uri, src),
   127  		ParseErr: parseErr,
   128  	}, fixes
   129  }
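
// The function below is an illustrative sketch, not part of the original file.
// It shows a hypothetical caller handing Parse a deliberately broken buffer;
// the URI and source text are assumptions made up for the example.
func sketchParseBrokenFile() {
	src := []byte("package p\n\nfunc f() {\n\tif x\n}\n") // missing block after "if x"
	fset := token.NewFileSet()
	uri := protocol.DocumentURI("file:///tmp/example.go")
	pgf, fixes := Parse(context.Background(), fset, uri, src, ParseFull, false)
	// The original parse error is preserved even though the tree was repaired,
	// and the returned fixes record what kind of repair was applied.
	fmt.Println(pgf.ParseErr != nil, len(fixes) > 0) // expected: true true
}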
   130  
    131  // fixAST inspects the AST and potentially modifies badly parsed nodes (such as
    132  // *ast.BadStmt and *ast.BadExpr) so that the file can be type-checked more effectively.
    133  //
    134  // If fixAST returns a non-empty list of fixes, the resulting AST is considered
    135  // "fixed", meaning positions have been mangled, and type checker errors may not make sense.
   136  func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) {
   137  	var err error
   138  	walkASTWithParent(n, func(n, parent ast.Node) bool {
   139  		switch n := n.(type) {
   140  		case *ast.BadStmt:
   141  			if fixDeferOrGoStmt(n, parent, tok, src) {
   142  				fixes = append(fixes, fixedDeferOrGo)
   143  				// Recursively fix in our fixed node.
   144  				moreFixes := fixAST(parent, tok, src)
   145  				fixes = append(fixes, moreFixes...)
   146  			} else {
   147  				err = fmt.Errorf("unable to parse defer or go from *ast.BadStmt: %v", err)
   148  			}
   149  			return false
   150  		case *ast.BadExpr:
   151  			if fixArrayType(n, parent, tok, src) {
   152  				fixes = append(fixes, fixedArrayType)
   153  				// Recursively fix in our fixed node.
   154  				moreFixes := fixAST(parent, tok, src)
   155  				fixes = append(fixes, moreFixes...)
   156  				return false
   157  			}
   158  
   159  			// Fix cases where parser interprets if/for/switch "init"
   160  			// statement as "cond" expression, e.g.:
   161  			//
   162  			//   // "i := foo" is init statement, not condition.
   163  			//   for i := foo
   164  			//
   165  			if fixInitStmt(n, parent, tok, src) {
   166  				fixes = append(fixes, fixedInit)
   167  			}
   168  			return false
   169  		case *ast.SelectorExpr:
   170  			// Fix cases where a keyword prefix results in a phantom "_" selector, e.g.:
   171  			//
   172  			//   foo.var<> // want to complete to "foo.variance"
   173  			//
   174  			if fixPhantomSelector(n, tok, src) {
   175  				fixes = append(fixes, fixedPhantomSelector)
   176  			}
   177  			return true
   178  
   179  		case *ast.BlockStmt:
   180  			switch parent.(type) {
   181  			case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
   182  				// Adjust closing curly brace of empty switch/select
   183  				// statements so we can complete inside them.
   184  				if fixEmptySwitch(n, tok, src) {
   185  					fixes = append(fixes, fixedEmptySwitch)
   186  				}
   187  			}
   188  
   189  			return true
   190  		default:
   191  			return true
   192  		}
   193  	})
   194  	return fixes
   195  }
   196  
   197  // walkASTWithParent walks the AST rooted at n. The semantics are
   198  // similar to ast.Inspect except it does not call f(nil).
   199  func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) {
   200  	var ancestors []ast.Node
   201  	ast.Inspect(n, func(n ast.Node) (recurse bool) {
   202  		defer func() {
   203  			if recurse {
   204  				ancestors = append(ancestors, n)
   205  			}
   206  		}()
   207  
   208  		if n == nil {
   209  			ancestors = ancestors[:len(ancestors)-1]
   210  			return false
   211  		}
   212  
   213  		var parent ast.Node
   214  		if len(ancestors) > 0 {
   215  			parent = ancestors[len(ancestors)-1]
   216  		}
   217  
   218  		return f(n, parent)
   219  	})
   220  }
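
// Illustrative sketch (not part of the original file): using walkASTWithParent
// to report each node's dynamic type together with its parent's. The *ast.File
// argument is assumed to come from parser.ParseFile; the root's parent is nil.
func sketchWalkWithParent(file *ast.File) {
	walkASTWithParent(file, func(n, parent ast.Node) bool {
		fmt.Printf("%T inside %T\n", n, parent)
		return true // keep recursing into children
	})
}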
   221  
    222  // TODO(rfindley): revert this instrumentation once we're certain the crash in
   223  // #59097 is fixed.
   224  type fixType int
   225  
   226  const (
   227  	noFix fixType = iota
   228  	fixedCurlies
   229  	fixedDanglingSelector
   230  	fixedDeferOrGo
   231  	fixedArrayType
   232  	fixedInit
   233  	fixedPhantomSelector
   234  	fixedEmptySwitch
   235  )
   236  
   237  // fixSrc attempts to modify the file's source code to fix certain
   238  // syntax errors that leave the rest of the file unparsed.
   239  //
   240  // fixSrc returns a non-nil result if and only if a fix was applied.
   241  func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix fixType) {
   242  	walkASTWithParent(f, func(n, parent ast.Node) bool {
   243  		if newSrc != nil {
   244  			return false
   245  		}
   246  
   247  		switch n := n.(type) {
   248  		case *ast.BlockStmt:
   249  			newSrc = fixMissingCurlies(f, n, parent, tf, src)
   250  			if newSrc != nil {
   251  				fix = fixedCurlies
   252  			}
   253  		case *ast.SelectorExpr:
   254  			newSrc = fixDanglingSelector(n, tf, src)
   255  			if newSrc != nil {
   256  				fix = fixedDanglingSelector
   257  			}
   258  		}
   259  
   260  		return newSrc == nil
   261  	})
   262  
   263  	return newSrc, fix
   264  }
   265  
   266  // fixMissingCurlies adds in curly braces for block statements that
   267  // are missing curly braces. For example:
   268  //
   269  //	if foo
   270  //
   271  // becomes
   272  //
   273  //	if foo {}
   274  func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *token.File, src []byte) []byte {
   275  	// If the "{" is already in the source code, there isn't anything to
   276  	// fix since we aren't missing curlies.
   277  	if b.Lbrace.IsValid() {
   278  		braceOffset, err := safetoken.Offset(tok, b.Lbrace)
   279  		if err != nil {
   280  			return nil
   281  		}
   282  		if braceOffset < len(src) && src[braceOffset] == '{' {
   283  			return nil
   284  		}
   285  	}
   286  
   287  	parentLine := safetoken.Line(tok, parent.Pos())
   288  
   289  	if parentLine >= tok.LineCount() {
   290  		// If we are the last line in the file, no need to fix anything.
   291  		return nil
   292  	}
   293  
   294  	// Insert curlies at the end of parent's starting line. The parent
   295  	// is the statement that contains the block, e.g. *ast.IfStmt. The
   296  	// block's Pos()/End() can't be relied upon because they are based
   297  	// on the (missing) curly braces. We assume the statement is a
   298  	// single line for now and try sticking the curly braces at the end.
   299  	insertPos := tok.LineStart(parentLine+1) - 1
   300  
   301  	// Scootch position backwards until it's not in a comment. For example:
   302  	//
   303  	// if foo<> // some amazing comment |
   304  	// someOtherCode()
   305  	//
   306  	// insertPos will be located at "|", so we back it out of the comment.
   307  	didSomething := true
   308  	for didSomething {
   309  		didSomething = false
   310  		for _, c := range f.Comments {
   311  			if c.Pos() < insertPos && insertPos <= c.End() {
   312  				insertPos = c.Pos()
   313  				didSomething = true
   314  			}
   315  		}
   316  	}
   317  
   318  	// Bail out if line doesn't end in an ident or ".". This is to avoid
   319  	// cases like below where we end up making things worse by adding
   320  	// curlies:
   321  	//
   322  	//   if foo &&
   323  	//     bar<>
   324  	switch precedingToken(insertPos, tok, src) {
   325  	case token.IDENT, token.PERIOD:
   326  		// ok
   327  	default:
   328  		return nil
   329  	}
   330  
   331  	var buf bytes.Buffer
   332  	buf.Grow(len(src) + 3)
   333  	offset, err := safetoken.Offset(tok, insertPos)
   334  	if err != nil {
   335  		return nil
   336  	}
   337  	buf.Write(src[:offset])
   338  
   339  	// Detect if we need to insert a semicolon to fix "for" loop situations like:
   340  	//
   341  	//   for i := foo(); foo<>
   342  	//
   343  	// Just adding curlies is not sufficient to make things parse well.
   344  	if fs, ok := parent.(*ast.ForStmt); ok {
   345  		if _, ok := fs.Cond.(*ast.BadExpr); !ok {
   346  			if xs, ok := fs.Post.(*ast.ExprStmt); ok {
   347  				if _, ok := xs.X.(*ast.BadExpr); ok {
   348  					buf.WriteByte(';')
   349  				}
   350  			}
   351  		}
   352  	}
   353  
   354  	// Insert "{}" at insertPos.
   355  	buf.WriteByte('{')
   356  	buf.WriteByte('}')
   357  	buf.Write(src[offset:])
   358  	return buf.Bytes()
   359  }
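
// Illustrative sketch (not part of the original file): driving fixSrc, and
// through it fixMissingCurlies, by hand. The source text is an assumption; the
// parse error comes from the missing block after "if bar".
func sketchFixMissingCurlies() {
	src := []byte("package p\n\nfunc f() {\n\tif bar\n}\n")
	fset := token.NewFileSet()
	f, _ := parser.ParseFile(fset, "p.go", src, ParseFull)
	newSrc, fix := fixSrc(f, fset.File(f.Pos()), src)
	if newSrc != nil && fix == fixedCurlies {
		fmt.Printf("%s", newSrc) // the if line should now read "if bar{}"
	}
}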
   360  
   361  // fixEmptySwitch moves empty switch/select statements' closing curly
   362  // brace down one line. This allows us to properly detect incomplete
   363  // "case" and "default" keywords as inside the switch statement. For
   364  // example:
   365  //
   366  //	switch {
   367  //	def<>
   368  //	}
   369  //
   370  // gets parsed like:
   371  //
   372  //	switch {
   373  //	}
   374  //
   375  // Later we manually pull out the "def" token, but we need to detect
   376  // that our "<>" position is inside the switch block. To do that we
   377  // move the curly brace so it looks like:
   378  //
   379  //	switch {
   380  //
   381  //	}
   382  //
   383  // The resulting bool reports whether any fixing occurred.
   384  func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) bool {
   385  	// We only care about empty switch statements.
   386  	if len(body.List) > 0 || !body.Rbrace.IsValid() {
   387  		return false
   388  	}
   389  
   390  	// If the right brace is actually in the source code at the
   391  	// specified position, don't mess with it.
   392  	braceOffset, err := safetoken.Offset(tok, body.Rbrace)
   393  	if err != nil {
   394  		return false
   395  	}
   396  	if braceOffset < len(src) && src[braceOffset] == '}' {
   397  		return false
   398  	}
   399  
   400  	braceLine := safetoken.Line(tok, body.Rbrace)
   401  	if braceLine >= tok.LineCount() {
   402  		// If we are the last line in the file, no need to fix anything.
   403  		return false
   404  	}
   405  
   406  	// Move the right brace down one line.
   407  	body.Rbrace = tok.LineStart(braceLine + 1)
   408  	return true
   409  }
   410  
   411  // fixDanglingSelector inserts real "_" selector expressions in place
   412  // of phantom "_" selectors. For example:
   413  //
   414  //	func _() {
   415  //		x.<>
   416  //	}
   417  //
    418  //	var x struct { i int }
   419  //
   420  // To fix completion at "<>", we insert a real "_" after the "." so the
   421  // following declaration of "x" can be parsed and type checked
   422  // normally.
   423  func fixDanglingSelector(s *ast.SelectorExpr, tf *token.File, src []byte) []byte {
   424  	if !isPhantomUnderscore(s.Sel, tf, src) {
   425  		return nil
   426  	}
   427  
   428  	if !s.X.End().IsValid() {
   429  		return nil
   430  	}
   431  
   432  	insertOffset, err := safetoken.Offset(tf, s.X.End())
   433  	if err != nil {
   434  		return nil
   435  	}
   436  	// Insert directly after the selector's ".".
   437  	insertOffset++
   438  	if src[insertOffset-1] != '.' {
   439  		return nil
   440  	}
   441  
   442  	var buf bytes.Buffer
   443  	buf.Grow(len(src) + 1)
   444  	buf.Write(src[:insertOffset])
   445  	buf.WriteByte('_')
   446  	buf.Write(src[insertOffset:])
   447  	return buf.Bytes()
   448  }
   449  
   450  // fixPhantomSelector tries to fix selector expressions with phantom
   451  // "_" selectors. In particular, we check if the selector is a
   452  // keyword, and if so we swap in an *ast.Ident with the keyword text. For example:
   453  //
    454  //	foo.var
   455  //
   456  // yields a "_" selector instead of "var" since "var" is a keyword.
   457  //
   458  // TODO(rfindley): should this constitute an ast 'fix'?
   459  //
   460  // The resulting bool reports whether any fixing occurred.
   461  func fixPhantomSelector(sel *ast.SelectorExpr, tf *token.File, src []byte) bool {
   462  	if !isPhantomUnderscore(sel.Sel, tf, src) {
   463  		return false
   464  	}
   465  
   466  	// Only consider selectors directly abutting the selector ".". This
   467  	// avoids false positives in cases like:
   468  	//
   469  	//   foo. // don't think "var" is our selector
   470  	//   var bar = 123
   471  	//
   472  	if sel.Sel.Pos() != sel.X.End()+1 {
   473  		return false
   474  	}
   475  
   476  	maybeKeyword := readKeyword(sel.Sel.Pos(), tf, src)
   477  	if maybeKeyword == "" {
   478  		return false
   479  	}
   480  
   481  	return replaceNode(sel, sel.Sel, &ast.Ident{
   482  		Name:    maybeKeyword,
   483  		NamePos: sel.Sel.Pos(),
   484  	})
   485  }
   486  
   487  // isPhantomUnderscore reports whether the given ident is a phantom
   488  // underscore. The parser sometimes inserts phantom underscores when
   489  // it encounters otherwise unparseable situations.
   490  func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool {
   491  	if id == nil || id.Name != "_" {
   492  		return false
   493  	}
   494  
   495  	// Phantom underscore means the underscore is not actually in the
   496  	// program text.
   497  	offset, err := safetoken.Offset(tok, id.Pos())
   498  	if err != nil {
   499  		return false
   500  	}
   501  	return len(src) <= offset || src[offset] != '_'
   502  }
   503  
   504  // fixInitStmt fixes cases where the parser misinterprets an
   505  // if/for/switch "init" statement as the "cond" conditional. In cases
   506  // like "if i := 0" the user hasn't typed the semicolon yet so the
    507  // parser is looking for the conditional expression. However, "i := 0"
    508  // is not a valid expression, so we get a BadExpr.
   509  //
   510  // The resulting bool reports whether any fixing occurred.
   511  func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool {
   512  	if !bad.Pos().IsValid() || !bad.End().IsValid() {
   513  		return false
   514  	}
   515  
   516  	// Try to extract a statement from the BadExpr.
   517  	start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()-1)
   518  	if err != nil {
   519  		return false
   520  	}
   521  	stmtBytes := src[start : end+1]
   522  	stmt, err := parseStmt(tok, bad.Pos(), stmtBytes)
   523  	if err != nil {
   524  		return false
   525  	}
   526  
   527  	// If the parent statement doesn't already have an "init" statement,
   528  	// move the extracted statement into the "init" field and insert a
   529  	// dummy expression into the required "cond" field.
   530  	switch p := parent.(type) {
   531  	case *ast.IfStmt:
   532  		if p.Init != nil {
   533  			return false
   534  		}
   535  		p.Init = stmt
   536  		p.Cond = &ast.Ident{
   537  			Name:    "_",
   538  			NamePos: stmt.End(),
   539  		}
   540  		return true
   541  	case *ast.ForStmt:
   542  		if p.Init != nil {
   543  			return false
   544  		}
   545  		p.Init = stmt
   546  		p.Cond = &ast.Ident{
   547  			Name:    "_",
   548  			NamePos: stmt.End(),
   549  		}
   550  		return true
   551  	case *ast.SwitchStmt:
   552  		if p.Init != nil {
   553  			return false
   554  		}
   555  		p.Init = stmt
   556  		p.Tag = nil
   557  		return true
   558  	}
   559  	return false
   560  }
   561  
   562  // readKeyword reads the keyword starting at pos, if any.
   563  func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
   564  	var kwBytes []byte
   565  	offset, err := safetoken.Offset(tok, pos)
   566  	if err != nil {
   567  		return ""
   568  	}
   569  	for i := offset; i < len(src); i++ {
   570  		// Use a simplified identifier check since keywords are always lowercase ASCII.
   571  		if src[i] < 'a' || src[i] > 'z' {
   572  			break
   573  		}
   574  		kwBytes = append(kwBytes, src[i])
   575  
   576  		// Stop search at arbitrarily chosen too-long-for-a-keyword length.
   577  		if len(kwBytes) > 15 {
   578  			return ""
   579  		}
   580  	}
   581  
   582  	if kw := string(kwBytes); token.Lookup(kw).IsKeyword() {
   583  		return kw
   584  	}
   585  
   586  	return ""
   587  }
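
// Illustrative sketch (not part of the original file): readKeyword returns the
// keyword starting at a position, or "" otherwise. The buffer and positions are
// assumptions for the example.
func sketchReadKeyword() {
	src := []byte("range x")
	fset := token.NewFileSet()
	tok := fset.AddFile("kw.go", -1, len(src))
	tok.SetLinesForContent(src)
	fmt.Println(readKeyword(token.Pos(tok.Base()), tok, src))   // "range"
	fmt.Println(readKeyword(token.Pos(tok.Base()+6), tok, src)) // "" ("x" is an identifier, not a keyword)
}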
   588  
   589  // fixArrayType tries to parse an *ast.BadExpr into an *ast.ArrayType.
   590  // go/parser often turns lone array types like "[]int" into BadExprs
   591  // if it isn't expecting a type.
   592  func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) bool {
   593  	// Our expected input is a bad expression that looks like "[]someExpr".
   594  
   595  	from := bad.Pos()
   596  	to := bad.End()
   597  
   598  	if !from.IsValid() || !to.IsValid() {
   599  		return false
   600  	}
   601  
   602  	exprBytes := make([]byte, 0, int(to-from)+3)
   603  	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
   604  	// It also panics if the position is not in the range of the file, and
   605  	// badExprs may not necessarily have good positions, so check first.
   606  	fromOffset, toOffset, err := safetoken.Offsets(tok, from, to-1)
   607  	if err != nil {
   608  		return false
   609  	}
   610  	exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...)
   611  	exprBytes = bytes.TrimSpace(exprBytes)
   612  
   613  	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
   614  	// so we can complete directly after the "[]".
   615  	if len(exprBytes) > 0 && exprBytes[len(exprBytes)-1] == ']' {
   616  		exprBytes = append(exprBytes, '_')
   617  	}
   618  
   619  	// Add "{}" to turn our ArrayType into a CompositeLit. This is to
   620  	// handle the case of "[...]int" where we must make it a composite
   621  	// literal to be parseable.
   622  	exprBytes = append(exprBytes, '{', '}')
   623  
   624  	expr, err := parseExpr(tok, from, exprBytes)
   625  	if err != nil {
   626  		return false
   627  	}
   628  
   629  	cl, _ := expr.(*ast.CompositeLit)
   630  	if cl == nil {
   631  		return false
   632  	}
   633  
   634  	at, _ := cl.Type.(*ast.ArrayType)
   635  	if at == nil {
   636  		return false
   637  	}
   638  
   639  	return replaceNode(parent, bad, at)
   640  }
   641  
   642  // precedingToken scans src to find the token preceding pos.
   643  func precedingToken(pos token.Pos, tok *token.File, src []byte) token.Token {
   644  	s := &scanner.Scanner{}
   645  	s.Init(tok, src, nil, 0)
   646  
   647  	var lastTok token.Token
   648  	for {
   649  		p, t, _ := s.Scan()
   650  		if t == token.EOF || p >= pos {
   651  			break
   652  		}
   653  
   654  		lastTok = t
   655  	}
   656  	return lastTok
   657  }
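
// Illustrative sketch (not part of the original file): precedingToken reports
// the last token scanned before a position. The buffer and the choice of the
// trailing newline as the position are assumptions for the example.
func sketchPrecedingToken() {
	src := []byte("package p\n\nvar x = 1 + 2\n")
	fset := token.NewFileSet()
	f, _ := parser.ParseFile(fset, "p.go", src, ParseFull)
	tok := fset.File(f.Pos())
	end := token.Pos(tok.Base() + len(src) - 1) // position of the final newline
	fmt.Println(precedingToken(end, tok, src))  // INT (the literal "2")
}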
   658  
   659  // fixDeferOrGoStmt tries to parse an *ast.BadStmt into a defer or a go statement.
   660  //
   661  // go/parser packages a statement of the form "defer x." as an *ast.BadStmt because
   662  // it does not include a call expression. This means that go/types skips type-checking
   663  // this statement entirely, and we can't use the type information when completing.
   664  // Here, we try to generate a fake *ast.DeferStmt or *ast.GoStmt to put into the AST,
   665  // instead of the *ast.BadStmt.
   666  func fixDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src []byte) bool {
   667  	// Check if we have a bad statement containing either a "go" or "defer".
   668  	s := &scanner.Scanner{}
   669  	s.Init(tok, src, nil, 0)
   670  
   671  	var (
   672  		pos token.Pos
   673  		tkn token.Token
   674  	)
   675  	for {
   676  		if tkn == token.EOF {
   677  			return false
   678  		}
   679  		if pos >= bad.From {
   680  			break
   681  		}
   682  		pos, tkn, _ = s.Scan()
   683  	}
   684  
   685  	var stmt ast.Stmt
   686  	switch tkn {
   687  	case token.DEFER:
   688  		stmt = &ast.DeferStmt{
   689  			Defer: pos,
   690  		}
   691  	case token.GO:
   692  		stmt = &ast.GoStmt{
   693  			Go: pos,
   694  		}
   695  	default:
   696  		return false
   697  	}
   698  
   699  	var (
   700  		from, to, last   token.Pos
   701  		lastToken        token.Token
   702  		braceDepth       int
   703  		phantomSelectors []token.Pos
   704  	)
   705  FindTo:
   706  	for {
   707  		to, tkn, _ = s.Scan()
   708  
   709  		if from == token.NoPos {
   710  			from = to
   711  		}
   712  
   713  		switch tkn {
   714  		case token.EOF:
   715  			break FindTo
   716  		case token.SEMICOLON:
   717  			// If we aren't in nested braces, end of statement means
   718  			// end of expression.
   719  			if braceDepth == 0 {
   720  				break FindTo
   721  			}
   722  		case token.LBRACE:
   723  			braceDepth++
   724  		}
   725  
   726  		// This handles the common dangling selector case. For example in
   727  		//
   728  		// defer fmt.
   729  		// y := 1
   730  		//
   731  		// we notice the dangling period and end our expression.
   732  		//
   733  		// If the previous token was a "." and we are looking at a "}",
   734  		// the period is likely a dangling selector and needs a phantom
   735  		// "_". Likewise if the current token is on a different line than
   736  		// the period, the period is likely a dangling selector.
   737  		if lastToken == token.PERIOD && (tkn == token.RBRACE || safetoken.Line(tok, to) > safetoken.Line(tok, last)) {
   738  			// Insert phantom "_" selector after the dangling ".".
   739  			phantomSelectors = append(phantomSelectors, last+1)
   740  			// If we aren't in a block then end the expression after the ".".
   741  			if braceDepth == 0 {
   742  				to = last + 1
   743  				break
   744  			}
   745  		}
   746  
   747  		lastToken = tkn
   748  		last = to
   749  
   750  		switch tkn {
   751  		case token.RBRACE:
   752  			braceDepth--
   753  			if braceDepth <= 0 {
   754  				if braceDepth == 0 {
   755  					// +1 to include the "}" itself.
   756  					to += 1
   757  				}
   758  				break FindTo
   759  			}
   760  		}
   761  	}
   762  
   763  	fromOffset, toOffset, err := safetoken.Offsets(tok, from, to)
   764  	if err != nil {
   765  		return false
   766  	}
   767  	if !from.IsValid() || fromOffset >= len(src) {
   768  		return false
   769  	}
   770  	if !to.IsValid() || toOffset >= len(src) {
   771  		return false
   772  	}
   773  
   774  	// Insert any phantom selectors needed to prevent dangling "." from messing
   775  	// up the AST.
   776  	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
   777  	for i, b := range src[fromOffset:toOffset] {
   778  		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
   779  			exprBytes = append(exprBytes, '_')
   780  			phantomSelectors = phantomSelectors[1:]
   781  		}
   782  		exprBytes = append(exprBytes, b)
   783  	}
   784  
   785  	if len(phantomSelectors) > 0 {
   786  		exprBytes = append(exprBytes, '_')
   787  	}
   788  
   789  	expr, err := parseExpr(tok, from, exprBytes)
   790  	if err != nil {
   791  		return false
   792  	}
   793  
   794  	// Package the expression into a fake *ast.CallExpr and re-insert
   795  	// into the function.
   796  	call := &ast.CallExpr{
   797  		Fun:    expr,
   798  		Lparen: to,
   799  		Rparen: to,
   800  	}
   801  
   802  	switch stmt := stmt.(type) {
   803  	case *ast.DeferStmt:
   804  		stmt.Call = call
   805  	case *ast.GoStmt:
   806  		stmt.Call = call
   807  	}
   808  
   809  	return replaceNode(parent, bad, stmt)
   810  }
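
// Illustrative sketch (not part of the original file): a dangling "defer fmt."
// comes back from go/parser as an *ast.BadStmt, and the repair above turns it
// into a synthetic *ast.DeferStmt. The source text and URI are assumptions made
// up for the example.
func sketchFixDeferStmt() {
	src := []byte("package p\n\nimport \"fmt\"\n\nfunc f() {\n\tdefer fmt.\n}\n")
	_, fixes := Parse(context.Background(), token.NewFileSet(),
		protocol.DocumentURI("file:///tmp/p.go"), src, ParseFull, false)
	repaired := false
	for _, fix := range fixes {
		repaired = repaired || fix == fixedDeferOrGo
	}
	fmt.Println(repaired) // expected: true
}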
   811  
   812  // parseStmt parses the statement in src and updates its position to
   813  // start at pos.
   814  //
   815  // tok is the original file containing pos. Used to ensure that all adjusted
   816  // positions are valid.
   817  func parseStmt(tok *token.File, pos token.Pos, src []byte) (ast.Stmt, error) {
   818  	// Wrap our expression to make it a valid Go file we can pass to ParseFile.
   819  	fileSrc := bytes.Join([][]byte{
   820  		[]byte("package fake;func _(){"),
   821  		src,
   822  		[]byte("}"),
   823  	}, nil)
   824  
   825  	// Use ParseFile instead of ParseExpr because ParseFile has
   826  	// best-effort behavior, whereas ParseExpr fails hard on any error.
   827  	fakeFile, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0)
   828  	if fakeFile == nil {
   829  		return nil, fmt.Errorf("error reading fake file source: %v", err)
   830  	}
   831  
   832  	// Extract our expression node from inside the fake file.
   833  	if len(fakeFile.Decls) == 0 {
   834  		return nil, fmt.Errorf("error parsing fake file: %v", err)
   835  	}
   836  
   837  	fakeDecl, _ := fakeFile.Decls[0].(*ast.FuncDecl)
   838  	if fakeDecl == nil || len(fakeDecl.Body.List) == 0 {
   839  		return nil, fmt.Errorf("no statement in %s: %v", src, err)
   840  	}
   841  
   842  	stmt := fakeDecl.Body.List[0]
   843  
    844  	// parser.ParseFile returned positions relative to the fake file.
    845  	// Adjust them for the current file.
   846  	offsetPositions(tok, stmt, pos-1-(stmt.Pos()-1))
   847  
   848  	return stmt, nil
   849  }
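
// Illustrative sketch (not part of the original file): the wrapping trick used
// by parseStmt, shown on its own. The fragment is an assumption for the example;
// it only parses once embedded in a syntactically complete file.
func sketchWrapFragment() {
	fragment := []byte("i := 0")
	fileSrc := append(append([]byte("package fake;func _(){"), fragment...), '}')
	if f, err := parser.ParseFile(token.NewFileSet(), "", fileSrc, 0); err == nil {
		fn := f.Decls[0].(*ast.FuncDecl)
		fmt.Printf("%T\n", fn.Body.List[0]) // *ast.AssignStmt
	}
}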
   850  
   851  // parseExpr parses the expression in src and updates its position to
   852  // start at pos.
   853  func parseExpr(tok *token.File, pos token.Pos, src []byte) (ast.Expr, error) {
   854  	stmt, err := parseStmt(tok, pos, src)
   855  	if err != nil {
   856  		return nil, err
   857  	}
   858  
   859  	exprStmt, ok := stmt.(*ast.ExprStmt)
   860  	if !ok {
   861  		return nil, fmt.Errorf("no expr in %s: %v", src, err)
   862  	}
   863  
   864  	return exprStmt.X, nil
   865  }
   866  
   867  var tokenPosType = reflect.TypeOf(token.NoPos)
   868  
   869  // offsetPositions applies an offset to the positions in an ast.Node.
   870  func offsetPositions(tok *token.File, n ast.Node, offset token.Pos) {
   871  	fileBase := int64(tok.Base())
   872  	fileEnd := fileBase + int64(tok.Size())
   873  	ast.Inspect(n, func(n ast.Node) bool {
   874  		if n == nil {
   875  			return false
   876  		}
   877  
   878  		v := reflect.ValueOf(n).Elem()
   879  
   880  		switch v.Kind() {
   881  		case reflect.Struct:
   882  			for i := 0; i < v.NumField(); i++ {
   883  				f := v.Field(i)
   884  				if f.Type() != tokenPosType {
   885  					continue
   886  				}
   887  
   888  				if !f.CanSet() {
   889  					continue
   890  				}
   891  
   892  				// Don't offset invalid positions: they should stay invalid.
   893  				if !token.Pos(f.Int()).IsValid() {
   894  					continue
   895  				}
   896  
   897  				// Clamp value to valid range; see #64335.
   898  				//
   899  				// TODO(golang/go#64335): this is a hack, because our fixes should not
   900  				// produce positions that overflow (but they do: golang/go#64488).
   901  				pos := f.Int() + int64(offset)
   902  				if pos < fileBase {
   903  					pos = fileBase
   904  				}
   905  				if pos > fileEnd {
   906  					pos = fileEnd
   907  				}
   908  				f.SetInt(pos)
   909  			}
   910  		}
   911  
   912  		return true
   913  	})
   914  }
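
// Illustrative sketch (not part of the original file): offsetPositions shifts
// every token.Pos field in a subtree by a fixed amount, clamped to the
// destination file's range. The file size and identifier are assumptions.
func sketchOffsetPositions() {
	fset := token.NewFileSet()
	dst := fset.AddFile("dst.go", -1, 100)
	ident := &ast.Ident{Name: "x", NamePos: 1} // position relative to a throwaway file
	offsetPositions(dst, ident, token.Pos(dst.Base())-1)
	fmt.Println(ident.NamePos == token.Pos(dst.Base())) // true: now relative to dst
}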
   915  
   916  // replaceNode updates parent's child oldChild to be newChild. It
   917  // returns whether it replaced successfully.
   918  func replaceNode(parent, oldChild, newChild ast.Node) bool {
   919  	if parent == nil || oldChild == nil || newChild == nil {
   920  		return false
   921  	}
   922  
   923  	parentVal := reflect.ValueOf(parent).Elem()
   924  	if parentVal.Kind() != reflect.Struct {
   925  		return false
   926  	}
   927  
   928  	newChildVal := reflect.ValueOf(newChild)
   929  
   930  	tryReplace := func(v reflect.Value) bool {
   931  		if !v.CanSet() || !v.CanInterface() {
   932  			return false
   933  		}
   934  
   935  		// If the existing value is oldChild, we found our child. Make
   936  		// sure our newChild is assignable and then make the swap.
   937  		if v.Interface() == oldChild && newChildVal.Type().AssignableTo(v.Type()) {
   938  			v.Set(newChildVal)
   939  			return true
   940  		}
   941  
   942  		return false
   943  	}
   944  
   945  	// Loop over parent's struct fields.
   946  	for i := 0; i < parentVal.NumField(); i++ {
   947  		f := parentVal.Field(i)
   948  
   949  		switch f.Kind() {
   950  		// Check interface and pointer fields.
   951  		case reflect.Interface, reflect.Ptr:
   952  			if tryReplace(f) {
   953  				return true
   954  			}
   955  
   956  		// Search through any slice fields.
   957  		case reflect.Slice:
   958  			for i := 0; i < f.Len(); i++ {
   959  				if tryReplace(f.Index(i)) {
   960  					return true
   961  				}
   962  			}
   963  		}
   964  	}
   965  
   966  	return false
   967  }
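
// Illustrative sketch (not part of the original file): replaceNode swaps one
// child of a parent node for another via reflection. The tiny AST below is
// built by hand purely for the example.
func sketchReplaceNode() {
	cond := &ast.Ident{Name: "ready"}
	stmt := &ast.IfStmt{Cond: cond, Body: &ast.BlockStmt{}}
	ok := replaceNode(stmt, cond, &ast.Ident{Name: "done"})
	fmt.Println(ok, stmt.Cond.(*ast.Ident).Name) // true done
}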