github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/inline/inl.go

     1  // Copyright 2011 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  //
     5  // The inlining facility makes 2 passes: first CanInline determines which
     6  // functions are suitable for inlining, and for those that are it
     7  // saves a copy of the body. Then InlineCalls walks each function body to
     8  // expand calls to inlinable functions.
     9  //
    10  // The Debug.l flag controls the aggressiveness. Note that main() swaps level 0 and 1,
    11  // making 1 the default and -l disable inlining. Additional levels (beyond -l) may be
    12  // buggy and are not supported.
    13  //      0: disabled
    14  //      1: 80-node leaf functions, oneliners, panic, lazy typechecking (default)
    15  //      2: (unassigned)
    16  //      3: (unassigned)
    17  //      4: allow non-leaf functions
    18  //
    19  // At some point this may get another default and become switch-offable with -N.
    20  //
    21  // The -d typecheckinl flag enables early typechecking of all imported bodies,
    22  // which is useful to flush out bugs.
    23  //
    24  // The Debug.m flag enables diagnostic output. A single -m is useful for verifying
    25  // which calls get inlined or not; more is for debugging, and may go away at any point.
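        //
        // As a rough illustration (flag spellings as used by the go tool): building with
        //
        //	go build -gcflags=-m
        //
        // prints one "can inline F" or "inlining call to F" line per decision, while
        // -gcflags='-m -m' also prints why a function cannot be inlined and its
        // estimated cost, matching the Printf calls in this file.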
    26  
    27  package inline
    28  
    29  import (
    30  	"fmt"
    31  	"github.com/bir3/gocompiler/src/go/constant"
    32  	"sort"
    33  	"strconv"
    34  	"strings"
    35  
    36  	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
    37  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
    38  	"github.com/bir3/gocompiler/src/cmd/compile/internal/logopt"
    39  	"github.com/bir3/gocompiler/src/cmd/compile/internal/pgo"
    40  	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
    41  	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
    42  	"github.com/bir3/gocompiler/src/cmd/internal/obj"
    43  	"github.com/bir3/gocompiler/src/cmd/internal/src"
    44  )
    45  
    46  // Inlining budget parameters, gathered in one place
    47  const (
    48  	inlineMaxBudget       = 80
    49  	inlineExtraAppendCost = 0
    50  	// default is to inline if there's at most one call. -l=4 overrides this by using 1 instead.
    51  	inlineExtraCallCost  = 57              // 57 was benchmarked to provide the most benefit with no bad surprises; see https://github.com/golang/go/issues/19348#issuecomment-439370742
    52  	inlineExtraPanicCost = 1               // do not penalize inlining panics.
    53  	inlineExtraThrowCost = inlineMaxBudget // with current (2018-05/1.11) code, inlining runtime.throw does not help.
    54  
    55  	inlineBigFunctionNodes   = 5000 // Functions with this many nodes are considered "big".
    56  	inlineBigFunctionMaxCost = 20   // Max cost of inlinee when inlining into a "big" function.
    57  )
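
        // To make these numbers concrete: hairyVisitor charges roughly one budget
        // unit per IR node, plus inlineExtraCallCost for every call to a function
        // that cannot itself be inlined. A 20-node body with one such call therefore
        // costs about 20+57 = 77 and still fits within the default 80-node budget,
        // while a second call pushes it over; a call to runtime.throw by itself
        // consumes the entire default budget.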
    58  
    59  var (
    60  	// List of all hot callee nodes.
    61  	// TODO(prattmic): Make this non-global.
    62  	candHotCalleeMap = make(map[*pgo.IRNode]struct{})
    63  
    64  	// List of all hot call sites. CallSiteInfo.Callee is always nil.
    65  	// TODO(prattmic): Make this non-global.
    66  	candHotEdgeMap = make(map[pgo.CallSiteInfo]struct{})
    67  
    68  	// List of inlined call sites. CallSiteInfo.Callee is always nil.
    69  	// TODO(prattmic): Make this non-global.
    70  	inlinedCallSites = make(map[pgo.CallSiteInfo]struct{})
    71  
    72  	// Threshold in percentage for hot callsite inlining.
    73  	inlineHotCallSiteThresholdPercent float64
    74  
    75  	// Threshold in CDF percentage for hot callsite inlining,
    76  	// that is, for a threshold of X the hottest callsites that
    77  	// make up the top X% of total edge weight will be
    78  	// considered hot for inlining candidates.
    79  	inlineCDFHotCallSiteThresholdPercent = float64(99)
    80  
    81  	// Budget increased due to hotness.
    82  	inlineHotMaxBudget int32 = 2000
    83  )
    84  
    85  // pgoInlinePrologue records the hot callsites from the IR graph.
    86  func pgoInlinePrologue(p *pgo.Profile, decls []ir.Node) {
    87  	if base.Debug.PGOInlineCDFThreshold != "" {
    88  		if s, err := strconv.ParseFloat(base.Debug.PGOInlineCDFThreshold, 64); err == nil && s >= 0 && s <= 100 {
    89  			inlineCDFHotCallSiteThresholdPercent = s
    90  		} else {
    91  			base.Fatalf("invalid PGOInlineCDFThreshold, must be between 0 and 100")
    92  		}
    93  	}
    94  	var hotCallsites []pgo.NodeMapKey
    95  	inlineHotCallSiteThresholdPercent, hotCallsites = hotNodesFromCDF(p)
    96  	if base.Debug.PGOInline > 0 {
    97  		fmt.Printf("hot-callsite-thres-from-CDF=%v\n", inlineHotCallSiteThresholdPercent)
    98  	}
    99  
   100  	if x := base.Debug.PGOInlineBudget; x != 0 {
   101  		inlineHotMaxBudget = int32(x)
   102  	}
   103  
   104  	for _, n := range hotCallsites {
   105  		// mark inlineable callees from hot edges
   106  		if callee := p.WeightedCG.IRNodes[n.CalleeName]; callee != nil {
   107  			candHotCalleeMap[callee] = struct{}{}
   108  		}
   109  		// mark hot call sites
   110  		if caller := p.WeightedCG.IRNodes[n.CallerName]; caller != nil {
   111  			csi := pgo.CallSiteInfo{LineOffset: n.CallSiteOffset, Caller: caller.AST}
   112  			candHotEdgeMap[csi] = struct{}{}
   113  		}
   114  	}
   115  
   116  	if base.Debug.PGOInline >= 2 {
   117  		fmt.Printf("hot-cg before inline in dot format:")
   118  		p.PrintWeightedCallGraphDOT(inlineHotCallSiteThresholdPercent)
   119  	}
   120  }
   121  
   122  // hotNodesFromCDF computes an edge weight threshold and the list of hot
   123  // nodes that make up the given percentage of the CDF. The threshold, as
   124  // a percent, is the lower bound of weight for nodes to be considered hot
   125  // (currently only used in debug prints; in case of equal weights,
   126  // comparing with the threshold may not accurately reflect which nodes are
   127  // considered hot).
   128  func hotNodesFromCDF(p *pgo.Profile) (float64, []pgo.NodeMapKey) {
   129  	nodes := make([]pgo.NodeMapKey, len(p.NodeMap))
   130  	i := 0
   131  	for n := range p.NodeMap {
   132  		nodes[i] = n
   133  		i++
   134  	}
   135  	sort.Slice(nodes, func(i, j int) bool {
   136  		ni, nj := nodes[i], nodes[j]
   137  		if wi, wj := p.NodeMap[ni].EWeight, p.NodeMap[nj].EWeight; wi != wj {
   138  			return wi > wj // want larger weight first
   139  		}
   140  		// same weight, order by name/line number
   141  		if ni.CallerName != nj.CallerName {
   142  			return ni.CallerName < nj.CallerName
   143  		}
   144  		if ni.CalleeName != nj.CalleeName {
   145  			return ni.CalleeName < nj.CalleeName
   146  		}
   147  		return ni.CallSiteOffset < nj.CallSiteOffset
   148  	})
   149  	cum := int64(0)
   150  	for i, n := range nodes {
   151  		w := p.NodeMap[n].EWeight
   152  		cum += w
   153  		if pgo.WeightInPercentage(cum, p.TotalEdgeWeight) > inlineCDFHotCallSiteThresholdPercent {
   154  			// nodes[:i+1] to include the very last node that makes it to go over the threshold.
   155  			// (Say, if the CDF threshold is 50% and one hot node takes 60% of weight, we want to
   156  			// include that node instead of excluding it.)
   157  			return pgo.WeightInPercentage(w, p.TotalEdgeWeight), nodes[:i+1]
   158  		}
   159  	}
   160  	return 0, nodes
   161  }
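
        // For example, given edge weights 60, 30, 9 and 1 (total 100) and the
        // default CDF threshold of 99, the cumulative weight only exceeds 99 once
        // the final edge is included, so all four nodes are returned as hot and the
        // reported threshold is that last edge's 1% share; with a threshold of 50,
        // only the 60-weight edge would be returned.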
   162  
   163  // pgoInlineEpilogue updates IRGraph after inlining.
   164  func pgoInlineEpilogue(p *pgo.Profile, decls []ir.Node) {
   165  	if base.Debug.PGOInline >= 2 {
   166  		ir.VisitFuncsBottomUp(decls, func(list []*ir.Func, recursive bool) {
   167  			for _, f := range list {
   168  				name := ir.PkgFuncName(f)
   169  				if n, ok := p.WeightedCG.IRNodes[name]; ok {
   170  					p.RedirectEdges(n, inlinedCallSites)
   171  				}
   172  			}
   173  		})
   174  		// Print the call-graph after inlining. This is a debugging feature.
   175  		fmt.Printf("hot-cg after inline in dot:")
   176  		p.PrintWeightedCallGraphDOT(inlineHotCallSiteThresholdPercent)
   177  	}
   178  }
   179  
   180  // InlinePackage finds functions that can be inlined and clones them before walk expands them.
   181  func InlinePackage(p *pgo.Profile) {
   182  	InlineDecls(p, typecheck.Target.Decls, true)
   183  }
   184  
   185  // InlineDecls applies inlining to the given batch of declarations.
   186  func InlineDecls(p *pgo.Profile, decls []ir.Node, doInline bool) {
   187  	if p != nil {
   188  		pgoInlinePrologue(p, decls)
   189  	}
   190  
   191  	ir.VisitFuncsBottomUp(decls, func(list []*ir.Func, recursive bool) {
   192  		numfns := numNonClosures(list)
   193  		for _, n := range list {
   194  			if !recursive || numfns > 1 {
   195  				// We allow inlining if there is no
   196  				// recursion, or the recursion cycle is
   197  				// across more than one function.
   198  				CanInline(n, p)
   199  			} else {
   200  				if base.Flag.LowerM > 1 {
   201  					fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(n), n.Nname)
   202  				}
   203  			}
   204  			if doInline {
   205  				InlineCalls(n, p)
   206  			}
   207  		}
   208  	})
   209  
   210  	if p != nil {
   211  		pgoInlineEpilogue(p, decls)
   212  	}
   213  }
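
        // To illustrate the recursion rule above: a directly self-recursive function
        // forms a single-function cycle (recursive with numfns == 1) and is never
        // passed to CanInline, while mutually recursive functions f -> g -> f appear
        // together in one bottom-up list, so both remain inlining candidates;
        // mkinlcall's inline-tree walk later stops the expansion from cycling forever.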
   214  
   215  // CanInline determines whether fn is inlineable.
   216  // If so, CanInline saves copies of fn.Body and fn.Dcl in fn.Inl.
   217  // fn and fn.Body will already have been typechecked.
   218  func CanInline(fn *ir.Func, profile *pgo.Profile) {
   219  	if fn.Nname == nil {
   220  		base.Fatalf("CanInline no nname %+v", fn)
   221  	}
   222  
   223  	var reason string // reason, if any, that the function was not inlined
   224  	if base.Flag.LowerM > 1 || logopt.Enabled() {
   225  		defer func() {
   226  			if reason != "" {
   227  				if base.Flag.LowerM > 1 {
   228  					fmt.Printf("%v: cannot inline %v: %s\n", ir.Line(fn), fn.Nname, reason)
   229  				}
   230  				if logopt.Enabled() {
   231  					logopt.LogOpt(fn.Pos(), "cannotInlineFunction", "inline", ir.FuncName(fn), reason)
   232  				}
   233  			}
   234  		}()
   235  	}
   236  
   237  	// If marked "go:noinline", don't inline
   238  	if fn.Pragma&ir.Noinline != 0 {
   239  		reason = "marked go:noinline"
   240  		return
   241  	}
   242  
   243  	// If marked "go:norace" and -race compilation, don't inline.
   244  	if base.Flag.Race && fn.Pragma&ir.Norace != 0 {
   245  		reason = "marked go:norace with -race compilation"
   246  		return
   247  	}
   248  
   249  	// If marked "go:nocheckptr" and -d checkptr compilation, don't inline.
   250  	if base.Debug.Checkptr != 0 && fn.Pragma&ir.NoCheckPtr != 0 {
   251  		reason = "marked go:nocheckptr"
   252  		return
   253  	}
   254  
   255  	// If marked "go:cgo_unsafe_args", don't inline, since the
   256  	// function makes assumptions about its argument frame layout.
   257  	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
   258  		reason = "marked go:cgo_unsafe_args"
   259  		return
   260  	}
   261  
   262  	// If marked as "go:uintptrkeepalive", don't inline, since the
   263  	// keep alive information is lost during inlining.
   264  	//
   265  	// TODO(prattmic): This is handled on calls during escape analysis,
   266  	// which is after inlining. Move prior to inlining so the keep-alive is
   267  	// maintained after inlining.
   268  	if fn.Pragma&ir.UintptrKeepAlive != 0 {
   269  		reason = "marked as having a keep-alive uintptr argument"
   270  		return
   271  	}
   272  
   273  	// If marked as "go:uintptrescapes", don't inline, since the
   274  	// escape information is lost during inlining.
   275  	if fn.Pragma&ir.UintptrEscapes != 0 {
   276  		reason = "marked as having an escaping uintptr argument"
   277  		return
   278  	}
   279  
   280  	// The nowritebarrierrec checker currently works at function
   281  	// granularity, so inlining yeswritebarrierrec functions can
   282  	// confuse it (#22342). As a workaround, disallow inlining
   283  	// them for now.
   284  	if fn.Pragma&ir.Yeswritebarrierrec != 0 {
   285  		reason = "marked go:yeswritebarrierrec"
   286  		return
   287  	}
   288  
   289  	// If fn has no body (is defined outside of Go), cannot inline it.
   290  	if len(fn.Body) == 0 {
   291  		reason = "no function body"
   292  		return
   293  	}
   294  
   295  	if fn.Typecheck() == 0 {
   296  		base.Fatalf("CanInline on non-typechecked function %v", fn)
   297  	}
   298  
   299  	n := fn.Nname
   300  	if n.Func.InlinabilityChecked() {
   301  		return
   302  	}
   303  	defer n.Func.SetInlinabilityChecked(true)
   304  
   305  	cc := int32(inlineExtraCallCost)
   306  	if base.Flag.LowerL == 4 {
   307  		cc = 1 // this appears to yield better performance than 0.
   308  	}
   309  
   310  	// Update the budget for profile-guided inlining.
   311  	budget := int32(inlineMaxBudget)
   312  	if profile != nil {
   313  		if n, ok := profile.WeightedCG.IRNodes[ir.PkgFuncName(fn)]; ok {
   314  			if _, ok := candHotCalleeMap[n]; ok {
   315  				budget = int32(inlineHotMaxBudget)
   316  				if base.Debug.PGOInline > 0 {
   317  					fmt.Printf("hot-node enabled increased budget=%v for func=%v\n", budget, ir.PkgFuncName(fn))
   318  				}
   319  			}
   320  		}
   321  	}
   322  
   323  	// At this point in the game the function we're looking at may
   324  	// have "stale" autos, vars that still appear in the Dcl list, but
   325  	// which no longer have any uses in the function body (due to
   326  	// elimination by deadcode). We'd like to exclude these dead vars
   327  	// when creating the "Inline.Dcl" field below; to accomplish this,
   328  	// the hairyVisitor below builds up a map of used/referenced
   329  	// locals, and we use this map to produce a pruned Inline.Dcl
   330  	// list. See issue 25249 for more context.
   331  
   332  	visitor := hairyVisitor{
   333  		curFunc:       fn,
   334  		budget:        budget,
   335  		maxBudget:     budget,
   336  		extraCallCost: cc,
   337  		profile:       profile,
   338  	}
   339  	if visitor.tooHairy(fn) {
   340  		reason = visitor.reason
   341  		return
   342  	}
   343  
   344  	n.Func.Inl = &ir.Inline{
   345  		Cost: budget - visitor.budget,
   346  		Dcl:  pruneUnusedAutos(n.Defn.(*ir.Func).Dcl, &visitor),
   347  		Body: inlcopylist(fn.Body),
   348  
   349  		CanDelayResults: canDelayResults(fn),
   350  	}
   351  
   352  	if base.Flag.LowerM > 1 {
   353  		fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, budget-visitor.budget, fn.Type(), ir.Nodes(n.Func.Inl.Body))
   354  	} else if base.Flag.LowerM != 0 {
   355  		fmt.Printf("%v: can inline %v\n", ir.Line(fn), n)
   356  	}
   357  	if logopt.Enabled() {
   358  		logopt.LogOpt(fn.Pos(), "canInlineFunction", "inline", ir.FuncName(fn), fmt.Sprintf("cost: %d", budget-visitor.budget))
   359  	}
   360  }
   361  
   362  // canDelayResults reports whether inlined calls to fn can delay
   363  // declaring the result parameters until the "return" statement.
   364  func canDelayResults(fn *ir.Func) bool {
   365  	// We can delay declaring+initializing result parameters if:
   366  	// (1) there's exactly one "return" statement in the inlined function;
   367  	// (2) it's not an empty return statement (#44355); and
   368  	// (3) the result parameters aren't named.
   369  
   370  	nreturns := 0
   371  	ir.VisitList(fn.Body, func(n ir.Node) {
   372  		if n, ok := n.(*ir.ReturnStmt); ok {
   373  			nreturns++
   374  			if len(n.Results) == 0 {
   375  				nreturns++ // empty return statement (case 2)
   376  			}
   377  		}
   378  	})
   379  
   380  	if nreturns != 1 {
   381  		return false // not exactly one return statement (case 1)
   382  	}
   383  
   384  	// Check that the result parameters are not named (case 3).
   385  	for _, param := range fn.Type().Results().FieldSlice() {
   386  		if sym := types.OrigSym(param.Sym); sym != nil && !sym.IsBlank() {
   387  			return false // found a named result parameter (case 3)
   388  		}
   389  	}
   390  
   391  	return true
   392  }
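
        // For instance, func max(a, b int) int { if a > b { return a }; return b }
        // has two return statements and so cannot delay its results, whereas a
        // single-return function with an unnamed result, such as
        // func double(x int) int { return 2 * x }, satisfies all three conditions.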
   393  
   394  // hairyVisitor visits a function body to determine its inlining
   395  // hairiness and whether or not it can be inlined.
   396  type hairyVisitor struct {
   397  	// This is needed to access the current caller in the doNode function.
   398  	curFunc       *ir.Func
   399  	budget        int32
   400  	maxBudget     int32
   401  	reason        string
   402  	extraCallCost int32
   403  	usedLocals    ir.NameSet
   404  	do            func(ir.Node) bool
   405  	profile       *pgo.Profile
   406  }
   407  
   408  func (v *hairyVisitor) tooHairy(fn *ir.Func) bool {
   409  	v.do = v.doNode // cache closure
   410  	if ir.DoChildren(fn, v.do) {
   411  		return true
   412  	}
   413  	if v.budget < 0 {
   414  		v.reason = fmt.Sprintf("function too complex: cost %d exceeds budget %d", v.maxBudget-v.budget, v.maxBudget)
   415  		return true
   416  	}
   417  	return false
   418  }
   419  
   420  func (v *hairyVisitor) doNode(n ir.Node) bool {
   421  	if n == nil {
   422  		return false
   423  	}
   424  	switch n.Op() {
   425  	// Call is okay if inlinable and we have the budget for the body.
   426  	case ir.OCALLFUNC:
   427  		n := n.(*ir.CallExpr)
   428  		// Functions that call runtime.getcaller{pc,sp} cannot be inlined
   429  		// because getcaller{pc,sp} expect a pointer to the caller's first argument.
   430  		//
   431  		// runtime.throw is a "cheap call" like panic in normal code.
   432  		if n.X.Op() == ir.ONAME {
   433  			name := n.X.(*ir.Name)
   434  			if name.Class == ir.PFUNC && types.IsRuntimePkg(name.Sym().Pkg) {
   435  				fn := name.Sym().Name
   436  				if fn == "getcallerpc" || fn == "getcallersp" {
   437  					v.reason = "call to " + fn
   438  					return true
   439  				}
   440  				if fn == "throw" {
   441  					v.budget -= inlineExtraThrowCost
   442  					break
   443  				}
   444  			}
   445  			// Special case for coverage counter updates; although
   446  			// these correspond to real operations, we treat them as
   447  			// zero cost for the moment. This is due to the existence
   448  		// of tests that are sensitive to inlining: if the
   449  			// insertion of coverage instrumentation happens to tip a
   450  			// given function over the threshold and move it from
   451  			// "inlinable" to "not-inlinable", this can cause changes
   452  			// in allocation behavior, which can then result in test
   453  			// failures (a good example is the TestAllocations in
   454  			// crypto/ed25519).
   455  			if isAtomicCoverageCounterUpdate(n) {
   456  				return false
   457  			}
   458  		}
   459  		if n.X.Op() == ir.OMETHEXPR {
   460  			if meth := ir.MethodExprName(n.X); meth != nil {
   461  				if fn := meth.Func; fn != nil {
   462  					s := fn.Sym()
   463  					var cheap bool
   464  					if types.IsRuntimePkg(s.Pkg) && s.Name == "heapBits.nextArena" {
   465  						// Special case: explicitly allow mid-stack inlining of
   466  						// runtime.heapBits.next even though it calls slow-path
   467  						// runtime.heapBits.nextArena.
   468  						cheap = true
   469  					}
   470  					// Special case: on architectures that can do unaligned loads,
   471  					// explicitly mark encoding/binary methods as cheap,
   472  					// because in practice they are, even though our inlining
   473  					// budgeting system does not see that. See issue 42958.
   474  					if base.Ctxt.Arch.CanMergeLoads && s.Pkg.Path == "encoding/binary" {
   475  						switch s.Name {
   476  						case "littleEndian.Uint64", "littleEndian.Uint32", "littleEndian.Uint16",
   477  							"bigEndian.Uint64", "bigEndian.Uint32", "bigEndian.Uint16",
   478  							"littleEndian.PutUint64", "littleEndian.PutUint32", "littleEndian.PutUint16",
   479  							"bigEndian.PutUint64", "bigEndian.PutUint32", "bigEndian.PutUint16",
   480  							"littleEndian.AppendUint64", "littleEndian.AppendUint32", "littleEndian.AppendUint16",
   481  							"bigEndian.AppendUint64", "bigEndian.AppendUint32", "bigEndian.AppendUint16":
   482  							cheap = true
   483  						}
   484  					}
   485  					if cheap {
   486  						break // treat like any other node, that is, cost of 1
   487  					}
   488  				}
   489  			}
   490  		}
   491  
   492  		// Determine if the callee edge is for an inlinable hot callee or not.
   493  		if v.profile != nil && v.curFunc != nil {
   494  			if fn := inlCallee(n.X, v.profile); fn != nil && typecheck.HaveInlineBody(fn) {
   495  				lineOffset := pgo.NodeLineOffset(n, fn)
   496  				csi := pgo.CallSiteInfo{LineOffset: lineOffset, Caller: v.curFunc}
   497  				if _, o := candHotEdgeMap[csi]; o {
   498  					if base.Debug.PGOInline > 0 {
   499  						fmt.Printf("hot-callsite identified at line=%v for func=%v\n", ir.Line(n), ir.PkgFuncName(v.curFunc))
   500  					}
   501  				}
   502  			}
   503  		}
   504  
   505  		if ir.IsIntrinsicCall(n) {
   506  			// Treat like any other node.
   507  			break
   508  		}
   509  
   510  		if fn := inlCallee(n.X, v.profile); fn != nil && typecheck.HaveInlineBody(fn) {
   511  			v.budget -= fn.Inl.Cost
   512  			break
   513  		}
   514  
   515  		// Call cost for non-leaf inlining.
   516  		v.budget -= v.extraCallCost
   517  
   518  	case ir.OCALLMETH:
   519  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   520  
   521  	// Things that are too hairy, irrespective of the budget
   522  	case ir.OCALL, ir.OCALLINTER:
   523  		// Call cost for non-leaf inlining.
   524  		v.budget -= v.extraCallCost
   525  
   526  	case ir.OPANIC:
   527  		n := n.(*ir.UnaryExpr)
   528  		if n.X.Op() == ir.OCONVIFACE && n.X.(*ir.ConvExpr).Implicit() {
   529  			// Hack to keep reflect.flag.mustBe inlinable for TestIntendedInlining.
   530  			// Before CL 284412, these conversions were introduced later in the
   531  			// compiler, so they didn't count against inlining budget.
   532  			v.budget++
   533  		}
   534  		v.budget -= inlineExtraPanicCost
   535  
   536  	case ir.ORECOVER:
   537  		// recover matches the argument frame pointer to find
   538  		// the right panic value, so it needs an argument frame.
   539  		v.reason = "call to recover"
   540  		return true
   541  
   542  	case ir.OCLOSURE:
   543  		if base.Debug.InlFuncsWithClosures == 0 {
   544  			v.reason = "not inlining functions with closures"
   545  			return true
   546  		}
   547  
   548  		// TODO(danscales): Maybe make budget proportional to number of closure
   549  		// variables, e.g.:
   550  		//v.budget -= int32(len(n.(*ir.ClosureExpr).Func.ClosureVars) * 3)
   551  		v.budget -= 15
   552  		// Scan body of closure (which DoChildren doesn't automatically
   553  		// do) to check for disallowed ops in the body and include the
   554  		// body in the budget.
   555  		if doList(n.(*ir.ClosureExpr).Func.Body, v.do) {
   556  			return true
   557  		}
   558  
   559  	case ir.OGO,
   560  		ir.ODEFER,
   561  		ir.ODCLTYPE, // can't print yet
   562  		ir.OTAILCALL:
   563  		v.reason = "unhandled op " + n.Op().String()
   564  		return true
   565  
   566  	case ir.OAPPEND:
   567  		v.budget -= inlineExtraAppendCost
   568  
   569  	case ir.OADDR:
   570  		n := n.(*ir.AddrExpr)
   571  		// Make "&s.f" cost 0 when f's offset is zero.
   572  		if dot, ok := n.X.(*ir.SelectorExpr); ok && (dot.Op() == ir.ODOT || dot.Op() == ir.ODOTPTR) {
   573  			if _, ok := dot.X.(*ir.Name); ok && dot.Selection.Offset == 0 {
   574  				v.budget += 2 // undo ir.OADDR+ir.ODOT/ir.ODOTPTR
   575  			}
   576  		}
   577  
   578  	case ir.ODEREF:
   579  		// *(*X)(unsafe.Pointer(&x)) is low-cost
   580  		n := n.(*ir.StarExpr)
   581  
   582  		ptr := n.X
   583  		for ptr.Op() == ir.OCONVNOP {
   584  			ptr = ptr.(*ir.ConvExpr).X
   585  		}
   586  		if ptr.Op() == ir.OADDR {
   587  			v.budget += 1 // undo half of default cost of ir.ODEREF+ir.OADDR
   588  		}
   589  
   590  	case ir.OCONVNOP:
   591  		// This doesn't produce code, but the children might.
   592  		v.budget++ // undo default cost
   593  
   594  	case ir.ODCLCONST, ir.OFALL:
   595  		// These nodes don't produce code; omit from inlining budget.
   596  		return false
   597  
   598  	case ir.OIF:
   599  		n := n.(*ir.IfStmt)
   600  		if ir.IsConst(n.Cond, constant.Bool) {
   601  			// This if and the condition cost nothing.
   602  			if doList(n.Init(), v.do) {
   603  				return true
   604  			}
   605  			if ir.BoolVal(n.Cond) {
   606  				return doList(n.Body, v.do)
   607  			} else {
   608  				return doList(n.Else, v.do)
   609  			}
   610  		}
   611  
   612  	case ir.ONAME:
   613  		n := n.(*ir.Name)
   614  		if n.Class == ir.PAUTO {
   615  			v.usedLocals.Add(n)
   616  		}
   617  
   618  	case ir.OBLOCK:
   619  		// The only OBLOCK we should see at this point is an empty one.
   620  		// In any event, let the visitList(n.List()) below take care of the statements,
   621  		// and don't charge for the OBLOCK itself. The ++ undoes the -- below.
   622  		v.budget++
   623  
   624  	case ir.OMETHVALUE, ir.OSLICELIT:
   625  		v.budget-- // Hack for toolstash -cmp.
   626  
   627  	case ir.OMETHEXPR:
   628  		v.budget++ // Hack for toolstash -cmp.
   629  
   630  	case ir.OAS2:
   631  		n := n.(*ir.AssignListStmt)
   632  
   633  		// Unified IR unconditionally rewrites:
   634  		//
   635  		//	a, b = f()
   636  		//
   637  		// into:
   638  		//
   639  		//	DCL tmp1
   640  		//	DCL tmp2
   641  		//	tmp1, tmp2 = f()
   642  		//	a, b = tmp1, tmp2
   643  		//
   644  		// so that it can insert implicit conversions as necessary. To
   645  		// minimize impact to the existing inlining heuristics (in
   646  		// particular, to avoid breaking the existing inlinability regress
   647  		// tests), we need to compensate for this here.
   648  		if base.Debug.Unified != 0 {
   649  			if init := n.Rhs[0].Init(); len(init) == 1 {
   650  				if _, ok := init[0].(*ir.AssignListStmt); ok {
   651  					// 4 for each value, because each temporary variable now
   652  					// appears 3 times (DCL, LHS, RHS), plus an extra DCL node.
   653  					//
   654  					// 1 for the extra "tmp1, tmp2 = f()" assignment statement.
   655  					v.budget += 4*int32(len(n.Lhs)) + 1
   656  				}
   657  			}
   658  		}
   659  
   660  	case ir.OAS:
   661  		// Special case for coverage counter updates and coverage
   662  		// function registrations. Although these correspond to real
   663  		// operations, we treat them as zero cost for the moment. This
   664  		// is primarily due to the existence of tests that are
   665  		// sensitive to inlining: if the insertion of coverage
   666  		// instrumentation happens to tip a given function over the
   667  		// threshold and move it from "inlinable" to "not-inlinable",
   668  		// this can cause changes in allocation behavior, which can
   669  		// then result in test failures (a good example is the
   670  		// TestAllocations in crypto/ed25519).
   671  		n := n.(*ir.AssignStmt)
   672  		if n.X.Op() == ir.OINDEX && isIndexingCoverageCounter(n.X) {
   673  			return false
   674  		}
   675  	}
   676  
   677  	v.budget--
   678  
   679  	// When debugging, don't stop early, to get full cost of inlining this function
   680  	if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() {
   681  		v.reason = "too expensive"
   682  		return true
   683  	}
   684  
   685  	return ir.DoChildren(n, v.do)
   686  }
   687  
   688  func isBigFunc(fn *ir.Func) bool {
   689  	budget := inlineBigFunctionNodes
   690  	return ir.Any(fn, func(n ir.Node) bool {
   691  		budget--
   692  		return budget <= 0
   693  	})
   694  }
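
        // A "big" function here is one with at least inlineBigFunctionNodes (5000)
        // IR nodes; when inlining into such a function, InlineCalls lowers the
        // per-callee cost limit from inlineMaxBudget (80) down to
        // inlineBigFunctionMaxCost (20).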
   695  
   696  // inlcopylist (together with inlcopy) recursively copies a list of nodes, except
   697  // that it keeps the same ONAME, OTYPE, and OLITERAL nodes. It is used for copying
   698  // the body and dcls of an inlineable function.
   699  func inlcopylist(ll []ir.Node) []ir.Node {
   700  	s := make([]ir.Node, len(ll))
   701  	for i, n := range ll {
   702  		s[i] = inlcopy(n)
   703  	}
   704  	return s
   705  }
   706  
   707  // inlcopy is like DeepCopy(), but does extra work to copy closures.
   708  func inlcopy(n ir.Node) ir.Node {
   709  	var edit func(ir.Node) ir.Node
   710  	edit = func(x ir.Node) ir.Node {
   711  		switch x.Op() {
   712  		case ir.ONAME, ir.OTYPE, ir.OLITERAL, ir.ONIL:
   713  			return x
   714  		}
   715  		m := ir.Copy(x)
   716  		ir.EditChildren(m, edit)
   717  		if x.Op() == ir.OCLOSURE {
   718  			x := x.(*ir.ClosureExpr)
   719  			// Need to save/duplicate x.Func.Nname,
   720  			// x.Func.Nname.Ntype, x.Func.Dcl, x.Func.ClosureVars, and
   721  			// x.Func.Body for iexport and local inlining.
   722  			oldfn := x.Func
   723  			newfn := ir.NewFunc(oldfn.Pos())
   724  			m.(*ir.ClosureExpr).Func = newfn
   725  			newfn.Nname = ir.NewNameAt(oldfn.Nname.Pos(), oldfn.Nname.Sym())
   726  			// XXX OK to share fn.Type() ??
   727  			newfn.Nname.SetType(oldfn.Nname.Type())
   728  			newfn.Body = inlcopylist(oldfn.Body)
   729  			// Make shallow copy of the Dcl and ClosureVar slices
   730  			newfn.Dcl = append([]*ir.Name(nil), oldfn.Dcl...)
   731  			newfn.ClosureVars = append([]*ir.Name(nil), oldfn.ClosureVars...)
   732  		}
   733  		return m
   734  	}
   735  	return edit(n)
   736  }
   737  
   738  // InlineCalls/inlnode walks fn's statements and expressions and substitutes any
   739  // calls made to inlineable functions. This is the external entry point.
   740  func InlineCalls(fn *ir.Func, profile *pgo.Profile) {
   741  	savefn := ir.CurFunc
   742  	ir.CurFunc = fn
   743  	maxCost := int32(inlineMaxBudget)
   744  	if isBigFunc(fn) {
   745  		maxCost = inlineBigFunctionMaxCost
   746  	}
   747  	var inlCalls []*ir.InlinedCallExpr
   748  	var edit func(ir.Node) ir.Node
   749  	edit = func(n ir.Node) ir.Node {
   750  		return inlnode(n, maxCost, &inlCalls, edit, profile)
   751  	}
   752  	ir.EditChildren(fn, edit)
   753  
   754  	// If we inlined any calls, we want to recursively visit their
   755  	// bodies for further inlining. However, we need to wait until
   756  	// *after* the original function body has been expanded, or else
   757  	// inlCallee can have false positives (e.g., #54632).
   758  	for len(inlCalls) > 0 {
   759  		call := inlCalls[0]
   760  		inlCalls = inlCalls[1:]
   761  		ir.EditChildren(call, edit)
   762  	}
   763  
   764  	ir.CurFunc = savefn
   765  }
   766  
   767  // inlnode recurses over the tree to find inlineable calls, which will
   768  // be turned into OINLCALLs by mkinlcall. When the recursion comes
   769  // back up, it will examine left, right, list, rlist, ninit, ntest, nincr,
   770  // nbody and nelse and use one of the 4 inlconv/glue functions above
   771  // to turn the OINLCALL into an expression, a statement, or patch it
   772  // into this node's list or rlist as appropriate.
   773  // NOTE it makes no sense to pass the glue functions down the
   774  // recursion to the level where the OINLCALL gets created because they
   775  // have to edit /this/ n, so you'd have to push that one down as well,
   776  // but then you may as well do it here. So this is cleaner and
   777  // shorter and less complicated.
   778  // The result of inlnode MUST be assigned back to n, e.g.
   779  //
   780  //	n.Left = inlnode(n.Left)
   781  func inlnode(n ir.Node, maxCost int32, inlCalls *[]*ir.InlinedCallExpr, edit func(ir.Node) ir.Node, profile *pgo.Profile) ir.Node {
   782  	if n == nil {
   783  		return n
   784  	}
   785  
   786  	switch n.Op() {
   787  	case ir.ODEFER, ir.OGO:
   788  		n := n.(*ir.GoDeferStmt)
   789  		switch call := n.Call; call.Op() {
   790  		case ir.OCALLMETH:
   791  			base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
   792  		case ir.OCALLFUNC:
   793  			call := call.(*ir.CallExpr)
   794  			call.NoInline = true
   795  		}
   796  	case ir.OTAILCALL:
   797  		n := n.(*ir.TailCallStmt)
   798  	n.Call.NoInline = true // Don't inline a tail call for now. Maybe we could inline it just like RETURN fn(arg)?
   799  
   800  	// TODO do them here (or earlier),
   801  	// so escape analysis can avoid more heapmoves.
   802  	case ir.OCLOSURE:
   803  		return n
   804  	case ir.OCALLMETH:
   805  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   806  	case ir.OCALLFUNC:
   807  		n := n.(*ir.CallExpr)
   808  		if n.X.Op() == ir.OMETHEXPR {
   809  			// Prevent inlining some reflect.Value methods when using checkptr,
   810  			// even when package reflect was compiled without it (#35073).
   811  			if meth := ir.MethodExprName(n.X); meth != nil {
   812  				s := meth.Sym()
   813  				if base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
   814  					return n
   815  				}
   816  			}
   817  		}
   818  	}
   819  
   820  	lno := ir.SetPos(n)
   821  
   822  	ir.EditChildren(n, edit)
   823  
   824  	// with all the branches out of the way, it is now time to
   825  	// transmogrify this node itself unless inhibited by the
   826  	// switch at the top of this function.
   827  	switch n.Op() {
   828  	case ir.OCALLMETH:
   829  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   830  
   831  	case ir.OCALLFUNC:
   832  		call := n.(*ir.CallExpr)
   833  		if call.NoInline {
   834  			break
   835  		}
   836  		if base.Flag.LowerM > 3 {
   837  			fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.X)
   838  		}
   839  		if ir.IsIntrinsicCall(call) {
   840  			break
   841  		}
   842  		if fn := inlCallee(call.X, profile); fn != nil && typecheck.HaveInlineBody(fn) {
   843  			n = mkinlcall(call, fn, maxCost, inlCalls, edit)
   844  		}
   845  	}
   846  
   847  	base.Pos = lno
   848  
   849  	return n
   850  }
   851  
   852  // inlCallee takes a function-typed expression and returns the underlying
   853  // function that it refers to if statically known. Otherwise, it returns nil.
   854  func inlCallee(fn ir.Node, profile *pgo.Profile) *ir.Func {
   855  	fn = ir.StaticValue(fn)
   856  	switch fn.Op() {
   857  	case ir.OMETHEXPR:
   858  		fn := fn.(*ir.SelectorExpr)
   859  		n := ir.MethodExprName(fn)
   860  		// Check that receiver type matches fn.X.
   861  		// TODO(mdempsky): Handle implicit dereference
   862  		// of pointer receiver argument?
   863  		if n == nil || !types.Identical(n.Type().Recv().Type, fn.X.Type()) {
   864  			return nil
   865  		}
   866  		return n.Func
   867  	case ir.ONAME:
   868  		fn := fn.(*ir.Name)
   869  		if fn.Class == ir.PFUNC {
   870  			return fn.Func
   871  		}
   872  	case ir.OCLOSURE:
   873  		fn := fn.(*ir.ClosureExpr)
   874  		c := fn.Func
   875  		CanInline(c, profile)
   876  		return c
   877  	}
   878  	return nil
   879  }
   880  
   881  func inlParam(t *types.Field, as ir.InitNode, inlvars map[*ir.Name]*ir.Name) ir.Node {
   882  	if t.Nname == nil {
   883  		return ir.BlankNode
   884  	}
   885  	n := t.Nname.(*ir.Name)
   886  	if ir.IsBlank(n) {
   887  		return ir.BlankNode
   888  	}
   889  	inlvar := inlvars[n]
   890  	if inlvar == nil {
   891  		base.Fatalf("missing inlvar for %v", n)
   892  	}
   893  	as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, inlvar))
   894  	inlvar.Name().Defn = as
   895  	return inlvar
   896  }
   897  
   898  var inlgen int
   899  
   900  // SSADumpInline gives the SSA back end a chance to dump the function
   901  // when producing output for debugging the compiler itself.
   902  var SSADumpInline = func(*ir.Func) {}
   903  
   904  // InlineCall allows the inliner implementation to be overridden.
   905  // If it returns nil, the function will not be inlined.
   906  var InlineCall = oldInlineCall
   907  
   908  // If n is an OCALLFUNC node, and fn is an ir.Func with an
   909  // inlinable body, return an OINLCALL node that can replace n.
   910  // The returned node's Ninit has the parameter assignments, the Nbody is the
   911  // inlined function body, and (List, Rlist) contain the (input, output)
   912  // parameters.
   913  // The result of mkinlcall MUST be assigned back to n, e.g.
   914  //
   915  //	n.Left = mkinlcall(n.Left, fn, isddd)
   916  func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlCalls *[]*ir.InlinedCallExpr, edit func(ir.Node) ir.Node) ir.Node {
   917  	if fn.Inl == nil {
   918  		if logopt.Enabled() {
   919  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
   920  				fmt.Sprintf("%s cannot be inlined", ir.PkgFuncName(fn)))
   921  		}
   922  		return n
   923  	}
   924  	if fn.Inl.Cost > maxCost {
   925  		// If the callsite is hot and it is under the inlineHotMaxBudget budget, then try to inline it, or else bail.
   926  		lineOffset := pgo.NodeLineOffset(n, ir.CurFunc)
   927  		csi := pgo.CallSiteInfo{LineOffset: lineOffset, Caller: ir.CurFunc}
   928  		if _, ok := candHotEdgeMap[csi]; ok {
   929  			if fn.Inl.Cost > inlineHotMaxBudget {
   930  				if logopt.Enabled() {
   931  					logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
   932  						fmt.Sprintf("cost %d of %s exceeds max large caller cost %d", fn.Inl.Cost, ir.PkgFuncName(fn), inlineHotMaxBudget))
   933  				}
   934  				return n
   935  			}
   936  			if base.Debug.PGOInline > 0 {
   937  				fmt.Printf("hot-budget check allows inlining for call %s at %v\n", ir.PkgFuncName(fn), ir.Line(n))
   938  			}
   939  		} else {
   940  			// The inlined function body is too big. Typically we use this check to restrict
   941  			// inlining into very big functions. See issues 26546 and 17566.
   942  			if logopt.Enabled() {
   943  				logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
   944  					fmt.Sprintf("cost %d of %s exceeds max large caller cost %d", fn.Inl.Cost, ir.PkgFuncName(fn), maxCost))
   945  			}
   946  			return n
   947  		}
   948  	}
   949  
   950  	if fn == ir.CurFunc {
   951  		// Can't recursively inline a function into itself.
   952  		if logopt.Enabled() {
   953  			logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", fmt.Sprintf("recursive call to %s", ir.FuncName(ir.CurFunc)))
   954  		}
   955  		return n
   956  	}
   957  
   958  	// The non-unified frontend has issues with inlining and shape parameters.
   959  	if base.Debug.Unified == 0 {
   960  		// Don't inline a function fn that has no shape parameters, but is passed at
   961  		// least one shape arg. This means we must be inlining a non-generic function
   962  		// fn that was passed into a generic function, and can be called with a shape
   963  		// arg because it matches an appropriate type parameter. But fn may include
   964  		// an interface conversion (that may be applied to a shape arg) that was not
   965  		// apparent when we first created the instantiation of the generic function.
   966  		// We can't handle this if we actually do the inlining, since we want to know
   967  		// all interface conversions immediately after stenciling. So, we avoid
   968  		// inlining in this case, see issue #49309. (1)
   969  		//
   970  		// See discussion on go.dev/cl/406475 for more background.
   971  		if !fn.Type().Params().HasShape() {
   972  			for _, arg := range n.Args {
   973  				if arg.Type().HasShape() {
   974  					if logopt.Enabled() {
   975  						logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
   976  							fmt.Sprintf("inlining function %v has no-shape params with shape args", ir.FuncName(fn)))
   977  					}
   978  					return n
   979  				}
   980  			}
   981  		} else {
   982  			// Don't inline a function fn that has shape parameters, but is passed no shape arg.
   983  			// See comments (1) above, and issue #51909.
   984  			inlineable := len(n.Args) == 0 // a function with shape in its type and no arguments can always be inlined
   985  			for _, arg := range n.Args {
   986  				if arg.Type().HasShape() {
   987  					inlineable = true
   988  					break
   989  				}
   990  			}
   991  			if !inlineable {
   992  				if logopt.Enabled() {
   993  					logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(ir.CurFunc),
   994  						fmt.Sprintf("inlining function %v has shape params with no-shape args", ir.FuncName(fn)))
   995  				}
   996  				return n
   997  			}
   998  		}
   999  	}
  1000  
  1001  	if base.Flag.Cfg.Instrumenting && types.IsRuntimePkg(fn.Sym().Pkg) {
  1002  		// Runtime package must not be instrumented.
  1003  		// Instrument skips runtime package. However, some runtime code can be
  1004  		// inlined into other packages and instrumented there. To avoid this,
  1005  		// we disable inlining of runtime functions when instrumenting.
  1006  		// The example that we observed is inlining of LockOSThread,
  1007  		// which led to false race reports on m contents.
  1008  		return n
  1009  	}
  1010  
  1011  	parent := base.Ctxt.PosTable.Pos(n.Pos()).Base().InliningIndex()
  1012  	sym := fn.Linksym()
  1013  
  1014  	// Check if we've already inlined this function at this particular
  1015  	// call site, in order to stop inlining when we reach the beginning
  1016  	// of a recursion cycle again. We don't inline immediately recursive
  1017  	// functions, but allow inlining if there is a recursion cycle of
  1018  	// many functions. Most likely, the inlining will stop before we
  1019  	// even hit the beginning of the cycle again, but this catches the
  1020  	// unusual case.
  1021  	for inlIndex := parent; inlIndex >= 0; inlIndex = base.Ctxt.InlTree.Parent(inlIndex) {
  1022  		if base.Ctxt.InlTree.InlinedFunction(inlIndex) == sym {
  1023  			if base.Flag.LowerM > 1 {
  1024  				fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", ir.Line(n), fn, ir.FuncName(ir.CurFunc))
  1025  			}
  1026  			return n
  1027  		}
  1028  	}
  1029  
  1030  	typecheck.FixVariadicCall(n)
  1031  
  1032  	inlIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym)
  1033  
  1034  	closureInitLSym := func(n *ir.CallExpr, fn *ir.Func) {
  1035  		// The linker needs FuncInfo metadata for all inlined
  1036  		// functions. This is typically handled by gc.enqueueFunc
  1037  		// calling ir.InitLSym for all function declarations in
  1038  		// typecheck.Target.Decls (ir.UseClosure adds all closures to
  1039  		// Decls).
  1040  		//
  1041  		// However, non-trivial closures in Decls are ignored, and are
  1042  		// instead enqueued when walk of the calling function
  1043  		// discovers them.
  1044  		//
  1045  		// This presents a problem for direct calls to closures.
  1046  		// Inlining will replace the entire closure definition with its
  1047  		// body, which hides the closure from walk and thus suppresses
  1048  		// symbol creation.
  1049  		//
  1050  		// Explicitly create a symbol early in this edge case to ensure
  1051  		// we keep this metadata.
  1052  		//
  1053  		// TODO: Refactor to keep a reference so this can all be done
  1054  		// by enqueueFunc.
  1055  
  1056  		if n.Op() != ir.OCALLFUNC {
  1057  			// Not a standard call.
  1058  			return
  1059  		}
  1060  		if n.X.Op() != ir.OCLOSURE {
  1061  			// Not a direct closure call.
  1062  			return
  1063  		}
  1064  
  1065  		clo := n.X.(*ir.ClosureExpr)
  1066  		if ir.IsTrivialClosure(clo) {
  1067  			// enqueueFunc will handle trivial closures anyway.
  1068  			return
  1069  		}
  1070  
  1071  		ir.InitLSym(fn, true)
  1072  	}
  1073  
  1074  	closureInitLSym(n, fn)
  1075  
  1076  	if base.Flag.GenDwarfInl > 0 {
  1077  		if !sym.WasInlined() {
  1078  			base.Ctxt.DwFixups.SetPrecursorFunc(sym, fn)
  1079  			sym.Set(obj.AttrWasInlined, true)
  1080  		}
  1081  	}
  1082  
  1083  	if base.Flag.LowerM != 0 {
  1084  		fmt.Printf("%v: inlining call to %v\n", ir.Line(n), fn)
  1085  	}
  1086  	if base.Flag.LowerM > 2 {
  1087  		fmt.Printf("%v: Before inlining: %+v\n", ir.Line(n), n)
  1088  	}
  1089  
  1090  	if base.Debug.PGOInline > 0 {
  1091  		csi := pgo.CallSiteInfo{LineOffset: pgo.NodeLineOffset(n, fn), Caller: ir.CurFunc}
  1092  		if _, ok := inlinedCallSites[csi]; !ok {
  1093  			inlinedCallSites[csi] = struct{}{}
  1094  		}
  1095  	}
  1096  
  1097  	res := InlineCall(n, fn, inlIndex)
  1098  
  1099  	if res == nil {
  1100  		base.FatalfAt(n.Pos(), "inlining call to %v failed", fn)
  1101  	}
  1102  
  1103  	if base.Flag.LowerM > 2 {
  1104  		fmt.Printf("%v: After inlining %+v\n\n", ir.Line(res), res)
  1105  	}
  1106  
  1107  	*inlCalls = append(*inlCalls, res)
  1108  
  1109  	return res
  1110  }
  1111  
  1112  // CalleeEffects appends any side effects from evaluating callee to init.
  1113  func CalleeEffects(init *ir.Nodes, callee ir.Node) {
  1114  	for {
  1115  		init.Append(ir.TakeInit(callee)...)
  1116  
  1117  		switch callee.Op() {
  1118  		case ir.ONAME, ir.OCLOSURE, ir.OMETHEXPR:
  1119  			return // done
  1120  
  1121  		case ir.OCONVNOP:
  1122  			conv := callee.(*ir.ConvExpr)
  1123  			callee = conv.X
  1124  
  1125  		case ir.OINLCALL:
  1126  			ic := callee.(*ir.InlinedCallExpr)
  1127  			init.Append(ic.Body.Take()...)
  1128  			callee = ic.SingleResult()
  1129  
  1130  		default:
  1131  			base.FatalfAt(callee.Pos(), "unexpected callee expression: %v", callee)
  1132  		}
  1133  	}
  1134  }
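
        // For example, in a call h()() where the inner call h() has already been
        // rewritten to an OINLCALL, the OINLCALL's init and body are hoisted into
        // init and its single result becomes the new callee, so that only a plain
        // ONAME, OCLOSURE or OMETHEXPR expression remains.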
  1135  
  1136  // oldInlineCall creates an InlinedCallExpr to replace the given call
  1137  // expression. fn is the callee function to be inlined. inlIndex is
  1138  // the inlining tree position index, for use with src.NewInliningBase
  1139  // when rewriting positions.
  1140  func oldInlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr {
  1141  	if base.Debug.TypecheckInl == 0 {
  1142  		typecheck.ImportedBody(fn)
  1143  	}
  1144  
  1145  	SSADumpInline(fn)
  1146  
  1147  	ninit := call.Init()
  1148  
  1149  	// For normal function calls, the function callee expression
  1150  	// may contain side effects. Make sure to preserve these,
  1151  	// if necessary (#42703).
  1152  	if call.Op() == ir.OCALLFUNC {
  1153  		CalleeEffects(&ninit, call.X)
  1154  	}
  1155  
  1156  	// Make temp names to use instead of the originals.
  1157  	inlvars := make(map[*ir.Name]*ir.Name)
  1158  
  1159  	// record formals/locals for later post-processing
  1160  	var inlfvars []*ir.Name
  1161  
  1162  	for _, ln := range fn.Inl.Dcl {
  1163  		if ln.Op() != ir.ONAME {
  1164  			continue
  1165  		}
  1166  		if ln.Class == ir.PPARAMOUT { // return values handled below.
  1167  			continue
  1168  		}
  1169  		inlf := typecheck.Expr(inlvar(ln)).(*ir.Name)
  1170  		inlvars[ln] = inlf
  1171  		if base.Flag.GenDwarfInl > 0 {
  1172  			if ln.Class == ir.PPARAM {
  1173  				inlf.Name().SetInlFormal(true)
  1174  			} else {
  1175  				inlf.Name().SetInlLocal(true)
  1176  			}
  1177  			inlf.SetPos(ln.Pos())
  1178  			inlfvars = append(inlfvars, inlf)
  1179  		}
  1180  	}
  1181  
  1182  	// Temporaries for return values. (Result parameters may be declared
  1183  	// lazily when fn.Inl.CanDelayResults is set; see canDelayResults.)
  1184  	var retvars []ir.Node
  1185  	for i, t := range fn.Type().Results().Fields().Slice() {
  1186  		var m *ir.Name
  1187  		if nn := t.Nname; nn != nil && !ir.IsBlank(nn.(*ir.Name)) && !strings.HasPrefix(nn.Sym().Name, "~r") {
  1188  			n := nn.(*ir.Name)
  1189  			m = inlvar(n)
  1190  			m = typecheck.Expr(m).(*ir.Name)
  1191  			inlvars[n] = m
  1192  		} else {
  1193  			// anonymous return values; synthesize names for use in the assignment that replaces the return
  1194  			m = retvar(t, i)
  1195  		}
  1196  
  1197  		if base.Flag.GenDwarfInl > 0 {
  1198  			// Don't update the src.Pos on a return variable if it
  1199  			// was manufactured by the inliner (e.g. "~R2"); such vars
  1200  			// were not part of the original callee.
  1201  			if !strings.HasPrefix(m.Sym().Name, "~R") {
  1202  				m.Name().SetInlFormal(true)
  1203  				m.SetPos(t.Pos)
  1204  				inlfvars = append(inlfvars, m)
  1205  			}
  1206  		}
  1207  
  1208  		retvars = append(retvars, m)
  1209  	}
  1210  
  1211  	// Assign arguments to the parameters' temp names.
  1212  	as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
  1213  	as.Def = true
  1214  	if call.Op() == ir.OCALLMETH {
  1215  		base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
  1216  	}
  1217  	as.Rhs.Append(call.Args...)
  1218  
  1219  	if recv := fn.Type().Recv(); recv != nil {
  1220  		as.Lhs.Append(inlParam(recv, as, inlvars))
  1221  	}
  1222  	for _, param := range fn.Type().Params().Fields().Slice() {
  1223  		as.Lhs.Append(inlParam(param, as, inlvars))
  1224  	}
  1225  
  1226  	if len(as.Rhs) != 0 {
  1227  		ninit.Append(typecheck.Stmt(as))
  1228  	}
  1229  
  1230  	if !fn.Inl.CanDelayResults {
  1231  		// Zero the return parameters.
  1232  		for _, n := range retvars {
  1233  			ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1234  			ras := ir.NewAssignStmt(base.Pos, n, nil)
  1235  			ninit.Append(typecheck.Stmt(ras))
  1236  		}
  1237  	}
  1238  
  1239  	retlabel := typecheck.AutoLabel(".i")
  1240  
  1241  	inlgen++
  1242  
  1243  	// Add an inline mark just before the inlined body.
  1244  	// This mark is inline in the code so that it's a reasonable spot
  1245  	// to put a breakpoint. Not sure if that's really necessary or not
  1246  	// (in which case it could go at the end of the function instead).
  1247  	// Note issue 28603.
  1248  	ninit.Append(ir.NewInlineMarkStmt(call.Pos().WithIsStmt(), int64(inlIndex)))
  1249  
  1250  	subst := inlsubst{
  1251  		retlabel:    retlabel,
  1252  		retvars:     retvars,
  1253  		inlvars:     inlvars,
  1254  		defnMarker:  ir.NilExpr{},
  1255  		bases:       make(map[*src.PosBase]*src.PosBase),
  1256  		newInlIndex: inlIndex,
  1257  		fn:          fn,
  1258  	}
  1259  	subst.edit = subst.node
  1260  
  1261  	body := subst.list(ir.Nodes(fn.Inl.Body))
  1262  
  1263  	lab := ir.NewLabelStmt(base.Pos, retlabel)
  1264  	body = append(body, lab)
  1265  
  1266  	if base.Flag.GenDwarfInl > 0 {
  1267  		for _, v := range inlfvars {
  1268  			v.SetPos(subst.updatedPos(v.Pos()))
  1269  		}
  1270  	}
  1271  
  1272  	//dumplist("ninit post", ninit);
  1273  
  1274  	res := ir.NewInlinedCallExpr(base.Pos, body, retvars)
  1275  	res.SetInit(ninit)
  1276  	res.SetType(call.Type())
  1277  	res.SetTypecheck(1)
  1278  	return res
  1279  }
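
        // As a rough sketch (identifiers illustrative), inlining z := add(x, y)
        // produces an OINLCALL whose Init declares and assigns parameter temporaries
        // from x and y and emits the inline mark, whose Body is the substituted copy
        // of add's body with its return rewritten into an assignment to a result
        // temporary followed by a goto to the ".i" label, and whose result list
        // holds that temporary.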
  1280  
  1281  // Every time we expand a function we generate a new set of tmpnames,
  1282  // PAUTOs in the calling function, and link them off of the
  1283  // PPARAMs, PAUTOs and PPARAMOUTs of the called function.
  1284  func inlvar(var_ *ir.Name) *ir.Name {
  1285  	if base.Flag.LowerM > 3 {
  1286  		fmt.Printf("inlvar %+v\n", var_)
  1287  	}
  1288  
  1289  	n := typecheck.NewName(var_.Sym())
  1290  	n.SetType(var_.Type())
  1291  	n.SetTypecheck(1)
  1292  	n.Class = ir.PAUTO
  1293  	n.SetUsed(true)
  1294  	n.SetAutoTemp(var_.AutoTemp())
  1295  	n.Curfn = ir.CurFunc // the calling function, not the called one
  1296  	n.SetAddrtaken(var_.Addrtaken())
  1297  
  1298  	ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
  1299  	return n
  1300  }
  1301  
  1302  // Synthesize a variable to store the inlined function's results in.
  1303  func retvar(t *types.Field, i int) *ir.Name {
  1304  	n := typecheck.NewName(typecheck.LookupNum("~R", i))
  1305  	n.SetType(t.Type)
  1306  	n.SetTypecheck(1)
  1307  	n.Class = ir.PAUTO
  1308  	n.SetUsed(true)
  1309  	n.Curfn = ir.CurFunc // the calling function, not the called one
  1310  	ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
  1311  	return n
  1312  }
  1313  
  1314  // The inlsubst type implements the actual inlining of a single
  1315  // function call.
  1316  type inlsubst struct {
  1317  	// Target of the goto substituted in place of a return.
  1318  	retlabel *types.Sym
  1319  
  1320  	// Temporary result variables.
  1321  	retvars []ir.Node
  1322  
  1323  	inlvars map[*ir.Name]*ir.Name
  1324  	// defnMarker is used to mark a Node for reassignment.
  1325  	// inlsubst.clovar sets this when creating a new ONAME.
  1326  	// inlsubst.node will set the correct Defn for inlvar.
  1327  	defnMarker ir.NilExpr
  1328  
  1329  	// bases maps from original PosBase to PosBase with an extra
  1330  	// inlined call frame.
  1331  	bases map[*src.PosBase]*src.PosBase
  1332  
  1333  	// newInlIndex is the index of the inlined call frame to
  1334  	// insert for inlined nodes.
  1335  	newInlIndex int
  1336  
  1337  	edit func(ir.Node) ir.Node // cached copy of subst.node method value closure
  1338  
  1339  	// If non-nil, we are inside a closure inside the inlined function, and
  1340  	// newclofn is the Func of the new inlined closure.
  1341  	newclofn *ir.Func
  1342  
  1343  	fn *ir.Func // For debug -- the func that is being inlined
  1344  
  1345  	// If true, then don't update source positions during substitution
  1346  	// (retain old source positions).
  1347  	noPosUpdate bool
  1348  }
  1349  
  1350  // list inlines a list of nodes.
  1351  func (subst *inlsubst) list(ll ir.Nodes) []ir.Node {
  1352  	s := make([]ir.Node, 0, len(ll))
  1353  	for _, n := range ll {
  1354  		s = append(s, subst.node(n))
  1355  	}
  1356  	return s
  1357  }
  1358  
  1359  // fields returns a list of the fields of a struct type representing receiver,
  1360  // params, or results, after duplicating the field nodes and substituting the
  1361  // Nname nodes inside the field nodes.
  1362  func (subst *inlsubst) fields(oldt *types.Type) []*types.Field {
  1363  	oldfields := oldt.FieldSlice()
  1364  	newfields := make([]*types.Field, len(oldfields))
  1365  	for i := range oldfields {
  1366  		newfields[i] = oldfields[i].Copy()
  1367  		if oldfields[i].Nname != nil {
  1368  			newfields[i].Nname = subst.node(oldfields[i].Nname.(*ir.Name))
  1369  		}
  1370  	}
  1371  	return newfields
  1372  }
  1373  
  1374  // clovar creates a new ONAME node for a local variable or param of a closure
  1375  // inside a function being inlined.
  1376  func (subst *inlsubst) clovar(n *ir.Name) *ir.Name {
  1377  	m := ir.NewNameAt(n.Pos(), n.Sym())
  1378  	m.Class = n.Class
  1379  	m.SetType(n.Type())
  1380  	m.SetTypecheck(1)
  1381  	if n.IsClosureVar() {
  1382  		m.SetIsClosureVar(true)
  1383  	}
  1384  	if n.Addrtaken() {
  1385  		m.SetAddrtaken(true)
  1386  	}
  1387  	if n.Used() {
  1388  		m.SetUsed(true)
  1389  	}
  1390  	m.Defn = n.Defn
  1391  
  1392  	m.Curfn = subst.newclofn
  1393  
  1394  	switch defn := n.Defn.(type) {
  1395  	case nil:
  1396  		// ok
  1397  	case *ir.Name:
  1398  		if !n.IsClosureVar() {
  1399  			base.FatalfAt(n.Pos(), "want closure variable, got: %+v", n)
  1400  		}
  1401  		if n.Sym().Pkg != types.LocalPkg {
  1402  			// If the closure came from inlining a function from
  1403  			// another package, we must change the package of the captured
  1404  			// variable to localpkg, so that the fields of the closure
  1405  			// struct are in the local package and can be accessed even if the
  1406  			// name is not exported. If you disable this code, you can
  1407  			// reproduce the problem by running 'go test
  1408  			// go/internal/srcimporter'. TODO(mdempsky) - maybe change
  1409  			// how we create closure structs?
  1410  			m.SetSym(types.LocalPkg.Lookup(n.Sym().Name))
  1411  		}
  1412  		// Make sure any inlvar which is the Defn
  1413  		// of an ONAME closure var is rewritten
  1414  		// during inlining. Don't substitute
  1415  		// if Defn node is outside inlined function.
  1416  		if subst.inlvars[n.Defn.(*ir.Name)] != nil {
  1417  			m.Defn = subst.node(n.Defn)
  1418  		}
  1419  	case *ir.AssignStmt, *ir.AssignListStmt:
  1420  		// Mark node for reassignment at the end of inlsubst.node.
  1421  		m.Defn = &subst.defnMarker
  1422  	case *ir.TypeSwitchGuard:
  1423  		// TODO(mdempsky): Set m.Defn properly. See discussion on #45743.
  1424  	case *ir.RangeStmt:
  1425  		// TODO: Set m.Defn properly if we support inlining range statement in the future.
  1426  	default:
  1427  		base.FatalfAt(n.Pos(), "unexpected Defn: %+v", defn)
  1428  	}
  1429  
  1430  	if n.Outer != nil {
  1431  		// Either the outer variable is defined in function being inlined,
  1432  		// and we will replace it with the substituted variable, or it is
  1433  		// defined outside the function being inlined, and we should just
  1434  		// skip the outer variable (the closure variable of the function
  1435  		// being inlined).
  1436  		s := subst.node(n.Outer).(*ir.Name)
  1437  		if s == n.Outer {
  1438  			s = n.Outer.Outer
  1439  		}
  1440  		m.Outer = s
  1441  	}
  1442  	return m
  1443  }
  1444  
  1445  // closure does the necessary substitutions for a ClosureExpr n and returns the new
  1446  // closure node.
  1447  func (subst *inlsubst) closure(n *ir.ClosureExpr) ir.Node {
  1448  	// Prior to the subst edit, set a flag in the inlsubst to indicate
  1449  	// that we don't want to update the source positions in the new
  1450  	// closure function. If we updated those positions, it would
  1451  	// appear that the closure itself has things inlined into it,
  1452  	// which is not the case. See issue #46234 for more details. At
  1453  	// the same time, we do want to update the position in the new
  1454  	// ClosureExpr (which is part of the function we're working on).
  1455  	// See #49171 for an example of what happens if we miss that update.
  1456  	newClosurePos := subst.updatedPos(n.Pos())
  1457  	defer func(prev bool) { subst.noPosUpdate = prev }(subst.noPosUpdate)
  1458  	subst.noPosUpdate = true
  1459  
  1460  	//fmt.Printf("Inlining func %v with closure into %v\n", subst.fn, ir.FuncName(ir.CurFunc))
  1461  
  1462  	oldfn := n.Func
  1463  	newfn := ir.NewClosureFunc(oldfn.Pos(), true)
  1464  
  1465  	if subst.newclofn != nil {
  1466  		//fmt.Printf("Inlining a closure with a nested closure\n")
  1467  	}
  1468  	prevxfunc := subst.newclofn
  1469  
  1470  	// Mark that we are now substituting within a closure (within the
  1471  	// inlined function), and create new nodes for all the local
  1472  	// vars/params inside this closure.
  1473  	subst.newclofn = newfn
  1474  	newfn.Dcl = nil
  1475  	newfn.ClosureVars = nil
  1476  	for _, oldv := range oldfn.Dcl {
  1477  		newv := subst.clovar(oldv)
  1478  		subst.inlvars[oldv] = newv
  1479  		newfn.Dcl = append(newfn.Dcl, newv)
  1480  	}
  1481  	for _, oldv := range oldfn.ClosureVars {
  1482  		newv := subst.clovar(oldv)
  1483  		subst.inlvars[oldv] = newv
  1484  		newfn.ClosureVars = append(newfn.ClosureVars, newv)
  1485  	}
  1486  
  1487  	// Need to replace ONAME nodes in
  1488  	// newfn.Type().FuncType().Receiver/Params/Results.FieldSlice().Nname
  1489  	oldt := oldfn.Type()
  1490  	newrecvs := subst.fields(oldt.Recvs())
  1491  	var newrecv *types.Field
  1492  	if len(newrecvs) > 0 {
  1493  		newrecv = newrecvs[0]
  1494  	}
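        	// The nil argument to NewSignature is the type-parameter list;
        	// function literals cannot be generic, so there are no type
        	// parameters to copy here.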
  1495  	newt := types.NewSignature(oldt.Pkg(), newrecv,
  1496  		nil, subst.fields(oldt.Params()), subst.fields(oldt.Results()))
  1497  
  1498  	newfn.Nname.SetType(newt)
  1499  	newfn.Body = subst.list(oldfn.Body)
  1500  
  1501  	// Remove the nodes for the current closure from subst.inlvars
  1502  	for _, oldv := range oldfn.Dcl {
  1503  		delete(subst.inlvars, oldv)
  1504  	}
  1505  	for _, oldv := range oldfn.ClosureVars {
  1506  		delete(subst.inlvars, oldv)
  1507  	}
  1508  	// Go back to previous closure func
  1509  	subst.newclofn = prevxfunc
  1510  
  1511  	// Actually create the named function for the closure, now that
  1512  	// the closure is inlined in a specific function.
  1513  	newclo := newfn.OClosure
  1514  	newclo.SetPos(newClosurePos)
  1515  	newclo.SetInit(subst.list(n.Init()))
  1516  	return typecheck.Expr(newclo)
  1517  }
  1518  
  1519  // node recursively copies a node from the saved pristine body of the
  1520  // inlined function, substituting references to input/output
  1521  // parameters with ones to the tmpnames, and substituting returns with
  1522  // assignments to the output.
  1523  func (subst *inlsubst) node(n ir.Node) ir.Node {
  1524  	if n == nil {
  1525  		return nil
  1526  	}
  1527  
  1528  	switch n.Op() {
  1529  	case ir.ONAME:
  1530  		n := n.(*ir.Name)
  1531  
  1532  		// Handle captured variables when inlining closures.
  1533  		if n.IsClosureVar() && subst.newclofn == nil {
  1534  			o := n.Outer
  1535  
  1536  			// Deal with the case where a sequence of closures is inlined.
  1537  			// TODO(danscales) - write test case to see if we need to
  1538  			// go up multiple levels.
  1539  			if o.Curfn != ir.CurFunc {
  1540  				o = o.Outer
  1541  			}
  1542  
  1543  			// make sure the outer param matches the inlining location
  1544  			if o == nil || o.Curfn != ir.CurFunc {
  1545  				base.Fatalf("%v: unresolvable capture %v\n", ir.Line(n), n)
  1546  			}
  1547  
  1548  			if base.Flag.LowerM > 2 {
  1549  				fmt.Printf("substituting captured name %+v  ->  %+v\n", n, o)
  1550  			}
  1551  			return o
  1552  		}
  1553  
  1554  		if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode
  1555  			if base.Flag.LowerM > 2 {
  1556  				fmt.Printf("substituting name %+v  ->  %+v\n", n, inlvar)
  1557  			}
  1558  			return inlvar
  1559  		}
  1560  
  1561  		if base.Flag.LowerM > 2 {
  1562  			fmt.Printf("not substituting name %+v\n", n)
  1563  		}
  1564  		return n
  1565  
  1566  	case ir.OMETHEXPR:
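        		// Method expressions reference a method of a type, not
        		// anything local to the inlined body, so the node can be
        		// reused as is.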
  1567  		n := n.(*ir.SelectorExpr)
  1568  		return n
  1569  
  1570  	case ir.OLITERAL, ir.ONIL, ir.OTYPE:
  1571  		// If n is a named constant or type, we can continue
  1572  		// using it in the inline copy. Otherwise, make a copy
  1573  		// so we can update the line number.
  1574  		if n.Sym() != nil {
  1575  			return n
  1576  		}
  1577  
  1578  	case ir.ORETURN:
  1579  		if subst.newclofn != nil {
  1580  			// Don't do special substitutions if inside a closure
  1581  			break
  1582  		}
  1583  		// Because of the above test for subst.newclofn,
  1584  		// this return is guaranteed to belong to the current inlined function.
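        		// Roughly, a "return a, b" in the inlined body becomes
        		// "~R0, ~R1 = a, b; goto retlabel" in the caller (see the
        		// OAS2 construction below).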
  1585  		n := n.(*ir.ReturnStmt)
  1586  		init := subst.list(n.Init())
  1587  		if len(subst.retvars) != 0 && len(n.Results) != 0 {
  1588  			as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
  1589  
  1590  			// Make a shallow copy of retvars.
  1591  			// Otherwise OINLCALL.Rlist will be the same list,
  1592  			// and later walk and typecheck may clobber it.
  1593  			for _, n := range subst.retvars {
  1594  				as.Lhs.Append(n)
  1595  			}
  1596  			as.Rhs = subst.list(n.Results)
  1597  
  1598  			if subst.fn.Inl.CanDelayResults {
  1599  				for _, n := range as.Lhs {
  1600  					as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1601  					n.Name().Defn = as
  1602  				}
  1603  			}
  1604  
  1605  			init = append(init, typecheck.Stmt(as))
  1606  		}
  1607  		init = append(init, ir.NewBranchStmt(base.Pos, ir.OGOTO, subst.retlabel))
  1608  		typecheck.Stmts(init)
  1609  		return ir.NewBlockStmt(base.Pos, init)
  1610  
  1611  	case ir.OGOTO, ir.OBREAK, ir.OCONTINUE:
  1612  		if subst.newclofn != nil {
  1613  			// Don't do special substitutions if inside a closure
  1614  			break
  1615  		}
  1616  		n := n.(*ir.BranchStmt)
  1617  		m := ir.Copy(n).(*ir.BranchStmt)
  1618  		m.SetPos(subst.updatedPos(m.Pos()))
  1619  		m.SetInit(nil)
  1620  		m.Label = translateLabel(n.Label)
  1621  		return m
  1622  
  1623  	case ir.OLABEL:
  1624  		if subst.newclofn != nil {
  1625  			// Don't do special substitutions if inside a closure
  1626  			break
  1627  		}
  1628  		n := n.(*ir.LabelStmt)
  1629  		m := ir.Copy(n).(*ir.LabelStmt)
  1630  		m.SetPos(subst.updatedPos(m.Pos()))
  1631  		m.SetInit(nil)
  1632  		m.Label = translateLabel(n.Label)
  1633  		return m
  1634  
  1635  	case ir.OCLOSURE:
  1636  		return subst.closure(n.(*ir.ClosureExpr))
  1637  
  1638  	}
  1639  
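        	// Default case: shallow-copy the node, rebase its position into
        	// the inlining context, and recursively substitute its children.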
  1640  	m := ir.Copy(n)
  1641  	m.SetPos(subst.updatedPos(m.Pos()))
  1642  	ir.EditChildren(m, subst.edit)
  1643  
  1644  	if subst.newclofn == nil {
  1645  		// Translate any label on FOR, RANGE loops, SWITCH or SELECT
  1646  		switch m.Op() {
  1647  		case ir.OFOR:
  1648  			m := m.(*ir.ForStmt)
  1649  			m.Label = translateLabel(m.Label)
  1650  			return m
  1651  
  1652  		case ir.ORANGE:
  1653  			m := m.(*ir.RangeStmt)
  1654  			m.Label = translateLabel(m.Label)
  1655  			return m
  1656  
  1657  		case ir.OSWITCH:
  1658  			m := m.(*ir.SwitchStmt)
  1659  			m.Label = translateLabel(m.Label)
  1660  			return m
  1661  
  1662  		case ir.OSELECT:
  1663  			m := m.(*ir.SelectStmt)
  1664  			m.Label = translateLabel(m.Label)
  1665  			return m
  1666  		}
  1667  	}
  1668  
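        	// If clovar marked a closure variable's Defn with defnMarker,
        	// point the Defn at the copied assignment now that it exists.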
  1669  	switch m := m.(type) {
  1670  	case *ir.AssignStmt:
  1671  		if lhs, ok := m.X.(*ir.Name); ok && lhs.Defn == &subst.defnMarker {
  1672  			lhs.Defn = m
  1673  		}
  1674  	case *ir.AssignListStmt:
  1675  		for _, lhs := range m.Lhs {
  1676  			if lhs, ok := lhs.(*ir.Name); ok && lhs.Defn == &subst.defnMarker {
  1677  				lhs.Defn = m
  1678  			}
  1679  		}
  1680  	}
  1681  
  1682  	return m
  1683  }
  1684  
  1685  // translateLabel makes a label from an inlined function unique by appending
  1686  // "·" and the current value of inlgen; a nil label is returned as nil.
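        // For example, when inlgen is 3, a label "loop" becomes "loop·3".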
  1687  func translateLabel(l *types.Sym) *types.Sym {
  1688  	if l == nil {
  1689  		return nil
  1690  	}
  1691  	p := fmt.Sprintf("%s·%d", l.Name, inlgen)
  1692  	return typecheck.Lookup(p)
  1693  }
  1694  
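        // updatedPos rebases xpos onto an inlining position base for the new
        // inline index, creating (and caching) that base on first use, so that
        // positions in the inlined body record where the inlining happened.
        // It returns xpos unchanged while noPosUpdate is set.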
  1695  func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
  1696  	if subst.noPosUpdate {
  1697  		return xpos
  1698  	}
  1699  	pos := base.Ctxt.PosTable.Pos(xpos)
  1700  	oldbase := pos.Base() // can be nil
  1701  	newbase := subst.bases[oldbase]
  1702  	if newbase == nil {
  1703  		newbase = src.NewInliningBase(oldbase, subst.newInlIndex)
  1704  		subst.bases[oldbase] = newbase
  1705  	}
  1706  	pos.SetBase(newbase)
  1707  	return base.Ctxt.PosTable.XPos(pos)
  1708  }
  1709  
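        // pruneUnusedAutos returns ll with any PAUTO (function-local) variables
        // removed that the hairyVisitor did not record as used.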
  1710  func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
  1711  	s := make([]*ir.Name, 0, len(ll))
  1712  	for _, n := range ll {
  1713  		if n.Class == ir.PAUTO {
  1714  			if !vis.usedLocals.Has(n) {
  1715  				continue
  1716  			}
  1717  		}
  1718  		s = append(s, n)
  1719  	}
  1720  	return s
  1721  }
  1722  
  1723  // numNonClosures returns the number of functions in list which are not closures.
  1724  func numNonClosures(list []*ir.Func) int {
  1725  	count := 0
  1726  	for _, fn := range list {
  1727  		if fn.OClosure == nil {
  1728  			count++
  1729  		}
  1730  	}
  1731  	return count
  1732  }
  1733  
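        // doList applies do to each non-nil node in list and reports whether
        // any call returned true, stopping at the first one that does.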
  1734  func doList(list []ir.Node, do func(ir.Node) bool) bool {
  1735  	for _, x := range list {
  1736  		if x != nil {
  1737  			if do(x) {
  1738  				return true
  1739  			}
  1740  		}
  1741  	}
  1742  	return false
  1743  }
  1744  
  1745  // isIndexingCoverageCounter returns true if the specified node 'n' is indexing
  1746  // into a coverage counter array.
  1747  func isIndexingCoverageCounter(n ir.Node) bool {
  1748  	if n.Op() != ir.OINDEX {
  1749  		return false
  1750  	}
  1751  	ixn := n.(*ir.IndexExpr)
  1752  	if ixn.X.Op() != ir.ONAME || !ixn.X.Type().IsArray() {
  1753  		return false
  1754  	}
  1755  	nn := ixn.X.(*ir.Name)
  1756  	return nn.CoverageCounter()
  1757  }
  1758  
  1759  // isAtomicCoverageCounterUpdate examines the specified call node to
  1760  // determine whether it represents a call to sync/atomic.AddUint32 or
  1761  // sync/atomic.StoreUint32 that updates a coverage counter.
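        // A matching call has the shape atomic.AddUint32(&counters[i], v)
        // (or StoreUint32(&counters[i], v)), where counters is an array
        // marked as a coverage counter.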
  1762  func isAtomicCoverageCounterUpdate(cn *ir.CallExpr) bool {
  1763  	if cn.X.Op() != ir.ONAME {
  1764  		return false
  1765  	}
  1766  	name := cn.X.(*ir.Name)
  1767  	if name.Class != ir.PFUNC {
  1768  		return false
  1769  	}
  1770  	fn := name.Sym().Name
  1771  	if name.Sym().Pkg.Path != "sync/atomic" ||
  1772  		(fn != "AddUint32" && fn != "StoreUint32") {
  1773  		return false
  1774  	}
  1775  	if len(cn.Args) != 2 || cn.Args[0].Op() != ir.OADDR {
  1776  		return false
  1777  	}
  1778  	adn := cn.Args[0].(*ir.AddrExpr)
  1779  	v := isIndexingCoverageCounter(adn.X)
  1780  	return v
  1781  }