github.com/go-asm/go@v1.21.1-0.20240213172139-40c5ead50c48/cmd/compile/walk/expr.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"strings"
    11  
    12  	"github.com/go-asm/go/abi"
    13  	"github.com/go-asm/go/buildcfg"
    14  
    15  	"github.com/go-asm/go/cmd/compile/base"
    16  	"github.com/go-asm/go/cmd/compile/ir"
    17  	"github.com/go-asm/go/cmd/compile/objw"
    18  	"github.com/go-asm/go/cmd/compile/reflectdata"
    19  	"github.com/go-asm/go/cmd/compile/rttype"
    20  	"github.com/go-asm/go/cmd/compile/staticdata"
    21  	"github.com/go-asm/go/cmd/compile/typecheck"
    22  	"github.com/go-asm/go/cmd/compile/types"
    23  	"github.com/go-asm/go/cmd/obj"
    24  	"github.com/go-asm/go/cmd/objabi"
    25  )
    26  
    27  // The result of walkExpr MUST be assigned back to n, e.g.
    28  //
    29  //	n.Left = walkExpr(n.Left, init)
    30  func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
    31  	if n == nil {
    32  		return n
    33  	}
    34  
    35  	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
    36  		// not okay to use n->ninit when walking n,
    37  		// because we might replace n with some other node
    38  		// and would lose the init list.
    39  		base.Fatalf("walkExpr init == &n->ninit")
    40  	}
    41  
    42  	if len(n.Init()) != 0 {
    43  		walkStmtList(n.Init())
    44  		init.Append(ir.TakeInit(n)...)
    45  	}
    46  
    47  	lno := ir.SetPos(n)
    48  
    49  	if base.Flag.LowerW > 1 {
    50  		ir.Dump("before walk expr", n)
    51  	}
    52  
    53  	if n.Typecheck() != 1 {
    54  		base.Fatalf("missed typecheck: %+v", n)
    55  	}
    56  
    57  	if n.Type().IsUntyped() {
    58  		base.Fatalf("expression has untyped type: %+v", n)
    59  	}
    60  
    61  	n = walkExpr1(n, init)
    62  
    63  	// Eagerly compute sizes of all expressions for the back end.
    64  	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
    65  		types.CheckSize(typ)
    66  	}
    67  	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
    68  		types.CheckSize(n.Heapaddr.Type())
    69  	}
    70  	if ir.IsConst(n, constant.String) {
    71  		// Emit the string symbol now to avoid emitting
    72  		// any string symbols concurrently during the backend.
    73  		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
    74  	}
    75  
    76  	if base.Flag.LowerW != 0 && n != nil {
    77  		ir.Dump("after walk expr", n)
    78  	}
    79  
    80  	base.Pos = lno
    81  	return n
    82  }
    83  
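        // walkExpr1 dispatches on n.Op() to the specific walk helper for that expression kind.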
    84  func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
    85  	switch n.Op() {
    86  	default:
    87  		ir.Dump("walk", n)
    88  		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
    89  		panic("unreachable")
    90  
    91  	case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
    92  		return n
    93  
    94  	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
    95  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
    96  		// Perhaps refactor to use Node.mayBeShared for these instead.
    97  		// If these return early, make sure to still call
    98  		// StringSym for constant strings.
    99  		return n
   100  
   101  	case ir.OMETHEXPR:
   102  		// TODO(mdempsky): Do this right after type checking.
   103  		n := n.(*ir.SelectorExpr)
   104  		return n.FuncName()
   105  
   106  	case ir.OMIN, ir.OMAX:
   107  		n := n.(*ir.CallExpr)
   108  		return walkMinMax(n, init)
   109  
   110  	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
   111  		n := n.(*ir.UnaryExpr)
   112  		n.X = walkExpr(n.X, init)
   113  		return n
   114  
   115  	case ir.ODOTMETH, ir.ODOTINTER:
   116  		n := n.(*ir.SelectorExpr)
   117  		n.X = walkExpr(n.X, init)
   118  		return n
   119  
   120  	case ir.OADDR:
   121  		n := n.(*ir.AddrExpr)
   122  		n.X = walkExpr(n.X, init)
   123  		return n
   124  
   125  	case ir.ODEREF:
   126  		n := n.(*ir.StarExpr)
   127  		n.X = walkExpr(n.X, init)
   128  		return n
   129  
   130  	case ir.OMAKEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
   131  		ir.OUNSAFEADD:
   132  		n := n.(*ir.BinaryExpr)
   133  		n.X = walkExpr(n.X, init)
   134  		n.Y = walkExpr(n.Y, init)
   135  		return n
   136  
   137  	case ir.OUNSAFESLICE:
   138  		n := n.(*ir.BinaryExpr)
   139  		return walkUnsafeSlice(n, init)
   140  
   141  	case ir.OUNSAFESTRING:
   142  		n := n.(*ir.BinaryExpr)
   143  		return walkUnsafeString(n, init)
   144  
   145  	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
   146  		n := n.(*ir.UnaryExpr)
   147  		return walkUnsafeData(n, init)
   148  
   149  	case ir.ODOT, ir.ODOTPTR:
   150  		n := n.(*ir.SelectorExpr)
   151  		return walkDot(n, init)
   152  
   153  	case ir.ODOTTYPE, ir.ODOTTYPE2:
   154  		n := n.(*ir.TypeAssertExpr)
   155  		return walkDotType(n, init)
   156  
   157  	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
   158  		n := n.(*ir.DynamicTypeAssertExpr)
   159  		return walkDynamicDotType(n, init)
   160  
   161  	case ir.OLEN, ir.OCAP:
   162  		n := n.(*ir.UnaryExpr)
   163  		return walkLenCap(n, init)
   164  
   165  	case ir.OCOMPLEX:
   166  		n := n.(*ir.BinaryExpr)
   167  		n.X = walkExpr(n.X, init)
   168  		n.Y = walkExpr(n.Y, init)
   169  		return n
   170  
   171  	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
   172  		n := n.(*ir.BinaryExpr)
   173  		return walkCompare(n, init)
   174  
   175  	case ir.OANDAND, ir.OOROR:
   176  		n := n.(*ir.LogicalExpr)
   177  		return walkLogical(n, init)
   178  
   179  	case ir.OPRINT, ir.OPRINTLN:
   180  		return walkPrint(n.(*ir.CallExpr), init)
   181  
   182  	case ir.OPANIC:
   183  		n := n.(*ir.UnaryExpr)
   184  		return mkcall("gopanic", nil, init, n.X)
   185  
   186  	case ir.ORECOVERFP:
   187  		return walkRecoverFP(n.(*ir.CallExpr), init)
   188  
   189  	case ir.OCFUNC:
   190  		return n
   191  
   192  	case ir.OCALLINTER, ir.OCALLFUNC:
   193  		n := n.(*ir.CallExpr)
   194  		return walkCall(n, init)
   195  
   196  	case ir.OAS, ir.OASOP:
   197  		return walkAssign(init, n)
   198  
   199  	case ir.OAS2:
   200  		n := n.(*ir.AssignListStmt)
   201  		return walkAssignList(init, n)
   202  
   203  	// a,b,... = fn()
   204  	case ir.OAS2FUNC:
   205  		n := n.(*ir.AssignListStmt)
   206  		return walkAssignFunc(init, n)
   207  
   208  	// x, y = <-c
   209  	// order.stmt made sure x is addressable or blank.
   210  	case ir.OAS2RECV:
   211  		n := n.(*ir.AssignListStmt)
   212  		return walkAssignRecv(init, n)
   213  
   214  	// a,b = m[i]
   215  	case ir.OAS2MAPR:
   216  		n := n.(*ir.AssignListStmt)
   217  		return walkAssignMapRead(init, n)
   218  
   219  	case ir.ODELETE:
   220  		n := n.(*ir.CallExpr)
   221  		return walkDelete(init, n)
   222  
   223  	case ir.OAS2DOTTYPE:
   224  		n := n.(*ir.AssignListStmt)
   225  		return walkAssignDotType(n, init)
   226  
   227  	case ir.OCONVIFACE:
   228  		n := n.(*ir.ConvExpr)
   229  		return walkConvInterface(n, init)
   230  
   231  	case ir.OCONV, ir.OCONVNOP:
   232  		n := n.(*ir.ConvExpr)
   233  		return walkConv(n, init)
   234  
   235  	case ir.OSLICE2ARR:
   236  		n := n.(*ir.ConvExpr)
   237  		return walkSliceToArray(n, init)
   238  
   239  	case ir.OSLICE2ARRPTR:
   240  		n := n.(*ir.ConvExpr)
   241  		n.X = walkExpr(n.X, init)
   242  		return n
   243  
   244  	case ir.ODIV, ir.OMOD:
   245  		n := n.(*ir.BinaryExpr)
   246  		return walkDivMod(n, init)
   247  
   248  	case ir.OINDEX:
   249  		n := n.(*ir.IndexExpr)
   250  		return walkIndex(n, init)
   251  
   252  	case ir.OINDEXMAP:
   253  		n := n.(*ir.IndexExpr)
   254  		return walkIndexMap(n, init)
   255  
   256  	case ir.ORECV:
   257  		base.Fatalf("walkExpr ORECV") // should see inside OAS only
   258  		panic("unreachable")
   259  
   260  	case ir.OSLICEHEADER:
   261  		n := n.(*ir.SliceHeaderExpr)
   262  		return walkSliceHeader(n, init)
   263  
   264  	case ir.OSTRINGHEADER:
   265  		n := n.(*ir.StringHeaderExpr)
   266  		return walkStringHeader(n, init)
   267  
   268  	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
   269  		n := n.(*ir.SliceExpr)
   270  		return walkSlice(n, init)
   271  
   272  	case ir.ONEW:
   273  		n := n.(*ir.UnaryExpr)
   274  		return walkNew(n, init)
   275  
   276  	case ir.OADDSTR:
   277  		return walkAddString(n.(*ir.AddStringExpr), init)
   278  
   279  	case ir.OAPPEND:
   280  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
   281  		base.Fatalf("append outside assignment")
   282  		panic("unreachable")
   283  
   284  	case ir.OCOPY:
   285  		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)
   286  
   287  	case ir.OCLEAR:
   288  		n := n.(*ir.UnaryExpr)
   289  		return walkClear(n)
   290  
   291  	case ir.OCLOSE:
   292  		n := n.(*ir.UnaryExpr)
   293  		return walkClose(n, init)
   294  
   295  	case ir.OMAKECHAN:
   296  		n := n.(*ir.MakeExpr)
   297  		return walkMakeChan(n, init)
   298  
   299  	case ir.OMAKEMAP:
   300  		n := n.(*ir.MakeExpr)
   301  		return walkMakeMap(n, init)
   302  
   303  	case ir.OMAKESLICE:
   304  		n := n.(*ir.MakeExpr)
   305  		return walkMakeSlice(n, init)
   306  
   307  	case ir.OMAKESLICECOPY:
   308  		n := n.(*ir.MakeExpr)
   309  		return walkMakeSliceCopy(n, init)
   310  
   311  	case ir.ORUNESTR:
   312  		n := n.(*ir.ConvExpr)
   313  		return walkRuneToString(n, init)
   314  
   315  	case ir.OBYTES2STR, ir.ORUNES2STR:
   316  		n := n.(*ir.ConvExpr)
   317  		return walkBytesRunesToString(n, init)
   318  
   319  	case ir.OBYTES2STRTMP:
   320  		n := n.(*ir.ConvExpr)
   321  		return walkBytesToStringTemp(n, init)
   322  
   323  	case ir.OSTR2BYTES:
   324  		n := n.(*ir.ConvExpr)
   325  		return walkStringToBytes(n, init)
   326  
   327  	case ir.OSTR2BYTESTMP:
   328  		n := n.(*ir.ConvExpr)
   329  		return walkStringToBytesTemp(n, init)
   330  
   331  	case ir.OSTR2RUNES:
   332  		n := n.(*ir.ConvExpr)
   333  		return walkStringToRunes(n, init)
   334  
   335  	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
   336  		return walkCompLit(n, init)
   337  
   338  	case ir.OSEND:
   339  		n := n.(*ir.SendStmt)
   340  		return walkSend(n, init)
   341  
   342  	case ir.OCLOSURE:
   343  		return walkClosure(n.(*ir.ClosureExpr), init)
   344  
   345  	case ir.OMETHVALUE:
   346  		return walkMethodValue(n.(*ir.SelectorExpr), init)
   347  	}
   348  
   349  	// No return! Each case must return (or panic),
   350  	// to avoid confusion about what gets returned
   351  	// in the presence of type assertions.
   352  }
   353  
   354  // walk the whole tree of the body of an
   355  // expression or simple statement.
   356  // the types of expressions are calculated.
   357  // compile-time constants are evaluated.
   358  // complex side effects like statements are appended to init.
   359  func walkExprList(s []ir.Node, init *ir.Nodes) {
   360  	for i := range s {
   361  		s[i] = walkExpr(s[i], init)
   362  	}
   363  }
   364  
   365  func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
   366  	for i, n := range s {
   367  		s[i] = cheapExpr(n, init)
   368  		s[i] = walkExpr(s[i], init)
   369  	}
   370  }
   371  
   372  func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
   373  	for i, n := range s {
   374  		s[i] = safeExpr(n, init)
   375  		s[i] = walkExpr(s[i], init)
   376  	}
   377  }
   378  
   379  // return side-effect free and cheap n, appending side effects to init.
   380  // result may not be assignable.
   381  func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
   382  	switch n.Op() {
   383  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   384  		return n
   385  	}
   386  
   387  	return copyExpr(n, n.Type(), init)
   388  }
   389  
   390  // return side effect-free n, appending side effects to init.
   391  // result is assignable if n is.
   392  func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
   393  	if n == nil {
   394  		return nil
   395  	}
   396  
   397  	if len(n.Init()) != 0 {
   398  		walkStmtList(n.Init())
   399  		init.Append(ir.TakeInit(n)...)
   400  	}
   401  
   402  	switch n.Op() {
   403  	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
   404  		return n
   405  
   406  	case ir.OLEN, ir.OCAP:
   407  		n := n.(*ir.UnaryExpr)
   408  		l := safeExpr(n.X, init)
   409  		if l == n.X {
   410  			return n
   411  		}
   412  		a := ir.Copy(n).(*ir.UnaryExpr)
   413  		a.X = l
   414  		return walkExpr(typecheck.Expr(a), init)
   415  
   416  	case ir.ODOT, ir.ODOTPTR:
   417  		n := n.(*ir.SelectorExpr)
   418  		l := safeExpr(n.X, init)
   419  		if l == n.X {
   420  			return n
   421  		}
   422  		a := ir.Copy(n).(*ir.SelectorExpr)
   423  		a.X = l
   424  		return walkExpr(typecheck.Expr(a), init)
   425  
   426  	case ir.ODEREF:
   427  		n := n.(*ir.StarExpr)
   428  		l := safeExpr(n.X, init)
   429  		if l == n.X {
   430  			return n
   431  		}
   432  		a := ir.Copy(n).(*ir.StarExpr)
   433  		a.X = l
   434  		return walkExpr(typecheck.Expr(a), init)
   435  
   436  	case ir.OINDEX, ir.OINDEXMAP:
   437  		n := n.(*ir.IndexExpr)
   438  		l := safeExpr(n.X, init)
   439  		r := safeExpr(n.Index, init)
   440  		if l == n.X && r == n.Index {
   441  			return n
   442  		}
   443  		a := ir.Copy(n).(*ir.IndexExpr)
   444  		a.X = l
   445  		a.Index = r
   446  		return walkExpr(typecheck.Expr(a), init)
   447  
   448  	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
   449  		n := n.(*ir.CompLitExpr)
   450  		if isStaticCompositeLiteral(n) {
   451  			return n
   452  		}
   453  	}
   454  
   455  	// make a copy; must not be used as an lvalue
   456  	if ir.IsAddressable(n) {
   457  		base.Fatalf("missing lvalue case in safeExpr: %v", n)
   458  	}
   459  	return cheapExpr(n, init)
   460  }
   461  
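        // copyExpr copies the value of n into a fresh temporary of type t, appending
        // the walked assignment to init, and returns the temporary.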
   462  func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
   463  	l := typecheck.TempAt(base.Pos, ir.CurFunc, t)
   464  	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
   465  	return l
   466  }
   467  
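        // walkAddString walks an OADDSTR node, rewriting the concatenation into a call to a
        // runtime concatstring helper, e.g. a+b+c becomes concatstring3(buf, a, b, c).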
   468  func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
   469  	c := len(n.List)
   470  
   471  	if c < 2 {
   472  		base.Fatalf("walkAddString count %d too small", c)
   473  	}
   474  
   475  	buf := typecheck.NodNil()
   476  	if n.Esc() == ir.EscNone {
   477  		sz := int64(0)
   478  		for _, n1 := range n.List {
   479  			if n1.Op() == ir.OLITERAL {
   480  				sz += int64(len(ir.StringVal(n1)))
   481  			}
   482  		}
   483  
   484  		// Don't allocate the buffer if the result clearly won't fit (the constant parts alone exceed the buffer size).
   485  		if sz < tmpstringbufsize {
   486  			// Create temporary buffer for result string on stack.
   487  			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
   488  		}
   489  	}
   490  
   491  	// build list of string arguments
   492  	args := []ir.Node{buf}
   493  	for _, n2 := range n.List {
   494  		args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
   495  	}
   496  
   497  	var fn string
   498  	if c <= 5 {
   499  		// small numbers of strings use direct runtime helpers.
   500  		// note: order.expr knows this cutoff too.
   501  		fn = fmt.Sprintf("concatstring%d", c)
   502  	} else {
   503  		// large numbers of strings are passed to the runtime as a slice.
   504  		fn = "concatstrings"
   505  
   506  		t := types.NewSlice(types.Types[types.TSTRING])
   507  		// args[1:] to skip buf arg
   508  		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
   509  		slice.Prealloc = n.Prealloc
   510  		args = []ir.Node{buf, slice}
   511  		slice.SetEsc(ir.EscNone)
   512  	}
   513  
   514  	cat := typecheck.LookupRuntime(fn)
   515  	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
   516  	r.Args = args
   517  	r1 := typecheck.Expr(r)
   518  	r1 = walkExpr(r1, init)
   519  	r1.SetType(n.Type())
   520  
   521  	return r1
   522  }
   523  
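        // hookInfo describes a libfuzzer interception hook: the kind the arguments are
        // converted to, the number of arguments expected, and the runtime function to call.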
   524  type hookInfo struct {
   525  	paramType   types.Kind
   526  	argsNum     int
   527  	runtimeFunc string
   528  }
   529  
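        // hooks maps fully qualified function names to the libfuzzer hooks that
        // walkCall1 inserts when base.Debug.Libfuzzer is enabled.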
   530  var hooks = map[string]hookInfo{
   531  	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
   532  }
   533  
   534  // walkCall walks an OCALLFUNC or OCALLINTER node.
   535  func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
   536  	if n.Op() == ir.OCALLMETH {
   537  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   538  	}
   539  	if n.Op() == ir.OCALLINTER || n.Fun.Op() == ir.OMETHEXPR {
   540  		// We expect both interface call reflect.Type.Method and concrete
   541  		// call reflect.(*rtype).Method.
   542  		usemethod(n)
   543  	}
   544  	if n.Op() == ir.OCALLINTER {
   545  		reflectdata.MarkUsedIfaceMethod(n)
   546  	}
   547  
   548  	if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.OCLOSURE {
   549  		directClosureCall(n)
   550  	}
   551  
   552  	if ir.IsFuncPCIntrinsic(n) {
   553  		// For github.com/go-asm/go/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
   554  		// it to the address of the function, for the ABI named by the intrinsic.
   555  		name := n.Fun.(*ir.Name).Sym().Name
   556  		arg := n.Args[0]
   557  		var wantABI obj.ABI
   558  		switch name {
   559  		case "FuncPCABI0":
   560  			wantABI = obj.ABI0
   561  		case "FuncPCABIInternal":
   562  			wantABI = obj.ABIInternal
   563  		}
   564  		if n.Type() != types.Types[types.TUINTPTR] {
   565  			base.FatalfAt(n.Pos(), "FuncPC intrinsic should return uintptr, got %v", n.Type()) // as expected by typecheck.FuncPC.
   566  		}
   567  		n := ir.FuncPC(n.Pos(), arg, wantABI)
   568  		return walkExpr(n, init)
   569  	}
   570  
   571  	if name, ok := n.Fun.(*ir.Name); ok {
   572  		sym := name.Sym()
   573  		if sym.Pkg.Path == "go.runtime" && sym.Name == "deferrangefunc" {
   574  			// Call to runtime.deferrangefunc is being shared with a range-over-func
   575  			// body that might add defers to this frame, so we cannot use open-coded defers
   576  			// and we need to call deferreturn even if we don't see any other explicit defers.
   577  			ir.CurFunc.SetHasDefer(true)
   578  			ir.CurFunc.SetOpenCodedDeferDisallowed(true)
   579  		}
   580  	}
   581  
   582  	walkCall1(n, init)
   583  	return n
   584  }
   585  
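        // walkCall1 walks the call's function and argument expressions, spilling any
        // argument that may itself contain a function call into a temporary, and inserts
        // libfuzzer hooks when enabled. Calls that have already been walked are left alone.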
   586  func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
   587  	if n.Walked() {
   588  		return // already walked
   589  	}
   590  	n.SetWalked(true)
   591  
   592  	if n.Op() == ir.OCALLMETH {
   593  		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
   594  	}
   595  
   596  	args := n.Args
   597  	params := n.Fun.Type().Params()
   598  
   599  	n.Fun = walkExpr(n.Fun, init)
   600  	walkExprList(args, init)
   601  
   602  	for i, arg := range args {
   603  		// Validate argument and parameter types match.
   604  		param := params[i]
   605  		if !types.Identical(arg.Type(), param.Type) {
   606  			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
   607  		}
   608  
   609  		// For any argument whose evaluation might require a function call,
   610  		// store that argument into a temporary variable,
   611  		// to prevent such calls from clobbering arguments already on the stack.
   612  		if mayCall(arg) {
   613  			// assignment of arg to Temp
   614  			tmp := typecheck.TempAt(base.Pos, ir.CurFunc, param.Type)
   615  			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
   616  			// replace arg with temp
   617  			args[i] = tmp
   618  		}
   619  	}
   620  
   621  	funSym := n.Fun.Sym()
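        	// When libfuzzer instrumentation is enabled, also invoke the matching
        	// interception hook with the call's arguments plus a fake PC.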
   622  	if base.Debug.Libfuzzer != 0 && funSym != nil {
   623  		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
   624  			if len(args) != hook.argsNum {
   625  				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
   626  			}
   627  			var hookArgs []ir.Node
   628  			for _, arg := range args {
   629  				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
   630  			}
   631  			hookArgs = append(hookArgs, fakePC(n))
   632  			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
   633  		}
   634  	}
   635  }
   636  
   637  // walkDivMod walks an ODIV or OMOD node.
   638  func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
   639  	n.X = walkExpr(n.X, init)
   640  	n.Y = walkExpr(n.Y, init)
   641  
   642  	// rewrite complex div into function call.
   643  	et := n.X.Type().Kind()
   644  
   645  	if types.IsComplex[et] && n.Op() == ir.ODIV {
   646  		t := n.Type()
   647  		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
   648  		return typecheck.Conv(call, t)
   649  	}
   650  
   651  	// Nothing to do for float divisions.
   652  	if types.IsFloat[et] {
   653  		return n
   654  	}
   655  
   656  	// rewrite 64-bit div and mod on 32-bit architectures.
   657  	// TODO: Remove this code once we can introduce
   658  	// runtime calls late in SSA processing.
   659  	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
   660  		if n.Y.Op() == ir.OLITERAL {
   661  			// Leave div/mod by constant powers of 2 or small 16-bit constants.
   662  			// The SSA backend will handle those.
   663  			switch et {
   664  			case types.TINT64:
   665  				c := ir.Int64Val(n.Y)
   666  				if c < 0 {
   667  					c = -c
   668  				}
   669  				if c != 0 && c&(c-1) == 0 {
   670  					return n
   671  				}
   672  			case types.TUINT64:
   673  				c := ir.Uint64Val(n.Y)
   674  				if c < 1<<16 {
   675  					return n
   676  				}
   677  				if c != 0 && c&(c-1) == 0 {
   678  					return n
   679  				}
   680  			}
   681  		}
   682  		var fn string
   683  		if et == types.TINT64 {
   684  			fn = "int64"
   685  		} else {
   686  			fn = "uint64"
   687  		}
   688  		if n.Op() == ir.ODIV {
   689  			fn += "div"
   690  		} else {
   691  			fn += "mod"
   692  		}
   693  		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
   694  	}
   695  	return n
   696  }
   697  
   698  // walkDot walks an ODOT or ODOTPTR node.
   699  func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
   700  	usefield(n)
   701  	n.X = walkExpr(n.X, init)
   702  	return n
   703  }
   704  
   705  // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
   706  func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
   707  	n.X = walkExpr(n.X, init)
   708  	// Set up interface type addresses for back end.
   709  	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
   710  		n.ITab = reflectdata.ITabAddrAt(base.Pos, n.Type(), n.X.Type())
   711  	}
   712  	if n.X.Type().IsInterface() && n.Type().IsInterface() && !n.Type().IsEmptyInterface() {
   713  		// This kind of conversion needs a runtime call. Allocate
   714  		// a descriptor for that call.
   715  		n.Descriptor = makeTypeAssertDescriptor(n.Type(), n.Op() == ir.ODOTTYPE2)
   716  	}
   717  	return n
   718  }
   719  
   720  func makeTypeAssertDescriptor(target *types.Type, canFail bool) *obj.LSym {
   721  	// Converting from an interface to a non-empty interface needs a runtime call.
   722  	// Allocate a github.com/go-asm/go/abi.TypeAssert descriptor for that call.
   723  	lsym := types.LocalPkg.Lookup(fmt.Sprintf(".typeAssert.%d", typeAssertGen)).LinksymABI(obj.ABI0)
   724  	typeAssertGen++
   725  	c := rttype.NewCursor(lsym, 0, rttype.TypeAssert)
   726  	c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyTypeAssertCache"))
   727  	c.Field("Inter").WritePtr(reflectdata.TypeSym(target).Linksym())
   728  	c.Field("CanFail").WriteBool(canFail)
   729  	objw.Global(lsym, int32(rttype.TypeAssert.Size()), obj.LOCAL)
   730  	lsym.Gotype = reflectdata.TypeLinksym(rttype.TypeAssert)
   731  	return lsym
   732  }
   733  
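        // typeAssertGen is a counter used by makeTypeAssertDescriptor to give each
        // type-assertion descriptor symbol a unique name.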
   734  var typeAssertGen int
   735  
   736  // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
   737  func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
   738  	n.X = walkExpr(n.X, init)
   739  	n.RType = walkExpr(n.RType, init)
   740  	n.ITab = walkExpr(n.ITab, init)
   741  	// Convert to non-dynamic if we can.
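        	// If the RType operand is the address of a link-time constant symbol, the target
        	// type is statically known, so a plain ODOTTYPE/ODOTTYPE2 assertion suffices.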
   742  	if n.RType != nil && n.RType.Op() == ir.OADDR {
   743  		addr := n.RType.(*ir.AddrExpr)
   744  		if addr.X.Op() == ir.OLINKSYMOFFSET {
   745  			r := ir.NewTypeAssertExpr(n.Pos(), n.X, n.Type())
   746  			if n.Op() == ir.ODYNAMICDOTTYPE2 {
   747  				r.SetOp(ir.ODOTTYPE2)
   748  			}
   749  			r.SetType(n.Type())
   750  			r.SetTypecheck(1)
   751  			return walkExpr(r, init)
   752  		}
   753  	}
   754  	return n
   755  }
   756  
   757  // walkIndex walks an OINDEX node.
   758  func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
   759  	n.X = walkExpr(n.X, init)
   760  
   761  	// save the original node for bounds checking elision.
   762  	// If it was an ODIV/OMOD, walk might rewrite it.
   763  	r := n.Index
   764  
   765  	n.Index = walkExpr(n.Index, init)
   766  
   767  	// if range of type cannot exceed static array bound,
   768  	// disable bounds check.
   769  	if n.Bounded() {
   770  		return n
   771  	}
   772  	t := n.X.Type()
   773  	if t != nil && t.IsPtr() {
   774  		t = t.Elem()
   775  	}
   776  	if t.IsArray() {
   777  		n.SetBounded(bounded(r, t.NumElem()))
   778  		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
   779  			base.Warn("index bounds check elided")
   780  		}
   781  	} else if ir.IsConst(n.X, constant.String) {
   782  		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
   783  		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
   784  			base.Warn("index bounds check elided")
   785  		}
   786  	}
   787  	return n
   788  }
   789  
   790  // mapKeyArg returns an expression for key that is suitable to be passed
   791  // as the key argument for runtime map* functions.
   792  // n is the map indexing or delete Node (to provide Pos).
   793  func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
   794  	if fast == mapslow {
   795  		// standard version takes key by reference.
   796  		// orderState.expr made sure key is addressable.
   797  		return typecheck.NodAddr(key)
   798  	}
   799  	if assigned {
   800  		// mapassign does distinguish pointer vs. integer key.
   801  		return key
   802  	}
   803  	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
   804  	switch fast {
   805  	case mapfast32ptr:
   806  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
   807  	case mapfast64ptr:
   808  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
   809  	default:
   810  		// fast version takes key by value.
   811  		return key
   812  	}
   813  }
   814  
   815  // walkIndexMap walks an OINDEXMAP node.
   816  // It replaces m[k] with *map{access1,assign}(maptype, m, &k)
   817  func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
   818  	n.X = walkExpr(n.X, init)
   819  	n.Index = walkExpr(n.Index, init)
   820  	map_ := n.X
   821  	t := map_.Type()
   822  	fast := mapfast(t)
   823  	key := mapKeyArg(fast, n, n.Index, n.Assigned)
   824  	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}
   825  
   826  	var mapFn ir.Node
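        	// Choose the runtime helper: mapassign for assignments, mapaccess1_fat (with a
        	// pointer to a shared zero value) for large elements, and a key-specialized
        	// mapaccess1 variant otherwise.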
   827  	switch {
   828  	case n.Assigned:
   829  		mapFn = mapfn(mapassign[fast], t, false)
   830  	case t.Elem().Size() > abi.ZeroValSize:
   831  		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
   832  		mapFn = mapfn("mapaccess1_fat", t, true)
   833  	default:
   834  		mapFn = mapfn(mapaccess1[fast], t, false)
   835  	}
   836  	call := mkcall1(mapFn, nil, init, args...)
   837  	call.SetType(types.NewPtr(t.Elem()))
   838  	call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
   839  	star := ir.NewStarExpr(base.Pos, call)
   840  	star.SetType(t.Elem())
   841  	star.SetTypecheck(1)
   842  	return star
   843  }
   844  
   845  // walkLogical walks an OANDAND or OOROR node.
   846  func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
   847  	n.X = walkExpr(n.X, init)
   848  
   849  	// cannot put side effects from n.Right on init,
   850  	// because they cannot run before n.Left is checked.
   851  	// save elsewhere and store on the eventual n.Right.
   852  	var ll ir.Nodes
   853  
   854  	n.Y = walkExpr(n.Y, &ll)
   855  	n.Y = ir.InitExpr(ll, n.Y)
   856  	return n
   857  }
   858  
   859  // walkSend walks an OSEND node.
   860  func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
   861  	n1 := n.Value
   862  	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
   863  	n1 = walkExpr(n1, init)
   864  	n1 = typecheck.NodAddr(n1)
   865  	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
   866  }
   867  
   868  // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
   869  func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
   870  	n.X = walkExpr(n.X, init)
   871  	n.Low = walkExpr(n.Low, init)
   872  	if n.Low != nil && ir.IsZero(n.Low) {
   873  		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
   874  		n.Low = nil
   875  	}
   876  	n.High = walkExpr(n.High, init)
   877  	n.Max = walkExpr(n.Max, init)
   878  
   879  	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
   880  		// Reduce x[:] to x.
   881  		if base.Debug.Slice > 0 {
   882  			base.Warn("slice: omit slice operation")
   883  		}
   884  		return n.X
   885  	}
   886  	return n
   887  }
   888  
   889  // walkSliceHeader walks an OSLICEHEADER node.
   890  func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
   891  	n.Ptr = walkExpr(n.Ptr, init)
   892  	n.Len = walkExpr(n.Len, init)
   893  	n.Cap = walkExpr(n.Cap, init)
   894  	return n
   895  }
   896  
   897  // walkStringHeader walks an OSTRINGHEADER node.
   898  func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
   899  	n.Ptr = walkExpr(n.Ptr, init)
   900  	n.Len = walkExpr(n.Len, init)
   901  	return n
   902  }
   903  
   904  // bounded reports whether the integer n must be in range [0, max).
   905  func bounded(n ir.Node, max int64) bool {
   906  	if n.Type() == nil || !n.Type().IsInteger() {
   907  		return false
   908  	}
   909  
   910  	sign := n.Type().IsSigned()
   911  	bits := int32(8 * n.Type().Size())
   912  
   913  	if ir.IsSmallIntConst(n) {
   914  		v := ir.Int64Val(n)
   915  		return 0 <= v && v < max
   916  	}
   917  
   918  	switch n.Op() {
   919  	case ir.OAND, ir.OANDNOT:
   920  		n := n.(*ir.BinaryExpr)
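        		// v is an upper bound on the result taken from a constant operand;
        		// -1 means no constant operand was found.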
   921  		v := int64(-1)
   922  		switch {
   923  		case ir.IsSmallIntConst(n.X):
   924  			v = ir.Int64Val(n.X)
   925  		case ir.IsSmallIntConst(n.Y):
   926  			v = ir.Int64Val(n.Y)
   927  			if n.Op() == ir.OANDNOT {
   928  				v = ^v
   929  				if !sign {
   930  					v &= 1<<uint(bits) - 1
   931  				}
   932  			}
   933  		}
   934  		if 0 <= v && v < max {
   935  			return true
   936  		}
   937  
   938  	case ir.OMOD:
   939  		n := n.(*ir.BinaryExpr)
   940  		if !sign && ir.IsSmallIntConst(n.Y) {
   941  			v := ir.Int64Val(n.Y)
   942  			if 0 <= v && v <= max {
   943  				return true
   944  			}
   945  		}
   946  
   947  	case ir.ODIV:
   948  		n := n.(*ir.BinaryExpr)
   949  		if !sign && ir.IsSmallIntConst(n.Y) {
   950  			v := ir.Int64Val(n.Y)
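        			// Dividing an unsigned value by a constant v discards floor(log2(v))
        			// significant bits from the result.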
   951  			for bits > 0 && v >= 2 {
   952  				bits--
   953  				v >>= 1
   954  			}
   955  		}
   956  
   957  	case ir.ORSH:
   958  		n := n.(*ir.BinaryExpr)
   959  		if !sign && ir.IsSmallIntConst(n.Y) {
   960  			v := ir.Int64Val(n.Y)
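        			// A shift of more than the full width always yields 0; otherwise the
        			// shift discards v significant bits.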
   961  			if v > int64(bits) {
   962  				return true
   963  			}
   964  			bits -= int32(v)
   965  		}
   966  	}
   967  
   968  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
   969  		return true
   970  	}
   971  
   972  	return false
   973  }
   974  
   975  // usemethod checks calls for uses of Method and MethodByName of reflect.Value,
   976  // reflect.Type, reflect.(*rtype), and reflect.(*interfaceType).
   977  func usemethod(n *ir.CallExpr) {
   978  	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
   979  	// Those functions may be alive via the itab, which should not cause all methods
   980  	// to be kept alive. We only want to mark their callers.
   981  	if base.Ctxt.Pkgpath == "reflect" {
   982  		// TODO: is there a better way than hardcoding the names?
   983  		switch fn := ir.CurFunc.Nname.Sym().Name; {
   984  		case fn == "(*rtype).Method", fn == "(*rtype).MethodByName":
   985  			return
   986  		case fn == "(*interfaceType).Method", fn == "(*interfaceType).MethodByName":
   987  			return
   988  		case fn == "Value.Method", fn == "Value.MethodByName":
   989  			return
   990  		}
   991  	}
   992  
   993  	dot, ok := n.Fun.(*ir.SelectorExpr)
   994  	if !ok {
   995  		return
   996  	}
   997  
   998  	// looking for either direct method calls or interface method calls of:
   999  	//	reflect.Type.Method        - func(int) reflect.Method
  1000  	//	reflect.Type.MethodByName  - func(string) (reflect.Method, bool)
  1001  	//
  1002  	//	reflect.Value.Method       - func(int) reflect.Value
  1003  	//	reflect.Value.MethodByName - func(string) reflect.Value
  1004  	methodName := dot.Sel.Name
  1005  	t := dot.Selection.Type
  1006  
  1007  	// Check the number of arguments and return values.
  1008  	if t.NumParams() != 1 || (t.NumResults() != 1 && t.NumResults() != 2) {
  1009  		return
  1010  	}
  1011  
  1012  	// Check the type of the argument.
  1013  	switch pKind := t.Param(0).Type.Kind(); {
  1014  	case methodName == "Method" && pKind == types.TINT,
  1015  		methodName == "MethodByName" && pKind == types.TSTRING:
  1016  
  1017  	default:
  1018  		// not a call to Method or MethodByName of reflect.{Type,Value}.
  1019  		return
  1020  	}
  1021  
  1022  	// Check that first result type is "reflect.Method" or "reflect.Value".
  1023  	// Note that we have to check sym name and sym package separately, as
  1024  	// we can't check for exact string "reflect.Method" reliably
  1025  	// (e.g., see #19028 and #38515).
  1026  	switch s := t.Result(0).Type.Sym(); {
  1027  	case s != nil && types.ReflectSymName(s) == "Method",
  1028  		s != nil && types.ReflectSymName(s) == "Value":
  1029  
  1030  	default:
  1031  		// not a call to Method or MethodByName of reflect.{Type,Value}.
  1032  		return
  1033  	}
  1034  
  1035  	var targetName ir.Node
  1036  	switch dot.Op() {
  1037  	case ir.ODOTINTER:
  1038  		if methodName == "MethodByName" {
  1039  			targetName = n.Args[0]
  1040  		}
  1041  	case ir.OMETHEXPR:
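        		// Method expression call: the receiver is Args[0], so the method name is Args[1].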
  1042  		if methodName == "MethodByName" {
  1043  			targetName = n.Args[1]
  1044  		}
  1045  	default:
  1046  		base.FatalfAt(dot.Pos(), "usemethod: unexpected dot.Op() %s", dot.Op())
  1047  	}
  1048  
  1049  	if ir.IsConst(targetName, constant.String) {
  1050  		name := constant.StringVal(targetName.Val())
  1051  
  1052  		r := obj.Addrel(ir.CurFunc.LSym)
  1053  		r.Type = objabi.R_USENAMEDMETHOD
  1054  		r.Sym = staticdata.StringSymNoCommon(name)
  1055  	} else {
  1056  		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
  1057  	}
  1058  }
  1059  
  1060  func usefield(n *ir.SelectorExpr) {
  1061  	if !buildcfg.Experiment.FieldTrack {
  1062  		return
  1063  	}
  1064  
  1065  	switch n.Op() {
  1066  	default:
  1067  		base.Fatalf("usefield %v", n.Op())
  1068  
  1069  	case ir.ODOT, ir.ODOTPTR:
  1070  		break
  1071  	}
  1072  
  1073  	field := n.Selection
  1074  	if field == nil {
  1075  		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
  1076  	}
  1077  	if field.Sym != n.Sel {
  1078  		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
  1079  	}
  1080  	if !strings.Contains(field.Note, "go:\"track\"") {
  1081  		return
  1082  	}
  1083  
  1084  	outer := n.X.Type()
  1085  	if outer.IsPtr() {
  1086  		outer = outer.Elem()
  1087  	}
  1088  	if outer.Sym() == nil {
  1089  		base.Errorf("tracked field must be in named struct type")
  1090  	}
  1091  
  1092  	sym := reflectdata.TrackSym(outer, field)
  1093  	if ir.CurFunc.FieldTrack == nil {
  1094  		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
  1095  	}
  1096  	ir.CurFunc.FieldTrack[sym] = struct{}{}
  1097  }