github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/walk/expr.go (about)

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"github.com/bir3/gocompiler/src/go/constant"
    10  	"github.com/bir3/gocompiler/src/internal/buildcfg"
    11  	"strings"
    12  
    13  	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
    14  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
    15  	"github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata"
    16  	"github.com/bir3/gocompiler/src/cmd/compile/internal/staticdata"
    17  	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
    18  	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
    19  	"github.com/bir3/gocompiler/src/cmd/internal/obj"
    20  )
    21  
// The result of walkExpr MUST be assigned back to n, e.g.
//
//	n.Left = walkExpr(n.Left, init)
//
// walkExpr walks expression n, lowering it toward the forms the back
// end understands, and appends any side-effecting statements it
// extracts to init. It also eagerly computes type sizes and emits
// string symbols so the (concurrent) back end never has to.
func walkExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return n
	}

	if n, ok := n.(ir.InitNode); ok && init == n.PtrInit() {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		base.Fatalf("walkExpr init == &n->ninit")
	}

	// Hoist n's own init statements into the caller-provided list
	// before walking n itself.
	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	lno := ir.SetPos(n)

	if base.Flag.LowerW > 1 {
		ir.Dump("before walk expr", n)
	}

	if n.Typecheck() != 1 {
		base.Fatalf("missed typecheck: %+v", n)
	}

	if n.Type().IsUntyped() {
		base.Fatalf("expression has untyped type: %+v", n)
	}

	n = walkExpr1(n, init)

	// Eagerly compute sizes of all expressions for the back end.
	if typ := n.Type(); typ != nil && typ.Kind() != types.TBLANK && !typ.IsFuncArgStruct() {
		types.CheckSize(typ)
	}
	if n, ok := n.(*ir.Name); ok && n.Heapaddr != nil {
		types.CheckSize(n.Heapaddr.Type())
	}
	if ir.IsConst(n, constant.String) {
		// Emit string symbol now to avoid emitting
		// any concurrently during the backend.
		_ = staticdata.StringSym(n.Pos(), constant.StringVal(n.Val()))
	}

	if base.Flag.LowerW != 0 && n != nil {
		ir.Dump("after walk expr", n)
	}

	// Restore the position saved by ir.SetPos above.
	base.Pos = lno
	return n
}
    78  
// walkExpr1 dispatches on n.Op() and performs the per-op rewrite.
// Simple operators just have their operands walked in place; most
// others are handed to a dedicated walkXxx helper that may replace n
// with a runtime call or a simpler node. Side effects go to init.
func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
	switch n.Op() {
	default:
		ir.Dump("walk", n)
		base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
		panic("unreachable")

	case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
		return n

	case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// TODO(mdempsky): Just return n; see discussion on CL 38655.
		// Perhaps refactor to use Node.mayBeShared for these instead.
		// If these return early, make sure to still call
		// StringSym for constant strings.
		return n

	case ir.OMETHEXPR:
		// TODO(mdempsky): Do this right after type checking.
		n := n.(*ir.SelectorExpr)
		return n.FuncName()

	// Unary operators: just walk the operand.
	case ir.ONOT, ir.ONEG, ir.OPLUS, ir.OBITNOT, ir.OREAL, ir.OIMAG, ir.OSPTR, ir.OITAB, ir.OIDATA:
		n := n.(*ir.UnaryExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODOTMETH, ir.ODOTINTER:
		n := n.(*ir.SelectorExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.OADDR:
		n := n.(*ir.AddrExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		n.X = walkExpr(n.X, init)
		return n

	// Binary operators with no special lowering: walk both operands.
	case ir.OEFACE, ir.OAND, ir.OANDNOT, ir.OSUB, ir.OMUL, ir.OADD, ir.OOR, ir.OXOR, ir.OLSH, ir.ORSH,
		ir.OUNSAFEADD:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OUNSAFESLICE:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeSlice(n, init)

	case ir.OUNSAFESTRING:
		n := n.(*ir.BinaryExpr)
		return walkUnsafeString(n, init)

	case ir.OUNSAFESTRINGDATA, ir.OUNSAFESLICEDATA:
		n := n.(*ir.UnaryExpr)
		return walkUnsafeData(n, init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		return walkDot(n, init)

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		return walkDotType(n, init)

	case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
		n := n.(*ir.DynamicTypeAssertExpr)
		return walkDynamicDotType(n, init)

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		return walkLenCap(n, init)

	case ir.OCOMPLEX:
		n := n.(*ir.BinaryExpr)
		n.X = walkExpr(n.X, init)
		n.Y = walkExpr(n.Y, init)
		return n

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		return walkCompare(n, init)

	case ir.OANDAND, ir.OOROR:
		n := n.(*ir.LogicalExpr)
		return walkLogical(n, init)

	case ir.OPRINT, ir.OPRINTN:
		return walkPrint(n.(*ir.CallExpr), init)

	case ir.OPANIC:
		n := n.(*ir.UnaryExpr)
		return mkcall("gopanic", nil, init, n.X)

	case ir.ORECOVERFP:
		return walkRecoverFP(n.(*ir.CallExpr), init)

	case ir.OCFUNC:
		return n

	case ir.OCALLINTER, ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		return walkCall(n, init)

	case ir.OAS, ir.OASOP:
		return walkAssign(init, n)

	case ir.OAS2:
		n := n.(*ir.AssignListStmt)
		return walkAssignList(init, n)

	// a,b,... = fn()
	case ir.OAS2FUNC:
		n := n.(*ir.AssignListStmt)
		return walkAssignFunc(init, n)

	// x, y = <-c
	// order.stmt made sure x is addressable or blank.
	case ir.OAS2RECV:
		n := n.(*ir.AssignListStmt)
		return walkAssignRecv(init, n)

	// a,b = m[i]
	case ir.OAS2MAPR:
		n := n.(*ir.AssignListStmt)
		return walkAssignMapRead(init, n)

	case ir.ODELETE:
		n := n.(*ir.CallExpr)
		return walkDelete(init, n)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		return walkAssignDotType(n, init)

	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		return walkConvInterface(n, init)

	case ir.OCONVIDATA:
		n := n.(*ir.ConvExpr)
		return walkConvIData(n, init)

	case ir.OCONV, ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		return walkConv(n, init)

	case ir.OSLICE2ARR:
		n := n.(*ir.ConvExpr)
		return walkSliceToArray(n, init)

	case ir.OSLICE2ARRPTR:
		n := n.(*ir.ConvExpr)
		n.X = walkExpr(n.X, init)
		return n

	case ir.ODIV, ir.OMOD:
		n := n.(*ir.BinaryExpr)
		return walkDivMod(n, init)

	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		return walkIndex(n, init)

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		return walkIndexMap(n, init)

	case ir.ORECV:
		base.Fatalf("walkExpr ORECV") // should see inside OAS only
		panic("unreachable")

	case ir.OSLICEHEADER:
		n := n.(*ir.SliceHeaderExpr)
		return walkSliceHeader(n, init)

	case ir.OSTRINGHEADER:
		n := n.(*ir.StringHeaderExpr)
		return walkStringHeader(n, init)

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		return walkSlice(n, init)

	case ir.ONEW:
		n := n.(*ir.UnaryExpr)
		return walkNew(n, init)

	case ir.OADDSTR:
		return walkAddString(n.(*ir.AddStringExpr), init)

	case ir.OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		base.Fatalf("append outside assignment")
		panic("unreachable")

	case ir.OCOPY:
		return walkCopy(n.(*ir.BinaryExpr), init, base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime)

	case ir.OCLOSE:
		n := n.(*ir.UnaryExpr)
		return walkClose(n, init)

	case ir.OMAKECHAN:
		n := n.(*ir.MakeExpr)
		return walkMakeChan(n, init)

	case ir.OMAKEMAP:
		n := n.(*ir.MakeExpr)
		return walkMakeMap(n, init)

	case ir.OMAKESLICE:
		n := n.(*ir.MakeExpr)
		return walkMakeSlice(n, init)

	case ir.OMAKESLICECOPY:
		n := n.(*ir.MakeExpr)
		return walkMakeSliceCopy(n, init)

	case ir.ORUNESTR:
		n := n.(*ir.ConvExpr)
		return walkRuneToString(n, init)

	case ir.OBYTES2STR, ir.ORUNES2STR:
		n := n.(*ir.ConvExpr)
		return walkBytesRunesToString(n, init)

	case ir.OBYTES2STRTMP:
		n := n.(*ir.ConvExpr)
		return walkBytesToStringTemp(n, init)

	case ir.OSTR2BYTES:
		n := n.(*ir.ConvExpr)
		return walkStringToBytes(n, init)

	case ir.OSTR2BYTESTMP:
		n := n.(*ir.ConvExpr)
		return walkStringToBytesTemp(n, init)

	case ir.OSTR2RUNES:
		n := n.(*ir.ConvExpr)
		return walkStringToRunes(n, init)

	case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
		return walkCompLit(n, init)

	case ir.OSEND:
		n := n.(*ir.SendStmt)
		return walkSend(n, init)

	case ir.OCLOSURE:
		return walkClosure(n.(*ir.ClosureExpr), init)

	case ir.OMETHVALUE:
		return walkMethodValue(n.(*ir.SelectorExpr), init)
	}

	// No return! Each case must return (or panic),
	// to avoid confusion about what gets returned
	// in the presence of type assertions.
}
   344  
   345  // walk the whole tree of the body of an
   346  // expression or simple statement.
   347  // the types expressions are calculated.
   348  // compile-time constants are evaluated.
   349  // complex side effects like statements are appended to init.
   350  func walkExprList(s []ir.Node, init *ir.Nodes) {
   351  	for i := range s {
   352  		s[i] = walkExpr(s[i], init)
   353  	}
   354  }
   355  
   356  func walkExprListCheap(s []ir.Node, init *ir.Nodes) {
   357  	for i, n := range s {
   358  		s[i] = cheapExpr(n, init)
   359  		s[i] = walkExpr(s[i], init)
   360  	}
   361  }
   362  
   363  func walkExprListSafe(s []ir.Node, init *ir.Nodes) {
   364  	for i, n := range s {
   365  		s[i] = safeExpr(n, init)
   366  		s[i] = walkExpr(s[i], init)
   367  	}
   368  }
   369  
   370  // return side-effect free and cheap n, appending side effects to init.
   371  // result may not be assignable.
   372  func cheapExpr(n ir.Node, init *ir.Nodes) ir.Node {
   373  	switch n.Op() {
   374  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   375  		return n
   376  	}
   377  
   378  	return copyExpr(n, n.Type(), init)
   379  }
   380  
// safeExpr returns a side effect-free equivalent of n, appending side
// effects to init. The result is assignable if n is. For addressable
// shapes (field/index/deref chains) it recursively makes the base
// expression safe and only allocates a copy of the node when the base
// actually changed; otherwise the original node is reused.
func safeExpr(n ir.Node, init *ir.Nodes) ir.Node {
	if n == nil {
		return nil
	}

	// Hoist n's own init statements first, as walkExpr would.
	if len(n.Init()) != 0 {
		walkStmtList(n.Init())
		init.Append(ir.TakeInit(n)...)
	}

	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
		// Already side effect-free.
		return n

	case ir.OLEN, ir.OCAP:
		n := n.(*ir.UnaryExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODOT, ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		l := safeExpr(n.X, init)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return walkExpr(typecheck.Expr(a), init)

	case ir.OINDEX, ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		l := safeExpr(n.X, init)
		r := safeExpr(n.Index, init)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return walkExpr(typecheck.Expr(a), init)

	case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		if isStaticCompositeLiteral(n) {
			// Fully constant literal: no side effects possible.
			return n
		}
	}

	// make a copy; must not be used as an lvalue
	if ir.IsAddressable(n) {
		base.Fatalf("missing lvalue case in safeExpr: %v", n)
	}
	return cheapExpr(n, init)
}
   452  
   453  func copyExpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
   454  	l := typecheck.Temp(t)
   455  	appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
   456  	return l
   457  }
   458  
// walkAddString lowers an OADDSTR (string concatenation) node into a
// call to one of the runtime concatstring helpers. For a
// non-escaping result whose constant parts fit, a stack buffer is
// passed as the first argument so the runtime can avoid allocating.
func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
	c := len(n.List)

	if c < 2 {
		base.Fatalf("walkAddString count %d too small", c)
	}

	buf := typecheck.NodNil()
	if n.Esc() == ir.EscNone {
		// Sum the sizes of the constant operands; non-constant
		// operands contribute an unknown amount, so this is a
		// lower bound on the result length.
		sz := int64(0)
		for _, n1 := range n.List {
			if n1.Op() == ir.OLITERAL {
				sz += int64(len(ir.StringVal(n1)))
			}
		}

		// Don't allocate the buffer if the result won't fit.
		if sz < tmpstringbufsize {
			// Create temporary buffer for result string on stack.
			buf = stackBufAddr(tmpstringbufsize, types.Types[types.TUINT8])
		}
	}

	// build list of string arguments
	args := []ir.Node{buf}
	for _, n2 := range n.List {
		args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
	}

	var fn string
	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		// note: order.expr knows this cutoff too.
		fn = fmt.Sprintf("concatstring%d", c)
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = "concatstrings"

		t := types.NewSlice(types.Types[types.TSTRING])
		// args[1:] to skip buf arg
		slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, t, args[1:])
		slice.Prealloc = n.Prealloc
		args = []ir.Node{buf, slice}
		slice.SetEsc(ir.EscNone)
	}

	cat := typecheck.LookupRuntime(fn)
	r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
	r.Args = args
	r1 := typecheck.Expr(r)
	r1 = walkExpr(r1, init)
	// Preserve the original (possibly named) string type.
	r1.SetType(n.Type())

	return r1
}
   514  
// hookInfo describes a libfuzzer instrumentation hook for a known
// library function: the kind its arguments are converted to, the
// number of arguments the call is expected to have, and the runtime
// function that records the comparison.
type hookInfo struct {
	paramType   types.Kind
	argsNum     int
	runtimeFunc string
}

// hooks maps fully qualified function names (pkgpath.Name) to the
// libfuzzer hook inserted after calls to them; see walkCall1.
var hooks = map[string]hookInfo{
	"strings.EqualFold": {paramType: types.TSTRING, argsNum: 2, runtimeFunc: "libfuzzerHookEqualFold"},
}
   524  
// walkCall walks an OCALLFUNC or OCALLINTER node. Besides the generic
// argument walking done by walkCall1, it records reflect
// Method/MethodByName usage, marks used interface methods for the
// linker, rewrites directly-called closures, and expands the
// internal/abi.FuncPCABIxxx intrinsics.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}
	if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR {
		// We expect both interface call reflect.Type.Method and concrete
		// call reflect.(*rtype).Method.
		usemethod(n)
	}
	if n.Op() == ir.OCALLINTER {
		reflectdata.MarkUsedIfaceMethod(n)
	}

	if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.OCLOSURE {
		directClosureCall(n)
	}

	if isFuncPCIntrinsic(n) {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function, rewrite
		// it to the address of the function of the ABI fn is defined.
		name := n.X.(*ir.Name).Sym().Name
		arg := n.Args[0]
		var wantABI obj.ABI
		switch name {
		case "FuncPCABI0":
			wantABI = obj.ABI0
		case "FuncPCABIInternal":
			wantABI = obj.ABIInternal
		}
		if isIfaceOfFunc(arg) {
			// Statically known function: take the address of its
			// link symbol for the requested ABI directly.
			fn := arg.(*ir.ConvExpr).X.(*ir.Name)
			abi := fn.Func.ABI
			if abi != wantABI {
				base.ErrorfAt(n.Pos(), "internal/abi.%s expects an %v function, %s is defined as %v", name, wantABI, fn.Sym().Name, abi)
			}
			var e ir.Node = ir.NewLinksymExpr(n.Pos(), fn.Sym().LinksymABI(abi), types.Types[types.TUINTPTR])
			e = ir.NewAddrExpr(n.Pos(), e)
			e.SetType(types.Types[types.TUINTPTR].PtrTo())
			return typecheck.Expr(ir.NewConvExpr(n.Pos(), ir.OCONVNOP, n.Type(), e))
		}
		// fn is not a defined function. It must be ABIInternal.
		// Read the address from func value, i.e. *(*uintptr)(idata(fn)).
		if wantABI != obj.ABIInternal {
			base.ErrorfAt(n.Pos(), "internal/abi.%s does not accept func expression, which is ABIInternal", name)
		}
		arg = walkExpr(arg, init)
		var e ir.Node = ir.NewUnaryExpr(n.Pos(), ir.OIDATA, arg)
		e.SetType(n.Type().PtrTo())
		e.SetTypecheck(1)
		e = ir.NewStarExpr(n.Pos(), e)
		e.SetType(n.Type())
		e.SetTypecheck(1)
		return e
	}

	walkCall1(n, init)
	return n
}
   584  
// walkCall1 does the generic lowering for a call: it walks the callee
// and arguments, spills arguments that might themselves call into
// temporaries (so nested calls can't clobber arguments already placed
// on the stack), and, under -d=libfuzzer, inserts instrumentation
// hooks for known functions. It is idempotent via the Walked flag.
func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
	if n.Walked() {
		return // already walked
	}
	n.SetWalked(true)

	if n.Op() == ir.OCALLMETH {
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
	}

	args := n.Args
	params := n.X.Type().Params()

	n.X = walkExpr(n.X, init)
	walkExprList(args, init)

	for i, arg := range args {
		// Validate argument and parameter types match.
		param := params.Field(i)
		if !types.Identical(arg.Type(), param.Type) {
			base.FatalfAt(n.Pos(), "assigning %L to parameter %v (type %v)", arg, param.Sym, param.Type)
		}

		// For any argument whose evaluation might require a function call,
		// store that argument into a temporary variable,
		// to prevent that calls from clobbering arguments already on the stack.
		if mayCall(arg) {
			// assignment of arg to Temp
			tmp := typecheck.Temp(param.Type)
			init.Append(convas(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, arg)).(*ir.AssignStmt), init))
			// replace arg with temp
			args[i] = tmp
		}
	}

	n.Args = args
	funSym := n.X.Sym()
	// Libfuzzer instrumentation: after the call, report the argument
	// values of hooked functions (see the hooks map) to the runtime.
	if base.Debug.Libfuzzer != 0 && funSym != nil {
		if hook, found := hooks[funSym.Pkg.Path+"."+funSym.Name]; found {
			if len(args) != hook.argsNum {
				panic(fmt.Sprintf("%s.%s expects %d arguments, but received %d", funSym.Pkg.Path, funSym.Name, hook.argsNum, len(args)))
			}
			var hookArgs []ir.Node
			for _, arg := range args {
				hookArgs = append(hookArgs, tracecmpArg(arg, types.Types[hook.paramType], init))
			}
			hookArgs = append(hookArgs, fakePC(n))
			init.Append(mkcall(hook.runtimeFunc, nil, init, hookArgs...))
		}
	}
}
   636  
// walkDivMod walks an ODIV or OMOD node. Complex division becomes a
// runtime call; float division is left alone; and on 32-bit targets,
// 64-bit integer div/mod is rewritten to a runtime call unless the
// divisor is a constant the SSA backend can handle itself.
func walkDivMod(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)

	// rewrite complex div into function call.
	et := n.X.Type().Kind()

	if types.IsComplex[et] && n.Op() == ir.ODIV {
		t := n.Type()
		call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
		// Convert the complex128 result back to the original type.
		return typecheck.Conv(call, t)
	}

	// Nothing to do for float divisions.
	if types.IsFloat[et] {
		return n
	}

	// rewrite 64-bit div and mod on 32-bit architectures.
	// TODO: Remove this code once we can introduce
	// runtime calls late in SSA processing.
	if types.RegSize < 8 && (et == types.TINT64 || et == types.TUINT64) {
		if n.Y.Op() == ir.OLITERAL {
			// Leave div/mod by constant powers of 2 or small 16-bit constants.
			// The SSA backend will handle those.
			switch et {
			case types.TINT64:
				c := ir.Int64Val(n.Y)
				if c < 0 {
					c = -c
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			case types.TUINT64:
				c := ir.Uint64Val(n.Y)
				if c < 1<<16 {
					return n
				}
				if c != 0 && c&(c-1) == 0 {
					return n
				}
			}
		}
		// Select the runtime helper: {int64,uint64}{div,mod}.
		var fn string
		if et == types.TINT64 {
			fn = "int64"
		} else {
			fn = "uint64"
		}
		if n.Op() == ir.ODIV {
			fn += "div"
		} else {
			fn += "mod"
		}
		return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
	}
	return n
}
   697  
   698  // walkDot walks an ODOT or ODOTPTR node.
   699  func walkDot(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
   700  	usefield(n)
   701  	n.X = walkExpr(n.X, init)
   702  	return n
   703  }
   704  
   705  // walkDotType walks an ODOTTYPE or ODOTTYPE2 node.
   706  func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
   707  	n.X = walkExpr(n.X, init)
   708  	// Set up interface type addresses for back end.
   709  	if !n.Type().IsInterface() && !n.X.Type().IsEmptyInterface() {
   710  		n.ITab = reflectdata.ITabAddr(n.Type(), n.X.Type())
   711  	}
   712  	return n
   713  }
   714  
   715  // walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
   716  func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
   717  	n.X = walkExpr(n.X, init)
   718  	n.RType = walkExpr(n.RType, init)
   719  	n.ITab = walkExpr(n.ITab, init)
   720  	return n
   721  }
   722  
// walkIndex walks an OINDEX node and tries to prove the index is in
// range (for arrays and constant strings) so the bounds check can be
// elided.
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// save the original node for bounds checking elision.
	// If it was a ODIV/OMOD walk might rewrite it.
	r := n.Index

	n.Index = walkExpr(n.Index, init)

	// if range of type cannot exceed static array bound,
	// disable bounds check.
	if n.Bounded() {
		return n
	}
	t := n.X.Type()
	if t != nil && t.IsPtr() {
		// Indexing through a pointer-to-array.
		t = t.Elem()
	}
	if t.IsArray() {
		n.SetBounded(bounded(r, t.NumElem()))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	} else if ir.IsConst(n.X, constant.String) {
		// Constant string: the length is known at compile time.
		n.SetBounded(bounded(r, int64(len(ir.StringVal(n.X)))))
		if base.Flag.LowerM != 0 && n.Bounded() && !ir.IsConst(n.Index, constant.Int) {
			base.Warn("index bounds check elided")
		}
	}
	return n
}
   755  
   756  // mapKeyArg returns an expression for key that is suitable to be passed
   757  // as the key argument for runtime map* functions.
   758  // n is is the map indexing or delete Node (to provide Pos).
   759  func mapKeyArg(fast int, n, key ir.Node, assigned bool) ir.Node {
   760  	if fast == mapslow {
   761  		// standard version takes key by reference.
   762  		// orderState.expr made sure key is addressable.
   763  		return typecheck.NodAddr(key)
   764  	}
   765  	if assigned {
   766  		// mapassign does distinguish pointer vs. integer key.
   767  		return key
   768  	}
   769  	// mapaccess and mapdelete don't distinguish pointer vs. integer key.
   770  	switch fast {
   771  	case mapfast32ptr:
   772  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT32], key)
   773  	case mapfast64ptr:
   774  		return ir.NewConvExpr(n.Pos(), ir.OCONVNOP, types.Types[types.TUINT64], key)
   775  	default:
   776  		// fast version takes key by value.
   777  		return key
   778  	}
   779  }
   780  
// walkIndexMap walks an OINDEXMAP node.
// It replaces m[k] with *map{access1,assign}(maptype, m, &k)
// choosing the appropriate fast-path runtime variant, and using the
// _fat variant (with a zero-value address) for large element types.
func walkIndexMap(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)
	n.Index = walkExpr(n.Index, init)
	map_ := n.X
	t := map_.Type()
	fast := mapfast(t)
	key := mapKeyArg(fast, n, n.Index, n.Assigned)
	args := []ir.Node{reflectdata.IndexMapRType(base.Pos, n), map_, key}

	var mapFn ir.Node
	switch {
	case n.Assigned:
		// m[k] on the left of an assignment.
		mapFn = mapfn(mapassign[fast], t, false)
	case t.Elem().Size() > zeroValSize:
		// Large elements: pass a pointer to a shared zero value.
		args = append(args, reflectdata.ZeroAddr(t.Elem().Size()))
		mapFn = mapfn("mapaccess1_fat", t, true)
	default:
		mapFn = mapfn(mapaccess1[fast], t, false)
	}
	call := mkcall1(mapFn, nil, init, args...)
	call.SetType(types.NewPtr(t.Elem()))
	call.MarkNonNil() // mapaccess1* and mapassign always return non-nil pointers.
	star := ir.NewStarExpr(base.Pos, call)
	star.SetType(t.Elem())
	star.SetTypecheck(1)
	return star
}
   810  
// walkLogical walks an OANDAND or OOROR node. The right operand's
// extracted side effects must not be hoisted into init, since they may
// only run when short-circuit evaluation reaches the right operand.
func walkLogical(n *ir.LogicalExpr, init *ir.Nodes) ir.Node {
	n.X = walkExpr(n.X, init)

	// cannot put side effects from n.Right on init,
	// because they cannot run before n.Left is checked.
	// save elsewhere and store on the eventual n.Right.
	var ll ir.Nodes

	n.Y = walkExpr(n.Y, &ll)
	// Reattach the collected side effects to n.Y itself.
	n.Y = ir.InitExpr(ll, n.Y)
	return n
}
   824  
   825  // walkSend walks an OSEND node.
   826  func walkSend(n *ir.SendStmt, init *ir.Nodes) ir.Node {
   827  	n1 := n.Value
   828  	n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
   829  	n1 = walkExpr(n1, init)
   830  	n1 = typecheck.NodAddr(n1)
   831  	return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
   832  }
   833  
   834  // walkSlice walks an OSLICE, OSLICEARR, OSLICESTR, OSLICE3, or OSLICE3ARR node.
   835  func walkSlice(n *ir.SliceExpr, init *ir.Nodes) ir.Node {
   836  	n.X = walkExpr(n.X, init)
   837  	n.Low = walkExpr(n.Low, init)
   838  	if n.Low != nil && ir.IsZero(n.Low) {
   839  		// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
   840  		n.Low = nil
   841  	}
   842  	n.High = walkExpr(n.High, init)
   843  	n.Max = walkExpr(n.Max, init)
   844  
   845  	if (n.Op() == ir.OSLICE || n.Op() == ir.OSLICESTR) && n.Low == nil && n.High == nil {
   846  		// Reduce x[:] to x.
   847  		if base.Debug.Slice > 0 {
   848  			base.Warn("slice: omit slice operation")
   849  		}
   850  		return n.X
   851  	}
   852  	return n
   853  }
   854  
   855  // walkSliceHeader walks an OSLICEHEADER node.
   856  func walkSliceHeader(n *ir.SliceHeaderExpr, init *ir.Nodes) ir.Node {
   857  	n.Ptr = walkExpr(n.Ptr, init)
   858  	n.Len = walkExpr(n.Len, init)
   859  	n.Cap = walkExpr(n.Cap, init)
   860  	return n
   861  }
   862  
   863  // walkStringHeader walks an OSTRINGHEADER node.
   864  func walkStringHeader(n *ir.StringHeaderExpr, init *ir.Nodes) ir.Node {
   865  	n.Ptr = walkExpr(n.Ptr, init)
   866  	n.Len = walkExpr(n.Len, init)
   867  	return n
   868  }
   869  
// bounded reports whether integer expression n must be in range
// [0, max). It recognizes small integer constants and a few operator
// shapes (masking, mod, div, shift by constants) that bound the value.
func bounded(n ir.Node, max int64) bool {
	if n.Type() == nil || !n.Type().IsInteger() {
		return false
	}

	sign := n.Type().IsSigned()
	bits := int32(8 * n.Type().Size())

	if ir.IsSmallIntConst(n) {
		v := ir.Int64Val(n)
		return 0 <= v && v < max
	}

	switch n.Op() {
	case ir.OAND, ir.OANDNOT:
		// x & c (or x &^ ^c) can be at most c.
		n := n.(*ir.BinaryExpr)
		v := int64(-1)
		switch {
		case ir.IsSmallIntConst(n.X):
			v = ir.Int64Val(n.X)
		case ir.IsSmallIntConst(n.Y):
			v = ir.Int64Val(n.Y)
			if n.Op() == ir.OANDNOT {
				// x &^ c == x & ^c; complement the mask.
				v = ^v
				if !sign {
					v &= 1<<uint(bits) - 1
				}
			}
		}
		if 0 <= v && v < max {
			return true
		}

	case ir.OMOD:
		// Unsigned x % c is in [0, c).
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if 0 <= v && v <= max {
				return true
			}
		}

	case ir.ODIV:
		// Unsigned x / c shrinks the effective bit width; fall
		// through to the final width-based check below.
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ir.ORSH:
		// Unsigned x >> c shrinks the effective bit width (or
		// forces zero when c >= bits).
		n := n.(*ir.BinaryExpr)
		if !sign && ir.IsSmallIntConst(n.Y) {
			v := ir.Int64Val(n.Y)
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	// An unsigned value of width bits is < 1<<bits.
	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}
   940  
// usemethod checks calls for uses of reflect.Type.{Method,MethodByName}.
// When such a use is found, the current function (and its LSym) is
// marked ReflectMethod so the linker keeps all methods reachable
// through reflection.
func usemethod(n *ir.CallExpr) {
	// Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
	// Those functions may be alive via the itab, which should not cause all methods
	// alive. We only want to mark their callers.
	if base.Ctxt.Pkgpath == "reflect" {
		switch ir.CurFunc.Nname.Sym().Name { // TODO: is there a better way than hardcoding the names?
		case "(*rtype).Method", "(*rtype).MethodByName", "(*interfaceType).Method", "(*interfaceType).MethodByName":
			return
		}
	}

	dot, ok := n.X.(*ir.SelectorExpr)
	if !ok {
		return
	}

	// Looking for either direct method calls and interface method calls of:
	//	reflect.Type.Method       - func(int) reflect.Method
	//	reflect.Type.MethodByName - func(string) (reflect.Method, bool)
	var pKind types.Kind

	switch dot.Sel.Name {
	case "Method":
		pKind = types.TINT
	case "MethodByName":
		pKind = types.TSTRING
	default:
		return
	}

	// Match the expected signature shape: one parameter of pKind and
	// one result (plus an optional trailing bool).
	t := dot.Selection.Type
	if t.NumParams() != 1 || t.Params().Field(0).Type.Kind() != pKind {
		return
	}
	switch t.NumResults() {
	case 1:
		// ok
	case 2:
		if t.Results().Field(1).Type.Kind() != types.TBOOL {
			return
		}
	default:
		return
	}

	// Check that first result type is "reflect.Method". Note that we have to check sym name and sym package
	// separately, as we can't check for exact string "reflect.Method" reliably (e.g., see #19028 and #38515).
	if s := t.Results().Field(0).Type.Sym(); s != nil && s.Name == "Method" && types.IsReflectPkg(s.Pkg) {
		ir.CurFunc.SetReflectMethod(true)
		// The LSym is initialized at this point. We need to set the attribute on the LSym.
		ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
	}
}
   995  
// usefield records an access to a struct field carrying a go:"track"
// note in the current function's FieldTrack set, for the fieldtrack
// experiment. It is a no-op unless the experiment is enabled.
func usefield(n *ir.SelectorExpr) {
	if !buildcfg.Experiment.FieldTrack {
		return
	}

	switch n.Op() {
	default:
		base.Fatalf("usefield %v", n.Op())

	case ir.ODOT, ir.ODOTPTR:
		break
	}

	field := n.Selection
	if field == nil {
		base.Fatalf("usefield %v %v without paramfld", n.X.Type(), n.Sel)
	}
	if field.Sym != n.Sel {
		base.Fatalf("field inconsistency: %v != %v", field.Sym, n.Sel)
	}
	if !strings.Contains(field.Note, "go:\"track\"") {
		// Only fields explicitly tagged go:"track" are recorded.
		return
	}

	// Tracked fields must belong to a named struct type so the
	// tracking symbol has a stable name.
	outer := n.X.Type()
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym() == nil {
		base.Errorf("tracked field must be in named struct type")
	}

	sym := reflectdata.TrackSym(outer, field)
	if ir.CurFunc.FieldTrack == nil {
		ir.CurFunc.FieldTrack = make(map[*obj.LSym]struct{})
	}
	ir.CurFunc.FieldTrack[sym] = struct{}{}
}