github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/walk/builtin.go (about)

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"github.com/bir3/gocompiler/src/go/constant"
    10  	"github.com/bir3/gocompiler/src/go/token"
    11  	"strings"
    12  
    13  	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
    14  	"github.com/bir3/gocompiler/src/cmd/compile/internal/escape"
    15  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
    16  	"github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata"
    17  	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
    18  	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
    19  )
    20  
// Rewrite append(src, x, y, z) so that any side effects in
// x, y, z (including runtime panics) are evaluated in
// initialization statements before the append.
// For normal code generation, stop there and leave the
// rest to ssagen.
//
// For race detector, expand append(src, a [, b]* ) to
//
//	init {
//	  s := src
//	  const argc = len(args) - 1
//	  newLen := s.len + argc
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, newLen, s.cap, argc, elemType)
//	  }
//	  s[s.len - argc] = a
//	  s[s.len - argc + 1] = b
//	  ...
//	}
//	s
func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
	// If the destination is not syntactically the same safe expression
	// as the slice being appended to, force the source operand into a
	// temporary so it is evaluated exactly once.
	if !ir.SameSafeExpr(dst, n.Args[0]) {
		n.Args[0] = safeExpr(n.Args[0], init)
		n.Args[0] = walkExpr(n.Args[0], init)
	}
	walkExprListSafe(n.Args[1:], init)

	nsrc := n.Args[0]

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapExpr also makes sure that the evaluation
	// of all arguments (and especially any panics) happen
	// before we begin to modify the slice in a visible way.
	ls := n.Args[1:]
	for i, n := range ls {
		n = cheapExpr(n, init)
		if !types.Identical(n.Type(), nsrc.Type().Elem()) {
			// Convert each appended value to the slice's element type
			// (e.g. untyped constants, assignable named types).
			n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
			n = walkExpr(n, init)
		}
		ls[i] = n
	}

	// argc = number of values being appended.
	argc := len(n.Args) - 1
	if argc < 1 {
		// append(s) is a no-op; the result is just the source slice.
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for ssagen, except that instrumentation requires the old form.
	if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
		return n
	}

	// Instrumented (race/msan) path: open-code the append as described
	// in the function comment, accumulating statements in l.
	var l []ir.Node

	// s = slice to append to
	s := typecheck.Temp(nsrc.Type())
	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))

	// num = number of things to append
	num := ir.NewInt(int64(argc))

	// newLen := s.len + num
	newLen := typecheck.Temp(types.Types[types.TINT])
	l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))

	// if uint(newLen) <= uint(s.cap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
	nif.Likely = true

	// then { s = s[:n] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true) // newLen <= cap was just checked; skip bounds check
	nif.Body = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, slice),
	}

	fn := typecheck.LookupRuntime("growslice") //   growslice(ptr *T, newLen, oldCap, num int, <type>) (ret []T)
	fn = typecheck.SubstArgTypes(fn, s.Type().Elem(), s.Type().Elem())

	// else { s = growslice(s.ptr, n, s.cap, a, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			newLen,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			num,
			reflectdata.TypePtr(s.Type().Elem()))),
	}

	l = append(l, nif)

	// Store each appended value into its slot at the end of the slice.
	ls = n.Args[1:]
	for i, n := range ls {
		// s[s.len-argc+i] = arg
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(int64(argc-i))))
		ix.SetBounded(true) // index is within newLen by construction
		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
	}

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return s
}
   132  
   133  // walkClose walks an OCLOSE node.
   134  func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   135  	// cannot use chanfn - closechan takes any, not chan any
   136  	fn := typecheck.LookupRuntime("closechan")
   137  	fn = typecheck.SubstArgTypes(fn, n.X.Type())
   138  	return mkcall1(fn, nil, init, n.X)
   139  }
   140  
// Lower copy(a, b) to a memmove call or a runtime call.
//
//	init {
//	  n := len(a)
//	  if n > len(b) { n = len(b) }
//	  if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
//	}
//	n;
//
// Also works if b is a string.
func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
	// Element type contains pointers: must go through the write-barrier
	// aware runtime helper typedslicecopy.
	if n.X.Type().Elem().HasPointers() {
		ir.CurFunc.SetWBPos(n.Pos())
		fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)
		return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
	}

	if runtimecall {
		// rely on runtime to instrument:
		//  copy(n.Left, n.Right)
		// n.Right can be a slice or string.

		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)

		fn := typecheck.LookupRuntime("slicecopy")
		fn = typecheck.SubstArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())

		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(n.X.Type().Elem().Size()))
	}

	// Open-coded path: materialize both operands in temporaries, compute
	// the min length, and emit a conditional memmove.
	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)
	nl := typecheck.Temp(n.X.Type())
	nr := typecheck.Temp(n.Y.Type())
	var l []ir.Node
	l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
	l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))

	nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
	nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)

	nlen := typecheck.Temp(types.Types[types.TINT])

	// n = len(to)
	l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))

	// if n > len(frm) { n = len(frm) }
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)

	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
	nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
	l = append(l, nif)

	// if to.ptr != frm.ptr { memmove( ... ) }
	// Skipping the call when the pointers are equal avoids a pointless
	// self-copy (e.g. copy(s, s)).
	ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
	ne.Likely = true
	l = append(l, ne)

	fn := typecheck.LookupRuntime("memmove")
	fn = typecheck.SubstArgTypes(fn, nl.Type().Elem(), nl.Type().Elem())
	// nwid = byte count = n * sizeof(elem)
	nwid := ir.Node(typecheck.Temp(types.Types[types.TUINTPTR]))
	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
	ne.Body.Append(setwid)
	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(nl.Type().Elem().Size()))
	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
	ne.Body.Append(call)

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	// The expression's value is the number of elements copied.
	return nlen
}
   220  
   221  // walkDelete walks an ODELETE node.
   222  func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
   223  	init.Append(ir.TakeInit(n)...)
   224  	map_ := n.Args[0]
   225  	key := n.Args[1]
   226  	map_ = walkExpr(map_, init)
   227  	key = walkExpr(key, init)
   228  
   229  	t := map_.Type()
   230  	fast := mapfast(t)
   231  	key = mapKeyArg(fast, n, key, false)
   232  	return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
   233  }
   234  
   235  // walkLenCap walks an OLEN or OCAP node.
   236  func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   237  	if isRuneCount(n) {
   238  		// Replace len([]rune(string)) with runtime.countrunes(string).
   239  		return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
   240  	}
   241  
   242  	n.X = walkExpr(n.X, init)
   243  
   244  	// replace len(*[10]int) with 10.
   245  	// delayed until now to preserve side effects.
   246  	t := n.X.Type()
   247  
   248  	if t.IsPtr() {
   249  		t = t.Elem()
   250  	}
   251  	if t.IsArray() {
   252  		safeExpr(n.X, init)
   253  		con := typecheck.OrigInt(n, t.NumElem())
   254  		con.SetTypecheck(1)
   255  		return con
   256  	}
   257  	return n
   258  }
   259  
   260  // walkMakeChan walks an OMAKECHAN node.
   261  func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
   262  	// When size fits into int, use makechan instead of
   263  	// makechan64, which is faster and shorter on 32 bit platforms.
   264  	size := n.Len
   265  	fnname := "makechan64"
   266  	argtype := types.Types[types.TINT64]
   267  
   268  	// Type checking guarantees that TIDEAL size is positive and fits in an int.
   269  	// The case of size overflow when converting TUINT or TUINTPTR to TINT
   270  	// will be handled by the negative range checks in makechan during runtime.
   271  	if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
   272  		fnname = "makechan"
   273  		argtype = types.Types[types.TINT]
   274  	}
   275  
   276  	return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
   277  }
   278  
// walkMakeMap walks an OMAKEMAP node, choosing among makemap_small,
// makemap, and makemap64, and stack-allocating the hmap header and
// first bucket when the map does not escape.
func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	t := n.Type()
	hmapType := reflectdata.MapType(t)
	hint := n.Len

	// var h *hmap
	var h ir.Node
	if n.Esc() == ir.EscNone {
		// Allocate hmap on stack.

		// var hv hmap
		// h = &hv
		h = stackTempAddr(init, hmapType)

		// Allocate one bucket pointed to by hmap.buckets on stack if hint
		// is not larger than BUCKETSIZE. In case hint is larger than
		// BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
		// Maximum key and elem size is 128 bytes, larger objects
		// are stored with an indirection. So max bucket size is 2048+eps.
		if !ir.IsConst(hint, constant.Int) ||
			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {

			// In case hint is larger than BUCKETSIZE runtime.makemap
			// will allocate the buckets on the heap, see #20184
			//
			// if hint <= BUCKETSIZE {
			//     var bv bmap
			//     b = &bv
			//     h.buckets = b
			// }

			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(reflectdata.BUCKETSIZE)), nil, nil)
			nif.Likely = true

			// var bv bmap
			// b = &bv
			b := stackTempAddr(&nif.Body, reflectdata.MapBucketType(t))

			// h.buckets = b
			bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), b)
			nif.Body.Append(na)
			appendWalkStmt(init, nif)
		}
	}

	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {
		// Handling make(map[any]any) and
		// make(map[any]any, hint) where hint <= BUCKETSIZE
		// special allows for faster map initialization and
		// improves binary size by using calls with fewer arguments.
		// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
		// and no buckets will be allocated by makemap. Therefore,
		// no buckets need to be allocated in this code path.
		if n.Esc() == ir.EscNone {
			// Only need to initialize h.hash0 since
			// hmap h has been allocated on the stack already.
			// h.hash0 = fastrand()
			rand := mkcall("fastrand", types.Types[types.TUINT32], init)
			hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
			return typecheck.ConvNop(h, t)
		}
		// Call runtime.makehmap to allocate an
		// hmap on the heap and initialize hmap's hash0 field.
		fn := typecheck.LookupRuntime("makemap_small")
		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
		return mkcall1(fn, n.Type(), init)
	}

	if n.Esc() != ir.EscNone {
		// Escaping map: pass nil for h and let makemap allocate the hmap.
		h = typecheck.NodNil()
	}
	// Map initialization with a variable or large hint is
	// more complicated. We therefore generate a call to
	// runtime.makemap to initialize hmap and allocate the
	// map buckets.

	// When hint fits into int, use makemap instead of
	// makemap64, which is faster and shorter on 32 bit platforms.
	fnname := "makemap64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
	// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in makemap during runtime.
	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makemap"
		argtype = types.Types[types.TINT]
	}

	fn := typecheck.LookupRuntime(fnname)
	fn = typecheck.SubstArgTypes(fn, hmapType, t.Key(), t.Elem())
	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h)
}
   376  
// walkMakeSlice walks an OMAKESLICE node. Non-escaping slices with a
// constant capacity are backed by a stack array; escaping slices become
// a call to runtime.makeslice or makeslice64.
func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	l := n.Len
	r := n.Cap
	if r == nil {
		// make([]T, l): cap defaults to len; evaluate once.
		r = safeExpr(l, init)
		l = r
	}
	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}
	if n.Esc() == ir.EscNone {
		if why := escape.HeapAllocReason(n); why != "" {
			base.Fatalf("%v has EscNone, but %v", n, why)
		}
		// var arr [r]T
		// n = arr[:l]
		i := typecheck.IndexConst(r)
		if i < 0 {
			base.Fatalf("walkExpr: invalid index %v", r)
		}

		// cap is constrained to [0,2^31) or [0,2^63) depending on whether
		// we're in 32-bit or 64-bit systems. So it's safe to do:
		//
		// if uint64(len) > cap {
		//     if len < 0 { panicmakeslicelen() }
		//     panicmakeslicecap()
		// }
		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(i)), nil, nil)
		niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(0)), nil, nil)
		niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
		nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
		init.Append(typecheck.Stmt(nif))

		t = types.NewArray(t.Elem(), i) // [r]T
		var_ := typecheck.Temp(t)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))  // zero temp
		r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l]
		// The conv is necessary in case n.Type is named.
		return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
	}

	// n escapes; set up a call to makeslice.
	// When len and cap can fit into int, use makeslice instead of
	// makeslice64, which is faster and shorter on 32 bit platforms.

	len, cap := l, r

	fnname := "makeslice64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in makeslice during runtime.
	if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
		(cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
		fnname = "makeslice"
		argtype = types.Types[types.TINT]
	}
	fn := typecheck.LookupRuntime(fnname)
	// makeslice returns only the backing pointer; the slice header is
	// assembled here from ptr/len/cap.
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
	ptr.MarkNonNil()
	len = typecheck.Conv(len, types.Types[types.TINT])
	cap = typecheck.Conv(cap, types.Types[types.TINT])
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
	return walkExpr(typecheck.Expr(sh), init)
}
   446  
// walkMakeSliceCopy walks an OMAKESLICECOPY node, the fused form of
// make([]T, n) followed by copy into it.
func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	// This fusion only applies to heap-allocated slices.
	if n.Esc() == ir.EscNone {
		base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
	}

	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	// For OMAKESLICECOPY, n.Len is the target length and n.Cap holds the
	// source slice/string being copied from.
	length := typecheck.Conv(n.Len, types.Types[types.TINT])
	copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
	copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)

	if !t.Elem().HasPointers() && n.Bounded() {
		// When len(to)==len(from) and elements have no pointers:
		// replace make+copy with runtime.mallocgc+runtime.memmove.

		// We do not check for overflow of len(to)*elem.Width here
		// since len(from) is an existing checked slice capacity
		// with same elem.Width for the from slice.
		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(t.Elem().Size()), types.Types[types.TUINTPTR]))

		// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
		// needszero=false: the memory is fully overwritten by the memmove below.
		fn := typecheck.LookupRuntime("mallocgc")
		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(false))
		ptr.MarkNonNil()
		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)

		s := typecheck.Temp(t)
		r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
		r = walkExpr(r, init)
		init.Append(r)

		// instantiate memmove(to *any, frm *any, size uintptr)
		fn = typecheck.LookupRuntime("memmove")
		fn = typecheck.SubstArgTypes(fn, t.Elem(), t.Elem())
		ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
		init.Append(walkExpr(typecheck.Stmt(ncopy), init))

		return s
	}
	// Replace make+copy with runtime.makeslicecopy.
	// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
	fn := typecheck.LookupRuntime("makeslicecopy")
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
	ptr.MarkNonNil()
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
	return walkExpr(typecheck.Expr(sh), init)
}
   498  
   499  // walkNew walks an ONEW node.
   500  func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   501  	t := n.Type().Elem()
   502  	if t.NotInHeap() {
   503  		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
   504  	}
   505  	if n.Esc() == ir.EscNone {
   506  		if t.Size() > ir.MaxImplicitStackVarSize {
   507  			base.Fatalf("large ONEW with EscNone: %v", n)
   508  		}
   509  		return stackTempAddr(init, t)
   510  	}
   511  	types.CalcSize(t)
   512  	n.MarkNonNil()
   513  	return n
   514  }
   515  
// walkPrint generates code for print and println: arguments are
// hoisted, constant strings are merged, and each argument becomes a
// call to the matching runtime print helper, all bracketed by
// printlock/printunlock.
func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	// Hoist all the argument evaluation up before the lock.
	walkExprListCheap(nn.Args, init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op() == ir.OPRINTN {
		s := nn.Args
		t := make([]ir.Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, ir.NewString(" "))
			}
			t = append(t, n)
		}
		t = append(t, ir.NewString("\n"))
		nn.Args = t
	}

	// Collapse runs of constant strings.
	s := nn.Args
	t := make([]ir.Node, 0, len(s))
	for i := 0; i < len(s); {
		// Gather a maximal run of adjacent constant strings...
		var strs []string
		for i < len(s) && ir.IsConst(s[i], constant.String) {
			strs = append(strs, ir.StringVal(s[i]))
			i++
		}
		// ...and emit them as one merged literal.
		if len(strs) > 0 {
			t = append(t, ir.NewString(strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.Args = t

	calls := []ir.Node{mkcall("printlock", nil, init)}
	for i, n := range nn.Args {
		// Give untyped constants a concrete default type so a print
		// helper can be chosen.
		if n.Op() == ir.OLITERAL {
			if n.Type() == types.UntypedRune {
				n = typecheck.DefaultLit(n, types.RuneType)
			}

			switch n.Val().Kind() {
			case constant.Int:
				n = typecheck.DefaultLit(n, types.Types[types.TINT64])

			case constant.Float:
				n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
			}
		}

		if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
			n = typecheck.DefaultLit(n, types.Types[types.TINT64])
		}
		n = typecheck.DefaultLit(n, nil)
		nn.Args[i] = n
		if n.Type() == nil || n.Type().Kind() == types.TFORW {
			continue
		}

		// Select the runtime print helper for this argument's kind.
		var on *ir.Name
		switch n.Type().Kind() {
		case types.TINTER:
			if n.Type().IsEmptyInterface() {
				on = typecheck.LookupRuntime("printeface")
			} else {
				on = typecheck.LookupRuntime("printiface")
			}
			on = typecheck.SubstArgTypes(on, n.Type()) // any-1
		case types.TPTR:
			if n.Type().Elem().NotInHeap() {
				// Not-in-heap pointers can't be passed as unsafe.Pointer
				// arguments directly; print them as uintptr values.
				on = typecheck.LookupRuntime("printuintptr")
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUNSAFEPTR])
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUINTPTR])
				break
			}
			fallthrough
		case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
			on = typecheck.LookupRuntime("printpointer")
			on = typecheck.SubstArgTypes(on, n.Type()) // any-1
		case types.TSLICE:
			on = typecheck.LookupRuntime("printslice")
			on = typecheck.SubstArgTypes(on, n.Type()) // any-1
		case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
			// runtime.hex is printed in hexadecimal.
			if types.IsRuntimePkg(n.Type().Sym().Pkg) && n.Type().Sym().Name == "hex" {
				on = typecheck.LookupRuntime("printhex")
			} else {
				on = typecheck.LookupRuntime("printuint")
			}
		case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
			on = typecheck.LookupRuntime("printint")
		case types.TFLOAT32, types.TFLOAT64:
			on = typecheck.LookupRuntime("printfloat")
		case types.TCOMPLEX64, types.TCOMPLEX128:
			on = typecheck.LookupRuntime("printcomplex")
		case types.TBOOL:
			on = typecheck.LookupRuntime("printbool")
		case types.TSTRING:
			// The common separators get dedicated, smaller helpers.
			cs := ""
			if ir.IsConst(n, constant.String) {
				cs = ir.StringVal(n)
			}
			switch cs {
			case " ":
				on = typecheck.LookupRuntime("printsp")
			case "\n":
				on = typecheck.LookupRuntime("printnl")
			default:
				on = typecheck.LookupRuntime("printstring")
			}
		default:
			badtype(ir.OPRINT, n.Type(), nil)
			continue
		}

		r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
		// printsp/printnl take no argument; the rest take one.
		if params := on.Type().Params().FieldSlice(); len(params) > 0 {
			t := params[0].Type
			n = typecheck.Conv(n, t)
			r.Args.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheck.Stmts(calls)
	walkExprList(calls, init)

	r := ir.NewBlockStmt(base.Pos, nil)
	r.List = calls
	return walkStmt(typecheck.Stmt(r))
}
   654  
   655  // walkRecover walks an ORECOVERFP node.
   656  func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
   657  	return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
   658  }
   659  
   660  // walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA expression.
   661  func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   662  	slice := walkExpr(n.X, init)
   663  	res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
   664  	res.SetType(n.Type())
   665  	return walkExpr(res, init)
   666  }
   667  
// walkUnsafeSlice walks an OUNSAFESLICE node (unsafe.Slice(ptr, len)).
// Under checkptr the validation is delegated to the runtime; otherwise
// the length, nil-pointer, and size-overflow checks are open-coded and
// the result is assembled as a slice header.
func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)
	sliceType := n.Type()

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr enabled, call runtime.unsafeslicecheckptr to check ptr and len.
	// for simplicity, unsafeslicecheckptr always uses int64.
	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
	// will be handled by the negative range checks in unsafeslice during runtime.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafeslicecheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code unsafe.Slice to prevent runtime call overhead.
		// Keep this code in sync with runtime.unsafeslice{,64}
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//     panicunsafeslicelen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafeslicelen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(0))
		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		if sliceType.Elem().Size() == 0 {
			// Zero-sized elements: no size overflow is possible, only the
			// nil-pointer-with-positive-length case needs a check.
			// if ptr == nil && len > 0  {
			//      panicunsafesliceptrnil()
			// }
			nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
			isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
			gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(0))
			nifPtr.Cond =
				ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
			nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
			appendWalkStmt(init, nifPtr)

			h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
				typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
				typecheck.Conv(len, types.Types[types.TINT]),
				typecheck.Conv(len, types.Types[types.TINT]))
			return walkExpr(typecheck.Expr(h), init)
		}

		// mem, overflow := runtime.mulUintptr(et.size, len)
		mem := typecheck.Temp(types.Types[types.TUINTPTR])
		overflow := typecheck.Temp(types.Types[types.TBOOL])
		fn := typecheck.LookupRuntime("mulUintptr")
		call := mkcall1(fn, fn.Type().Results(), init, ir.NewInt(sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))

		// if overflow || mem > -uintptr(ptr) {
		//     if ptr == nil {
		//         panicunsafesliceptrnil()
		//     }
		//     panicunsafeslicelen()
		// }
		nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
		memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
		nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)
	}

	// Assemble the result slice header: ptr/len/len (cap == len).
	h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
		typecheck.Conv(len, types.Types[types.TINT]))
	return walkExpr(typecheck.Expr(h), init)
}
   756  
// walkUnsafeString lowers an unsafe.String(ptr, len) expression: it
// appends the required pointer/length validity checks to init and
// returns a string header built from ptr and len.
func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	// Evaluate both operands exactly once up front so the checks below
	// cannot re-trigger their side effects.
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr enabled, call runtime.unsafestringcheckptr to check ptr and len.
	// for simplicity, unsafestringcheckptr always uses int64.
	// Type checking guarantees that TIDEAL len are positive and fit in an int.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafestringcheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Otherwise, open code unsafe.String to prevent runtime call overhead.
		// Keep this code in sync with runtime.unsafestring{,64}
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			// len already fits in an int, so the 64-bit truncation
			// check below is unnecessary.
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//     panicunsafestringlen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafestringlen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(0))
		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		// Reject a string that would wrap past the end of the address
		// space; -uintptr(ptr) is the number of bytes addressable from ptr.
		//
		// if uintptr(len) > -uintptr(ptr) {
		//    if ptr == nil {
		//       panicunsafestringnilptr()
		//    }
		//    panicunsafestringlen()
		// }
		nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
		nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
		appendWalkStmt(init, nifLen)
	}
	// All checks passed (or were deferred to the runtime helper):
	// construct the string header from ptr and len.
	h := ir.NewStringHeaderExpr(n.Pos(),
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
	)
	return walkExpr(typecheck.Expr(h), init)
}
   814  
   815  func badtype(op ir.Op, tl, tr *types.Type) {
   816  	var s string
   817  	if tl != nil {
   818  		s += fmt.Sprintf("\n\t%v", tl)
   819  	}
   820  	if tr != nil {
   821  		s += fmt.Sprintf("\n\t%v", tr)
   822  	}
   823  
   824  	// common mistake: *struct and *interface.
   825  	if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
   826  		if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
   827  			s += "\n\t(*struct vs *interface)"
   828  		} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
   829  			s += "\n\t(*interface vs *struct)"
   830  		}
   831  	}
   832  
   833  	base.Errorf("illegal types for operand: %v%s", op, s)
   834  }
   835  
   836  func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
   837  	fn := typecheck.LookupRuntime(name)
   838  	fn = typecheck.SubstArgTypes(fn, l, r)
   839  	return fn
   840  }
   841  
   842  // isRuneCount reports whether n is of the form len([]rune(string)).
   843  // These are optimized into a call to runtime.countrunes.
   844  func isRuneCount(n ir.Node) bool {
   845  	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
   846  }