github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/walk/assign.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"github.com/bir3/gocompiler/src/go/constant"

	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
	"github.com/bir3/gocompiler/src/cmd/internal/src"
)

// walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
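	// Reusing the call means the map element is located only once: the
	// append then reads and writes m[k] through the same mapassign result.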
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
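		// For example, x += y becomes x = x + y. left was made safe
		// above, so evaluating it twice duplicates no side effects.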
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.X is x, recv.X is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(x, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string).
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.X = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}

// walkAssignDotType walks an OAS2DOTTYPE node.
func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
	walkExprListSafe(n.Lhs, init)
	n.Rhs[0] = walkExpr(n.Rhs[0], init)
	return n
}

// walkAssignFunc walks an OAS2FUNC node.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
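		// Leave intrinsic calls in place; they are expanded during SSA
		// generation rather than lowered to a runtime call here.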
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}

// walkAssignList walks an OAS2 node.
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)
	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}

// walkAssignMapRead walks an OAS2MAPR node.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

	var call *ir.CallExpr
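	// Values up to zeroValSize can share the runtime's static zero buffer
	// for the missing-key result; larger values use mapaccess2_fat, which
	// takes an explicit pointer to a zeroed value.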
	if w := t.Elem().Size(); w <= zeroValSize {
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key)
	} else {
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but the comma-ok result of a map
	// access is untyped per the spec, so we make it the same type as the
	// variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.Temp(types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}

// walkAssignRecv walks an OAS2RECV node.
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.UnaryExpr) // recv
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	var n1 ir.Node
	if ir.IsBlank(n.Lhs[0]) {
		n1 = typecheck.NodNil()
	} else {
		n1 = typecheck.NodAddr(n.Lhs[0])
	}
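	// chanrecv2 stores the received value through n1 (nil means discard
	// it) and reports whether the receive succeeded; that bool becomes ok.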
	fn := chanfn("chanrecv2", 2, r.X.Type())
	ok := n.Lhs[1]
	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
}

// walkReturn walks an ORETURN node.
func walkReturn(n *ir.ReturnStmt) ir.Node {
	fn := ir.CurFunc

	fn.NumReturns++
	if len(n.Results) == 0 {
		return n
	}

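	// Rewrite "return a, b" into assignments to the declared result
	// parameters; the back end then emits the actual return.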
	results := fn.Type().Results().FieldSlice()
	dsts := make([]ir.Node, len(results))
	for i, v := range results {
		// TODO(mdempsky): typecheck should have already checked the result variables.
		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
	}

	n.Results = ascompatee(n.Op(), dsts, n.Results)
	return n
}

// ascompatet checks the assignment of a function's result types to an
// expression list, as in
//
//	expr-list = func()
func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
	if len(nl) != nr.NumFields() {
		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
	}

	var nn ir.Nodes
	for i, l := range nl {
		if ir.IsBlank(l) {
			continue
		}
		r := nr.Field(i)

		// Order should have created autotemps of the appropriate type for
		// us to store results into.
		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
		}

		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
		res.Index = int64(i)
		res.SetType(r.Type)
		res.SetTypecheck(1)

		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
	}
	return nn
}

// ascompatee checks the assignment of an expression list to an
// expression list, as in
//
//	expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}
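	// For example, in "a, b = b, a" the right-hand "a" is saved into a
	// temporary before the emitted "a = b" executes, so the swap keeps
	// a's original value.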

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			// TODO(mdempsky): Disallow init statements on lvalues.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// become visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if sym := types.OrigSym(name.Sym()); sym == nil || sym.IsBlank() {
			// We can ignore assignments to blank or anonymous result parameters.
			// These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

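	// Emit all early copies (and hoisted init statements) first, then
	// the assignments themselves.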
	early.Append(late.Take()...)
	return early
}

// readsMemory reports whether the evaluation of n directly reads from
// memory that might be written to indirectly.
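// For example, a dereference or a global/addrtaken name reads memory;
// pure arithmetic and local, non-addrtaken names do not.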
func readsMemory(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			return false
		}
		return n.Addrtaken() || !n.OnStack()

	case ir.OADD,
		ir.OAND,
		ir.OANDAND,
		ir.OANDNOT,
		ir.OBITNOT,
		ir.OCONV,
		ir.OCONVIFACE,
		ir.OCONVIDATA,
		ir.OCONVNOP,
		ir.ODIV,
		ir.ODOT,
		ir.ODOTTYPE,
		ir.OLITERAL,
		ir.OLSH,
		ir.OMOD,
		ir.OMUL,
		ir.ONEG,
		ir.ONIL,
		ir.OOR,
		ir.OOROR,
		ir.OPAREN,
		ir.OPLUS,
		ir.ORSH,
		ir.OSUB,
		ir.OXOR:
		return false
	}

	// Be conservative.
	return true
}

// appendSlice expands append(l1, l2...) to
//
//	init {
//	  s := l1
//	  newLen := s.len + l2.len
//	  // Compare as uint so growslice can panic on overflow.
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, newLen, s.cap, l2.len, T)
//	  }
//	  memmove(&s[s.len-l2.len], &l2[0], l2.len*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.Temp(l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// Decompose slice.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.Temp(types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true
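	// Marking the in-capacity branch likely keeps the common, no-grow
	// path straight-line in the generated code.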

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// func growslice(oldPtr unsafe.Pointer, newLen, oldCap, num int, et *_type) []T
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := mkcall1(fn, s.Type(), nif.PtrInit(), oldPtr, newLen, oldCap, num, reflectdata.TypePtr(elemtype))
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index to start copying into s.
	//   idx = newLen - len(l2)
	// We use this expression instead of oldLen because it avoids
	// a spill/restore of oldLen.
	// Note: this doesn't currently work optimally because the
	// optimizer undoes this arithmetic.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[idx:], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy")
		fn = typecheck.SubstArgTypes(fn, l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[idx:], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy")
		fn = typecheck.SubstArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(elemtype.Size()))
	} else {
		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove")
		fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}

// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
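// The pattern extends x by y zeroed elements; extendSlice rewrites it to
// grow the slice directly and zero the new elements, without materializing
// the temporary slice from make.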
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// y must be either an integer constant, or the largest possible
	// positive value of variable y must fit into a uint.
	//
	// typecheck already ensured that constant arguments to make are
	// non-negative and fit into an int.
	//
	// Overflow of the len argument to make is caught by an explicit
	// runtime check of int(len) < 0.
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}

// extendSlice rewrites append(l1, make([]T, l2)...) to
//
//	init {
//	  if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//	  } else {
//	    panicmakeslicelen()
//	  }
//	  s := l1
//	  n := len(s) + l2
//	  // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//	  // cap is a positive int and n can become negative when len(s) + l2
//	  // overflows int. Interpreting n when negative as uint makes it larger
//	  // than cap(s). growslice will check the int n arg and panic if n is
//	  // negative. This prevents the overflow from being undetected.
//	  if uint(n) <= uint(cap(s)) {
//	    s = s[:n]
//	  } else {
//	    s = growslice(s.ptr, n, s.cap, l2, T)
//	  }
//	  // clear the new portion of the underlying array.
//	  hp := &s[len(s)-l2]
//	  hn := l2 * sizeof(T)
//	  memclr(hp, hn)
//	}
//	s
//
// If T has pointers, the final memclr can go inside the "then" branch, as
// growslice will have done the clearing for us.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into an uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.List.Second().

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.Temp(l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// n := s.len + l2
	nn := typecheck.Temp(types.Types[types.TINT])
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(s.cap)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// instantiate growslice(oldPtr *any, newLen, oldCap, num int, typ *type) []any
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2,
			reflectdata.TypePtr(elemtype))),
	}

	nodes = append(nodes, nif)

	// hp := &s[s.len - l2]
	// TODO: &s[s.len] - hn?
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// growslice will have cleared the new entries, so only
		// if growslice isn't called do we need to do the zeroing ourselves.
		nif.Body = append(nif.Body, clr...)
	} else {
		nodes = append(nodes, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}