github.com/go-asm/go@v1.21.1-0.20240213172139-40c5ead50c48/cmd/compile/walk/assign.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"go/constant"

	"github.com/go-asm/go/abi"

	"github.com/go-asm/go/cmd/compile/base"
	"github.com/go-asm/go/cmd/compile/ir"
	"github.com/go-asm/go/cmd/compile/reflectdata"
	"github.com/go-asm/go/cmd/compile/typecheck"
	"github.com/go-asm/go/cmd/compile/types"
	"github.com/go-asm/go/cmd/src"
)

// walkAssign walks an OAS (AssignStmt) or OASOP (AssignOpStmt) node.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

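	// oaslit can sometimes lower an assignment from a composite literal
	// (e.g. x = T{a: 1}) directly into init; if it does, the original
	// assignment becomes a no-op.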
	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

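	// Assignments of a constant zero value are left for the back end,
	// unless we are instrumenting (e.g. for the race detector).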
	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.X is x, as.Y.X is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, z)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string).
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}

// walkAssignDotType walks an OAS2DOTTYPE node.
func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
	walkExprListSafe(n.Lhs, init)
	n.Rhs[0] = walkExpr(n.Rhs[0], init)
	return n
}

// walkAssignFunc walks an OAS2FUNC node.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

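	// Calls the back end expands inline (intrinsics, e.g. some math/bits
	// functions) are kept in place as the rhs.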
	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}

// walkAssignList walks an OAS2 node.
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)
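	// ascompatee orders the assignments so that parallel-assignment
	// semantics (e.g. x, y = y, x) are preserved.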
	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}

// walkAssignMapRead walks an OAS2MAPR node.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

	var call *ir.CallExpr
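	// Small results can be served from the runtime's shared zero value;
	// larger elements use mapaccess2_fat, which takes the address of a
	// zero value to return when the key is missing.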
	if w := t.Elem().Size(); w <= abi.ZeroValSize {
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key)
	} else {
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but due to spec changes, the
	// boolean result of a comma-ok expression is untyped, so we make it
	// the same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}

// walkAssignRecv walks an OAS2RECV node.
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.UnaryExpr) // recv
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	var n1 ir.Node
	if ir.IsBlank(n.Lhs[0]) {
		n1 = typecheck.NodNil()
	} else {
		n1 = typecheck.NodAddr(n.Lhs[0])
	}
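	// Lower a, ok = <-c to ok = chanrecv2(c, &a), passing nil in place
	// of &a when a is blank.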
	fn := chanfn("chanrecv2", 2, r.X.Type())
	ok := n.Lhs[1]
	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
}

// walkReturn walks an ORETURN node.
func walkReturn(n *ir.ReturnStmt) ir.Node {
	fn := ir.CurFunc

	fn.NumReturns++
	if len(n.Results) == 0 {
		return n
	}

	results := fn.Type().Results()
	dsts := make([]ir.Node, len(results))
	for i, v := range results {
		// TODO(mdempsky): typecheck should have already checked the result variables.
		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
	}

	n.Results = ascompatee(n.Op(), dsts, n.Results)
	return n
}

// ascompatet checks the assignment of a result type list to
// an expression list. It is called for
//
//	expr-list = func()
func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
	if len(nl) != nr.NumFields() {
		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
	}

	var nn ir.Nodes
	for i, l := range nl {
		if ir.IsBlank(l) {
			continue
		}
		r := nr.Field(i)

		// Order should have created autotemps of the appropriate type for
		// us to store results into.
		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
		}

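		// An ORESULT expression reads the i'th result of the call
		// directly from its result slot.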
		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
		res.Index = int64(i)
		res.SetType(r.Type)
		res.SetTypecheck(1)

		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
	}
	return nn
}

// ascompatee checks the assignment of an expression list to
// an expression list. It is called for
//
//	expr-list = expr-list
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			// TODO(mdempsky): Disallow init statements on lvalues.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// become visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// We can ignore assignments to blank or anonymous result parameters.
			// These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	early.Append(late.Take()...)
	return early
}

// readsMemory reports whether evaluating n directly reads from
// memory that might be written to indirectly.
func readsMemory(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			return false
		}
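		// A local that is neither address-taken nor heap-allocated can
		// only be written through direct uses of its name.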
		return n.Addrtaken() || !n.OnStack()

	case ir.OADD,
		ir.OAND,
		ir.OANDAND,
		ir.OANDNOT,
		ir.OBITNOT,
		ir.OCONV,
		ir.OCONVIFACE,
		ir.OCONVNOP,
		ir.ODIV,
		ir.ODOT,
		ir.ODOTTYPE,
		ir.OLITERAL,
		ir.OLSH,
		ir.OMOD,
		ir.OMUL,
		ir.ONEG,
		ir.ONIL,
		ir.OOR,
		ir.OOROR,
		ir.OPAREN,
		ir.OPLUS,
		ir.ORSH,
		ir.OSUB,
		ir.OXOR:
		return false
	}

	// Be conservative.
	return true
}

// expand append(l1, l2...) to
//
//	init {
//	  s := l1
//	  newLen := s.len + l2.len
//	  // Compare as uint so growslice can panic on overflow.
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, s.len, s.cap, l2.len, T)
//	  }
//	  memmove(&s[s.len-l2.len], &l2[0], l2.len*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
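// For example, append(b, s...) with b a []byte and s a string takes
// this path.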
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// Decompose slice.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index to start copying into s.
	//   idx = newLen - len(l2)
	// We use this expression instead of oldLen because it avoids
	// a spill/restore of oldLen.
	// Note: this doesn't work optimally currently because
	// the compiler optimizer undoes this arithmetic.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[idx:], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[idx:], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		// instantiate func memmove(to *any, from *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}

// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// y must be either an integer constant, or the largest possible
	// positive value of variable y must fit into a uint.

	// typecheck made sure that constant arguments to make are not negative
	// and fit into an int.

	// Overflow of the len argument to make is handled by an explicit
	// runtime check of int(len) < 0.
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}

// extendSlice rewrites append(l1, make([]T, l2)...) to
//
//	init {
//	  if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//	  } else {
//	    panicmakeslicelen()
//	  }
//	  s := l1
//	  n := len(s) + l2
//	  // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//	  // cap is a positive int and n can become negative when len(s) + l2
//	  // overflows int. Interpreting n when negative as uint makes it larger
//	  // than cap(s). growslice will check the int n arg and panic if n is
//	  // negative. This prevents the overflow from being undetected.
//	  if uint(n) <= uint(cap(s)) {
//	    s = s[:n]
//	  } else {
//	    s = growslice(T, s.ptr, n, s.cap, l2, T)
//	  }
//	  // clear the new portion of the underlying array.
//	  hp := &s[len(s)-l2]
//	  hn := l2 * sizeof(T)
//	  memclr(hp, hn)
//	}
//	s
//
// If T has pointers, the final memclr can go inside the "then" branch, as
// growslice will have done the clearing for us.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into a uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.Args[1].

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// n := s.len + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(s.cap)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nodes = append(nodes, nif)

	// hp := &s[s.len - l2]
	// TODO: &s[s.len] - hn?
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// growslice will have cleared the new entries, so only
		// if growslice isn't called do we need to do the zeroing ourselves.
		nif.Body = append(nif.Body, clr...)
	} else {
		nodes = append(nodes, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}