github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/walk/range.go (about)

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"unicode/utf8"
     9  
    10  	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
    11  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
    12  	"github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata"
    13  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssagen"
    14  	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
    15  	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
    16  	"github.com/bir3/gocompiler/src/cmd/internal/sys"
    17  )
    18  
    19  func cheapComputableIndex(width int64) bool {
    20  	switch ssagen.Arch.LinkArch.Family {
    21  	// MIPS does not have R+R addressing
    22  	// Arm64 may lack ability to generate this code in our assembler,
    23  	// but the architecture supports it.
    24  	case sys.PPC64, sys.S390X:
    25  		return width == 1
    26  	case sys.AMD64, sys.I386, sys.ARM64, sys.ARM:
    27  		switch width {
    28  		case 1, 2, 4, 8:
    29  			return true
    30  		}
    31  	}
    32  	return false
    33  }
    34  
// walkRange transforms various forms of ORANGE into
// simpler forms.  The result must be assigned back to n.
// Node n may also be modified in place, and may also be
// the returned node.
func walkRange(nrange *ir.RangeStmt) ir.Node {
	// "for k := range m { delete(m, k) }" becomes a single runtime
	// mapclear call instead of an iteration.
	if isMapClear(nrange) {
		return mapClear(nrange)
	}

	nfor := ir.NewForStmt(nrange.Pos(), nil, nil, nil, nil)
	nfor.SetInit(nrange.Init())
	nfor.Label = nrange.Label

	// variable name conventions:
	//	ohv1, hv1, hv2: hidden (old) val 1, 2
	//	ha, hit: hidden aggregate, iterator
	//	hn, hp: hidden len, pointer
	//	hb: hidden bool
	//	a, v1, v2: not hidden aggregate, val 1, 2

	a := nrange.X
	t := a.Type()
	lno := ir.SetPos(a)

	v1, v2 := nrange.Key, nrange.Value

	// A blank value ("for k, _ := range") is the same as no value.
	if ir.IsBlank(v2) {
		v2 = nil
	}

	// A blank key with no value ("for _ := range") is the same as no key.
	if ir.IsBlank(v1) && v2 == nil {
		v1 = nil
	}

	if v1 == nil && v2 != nil {
		base.Fatalf("walkRange: v2 != nil while v1 == nil")
	}

	// body holds the lowered per-iteration assignments; init holds
	// statements run once before the loop. Both are typechecked and
	// spliced into nfor after the switch.
	var body []ir.Node
	var init []ir.Node
	switch t.Kind() {
	default:
		base.Fatalf("walkRange")

	case types.TARRAY, types.TSLICE, types.TPTR: // TPTR is pointer-to-array
		// "for i := range a { a[i] = zero }" becomes a memclr call.
		if nn := arrayClear(nrange, v1, v2, a); nn != nil {
			base.Pos = lno
			return nn
		}

		// Element type of the iteration
		var elem *types.Type
		switch t.Kind() {
		case types.TSLICE, types.TARRAY:
			elem = t.Elem()
		case types.TPTR:
			elem = t.Elem().Elem()
		}

		// order.stmt arranged for a copy of the array/slice variable if needed.
		ha := a

		// hv1 is the hidden index, hn the hidden length.
		hv1 := typecheck.Temp(types.Types[types.TINT])
		hn := typecheck.Temp(types.Types[types.TINT])

		init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
		init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))

		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
		nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(1)))

		// for range ha { body }
		if v1 == nil {
			break
		}

		// for v1 := range ha { body }
		if v2 == nil {
			body = []ir.Node{rangeAssign(nrange, hv1)}
			break
		}

		// for v1, v2 := range ha { body }
		if cheapComputableIndex(elem.Size()) {
			// v1, v2 = hv1, ha[hv1]
			tmp := ir.NewIndexExpr(base.Pos, ha, hv1)
			tmp.SetBounded(true)
			body = []ir.Node{rangeAssign2(nrange, hv1, tmp)}
			break
		}

		// Slice to iterate over
		var hs ir.Node
		if t.IsSlice() {
			hs = ha
		} else {
			var arr ir.Node
			if t.IsPtr() {
				arr = ha
			} else {
				arr = typecheck.NodAddr(ha)
				arr.SetType(t.PtrTo())
				arr.SetTypecheck(1)
			}
			hs = ir.NewSliceExpr(base.Pos, ir.OSLICEARR, arr, nil, nil, nil)
			// old typechecker doesn't know OSLICEARR, so we set types explicitly
			hs.SetType(types.NewSlice(elem))
			hs.SetTypecheck(1)
		}

		// We use a "pointer" to keep track of where we are in the backing array
		// of the slice hs. This pointer starts at hs.ptr and gets incremented
		// by the element size each time through the loop.
		//
		// It's tricky, though, as on the last iteration this pointer gets
		// incremented to point past the end of the backing array. We can't
		// let the garbage collector see that final out-of-bounds pointer.
		//
		// To avoid this, we keep the "pointer" alternately in 2 variables, one
		// pointer typed and one uintptr typed. Most of the time it lives in the
		// regular pointer variable, but when it might be out of bounds (after it
		// has been incremented, but before the loop condition has been checked)
		// it lives briefly in the uintptr variable.
		//
		// hp contains the pointer version (of type *T, where T is the element type).
		// It is guaranteed to always be in range, keeps the backing store alive,
		// and is updated on stack copies. If a GC occurs when this function is
		// suspended at any safepoint, this variable ensures correct operation.
		//
		// hu contains the equivalent uintptr version. It may point past the
		// end, but doesn't keep the backing store alive and doesn't get updated
		// on a stack copy. If a GC occurs while this function is on the top of
		// the stack, then the last frame is scanned conservatively and hu will
		// act as a reference to the backing array to ensure it is not collected.
		//
		// The "pointer" we're moving across the backing array lives in one
		// or the other of hp and hu as the loop proceeds.
		//
		// hp is live during most of the body of the loop. But it isn't live
		// at the very top of the loop, when we haven't checked i<n yet, and
		// it could point off the end of the backing store.
		// hu is live only at the very top and very bottom of the loop.
		// In particular, only when it cannot possibly be live across a call.
		//
		// So we do
		//   hu = uintptr(unsafe.Pointer(hs.ptr))
		//   for i := 0; i < hs.len; i++ {
		//     hp = (*T)(unsafe.Pointer(hu))
		//     v1, v2 = i, *hp
		//     ... body of loop ...
		//     hu = uintptr(unsafe.Pointer(hp)) + elemsize
		//   }
		//
		// Between the assignments to hu and the assignment back to hp, there
		// must not be any calls.

		// Pointer to current iteration position. Start on entry to the loop
		// with the pointer in hu.
		ptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, hs)
		ptr.SetBounded(true)
		huVal := ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], ptr)
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINTPTR], huVal)
		hu := typecheck.Temp(types.Types[types.TUINTPTR])
		init = append(init, ir.NewAssignStmt(base.Pos, hu, huVal))

		// Convert hu to hp at the top of the loop (after the condition has been checked).
		hpVal := ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], hu)
		hpVal.SetCheckPtr(true) // disable checkptr on this conversion
		hpVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, elem.PtrTo(), hpVal)
		hp := typecheck.Temp(elem.PtrTo())
		body = append(body, ir.NewAssignStmt(base.Pos, hp, hpVal))

		// Assign variables on the LHS of the range statement. Use *hp to get the element.
		e := ir.NewStarExpr(base.Pos, hp)
		e.SetBounded(true)
		a := rangeAssign2(nrange, hv1, e)
		body = append(body, a)

		// Advance pointer for next iteration of the loop.
		// This reads from hp and writes to hu.
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], hp)
		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINTPTR], huVal)
		as := ir.NewAssignStmt(base.Pos, hu, ir.NewBinaryExpr(base.Pos, ir.OADD, huVal, ir.NewInt(elem.Size())))
		nfor.Post = ir.NewBlockStmt(base.Pos, []ir.Node{nfor.Post, as})

	case types.TMAP:
		// order.stmt allocated the iterator for us.
		// we only use a once, so no copy needed.
		ha := a

		hit := nrange.Prealloc
		th := hit.Type()
		// depends on layout of iterator struct.
		// See cmd/compile/internal/reflectdata/reflect.go:MapIterType
		keysym := th.Field(0).Sym
		elemsym := th.Field(1).Sym // ditto

		fn := typecheck.LookupRuntime("mapiterinit")

		fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), th)
		init = append(init, mkcallstmt1(fn, reflectdata.RangeMapRType(base.Pos, nrange), ha, typecheck.NodAddr(hit)))
		// Iteration is finished when the iterator's key pointer is nil.
		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), typecheck.NodNil())

		fn = typecheck.LookupRuntime("mapiternext")
		fn = typecheck.SubstArgTypes(fn, th)
		nfor.Post = mkcallstmt1(fn, typecheck.NodAddr(hit))

		key := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym))
		if v1 == nil {
			body = nil
		} else if v2 == nil {
			body = []ir.Node{rangeAssign(nrange, key)}
		} else {
			elem := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, elemsym))
			body = []ir.Node{rangeAssign2(nrange, key, elem)}
		}

	case types.TCHAN:
		// order.stmt arranged for a copy of the channel variable.
		ha := a

		hv1 := typecheck.Temp(t.Elem())
		hv1.SetTypecheck(1)
		if t.Elem().HasPointers() {
			init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
		}
		hb := typecheck.Temp(types.Types[types.TBOOL])

		// Loop while the receive reports the channel is still open; the
		// receive itself is attached below as the condition's init.
		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(false))
		lhs := []ir.Node{hv1, hb}
		rhs := []ir.Node{ir.NewUnaryExpr(base.Pos, ir.ORECV, ha)}
		a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, lhs, rhs)
		a.SetTypecheck(1)
		nfor.Cond = ir.InitExpr([]ir.Node{a}, nfor.Cond)
		if v1 == nil {
			body = nil
		} else {
			body = []ir.Node{rangeAssign(nrange, hv1)}
		}
		// Zero hv1. This prevents hv1 from being the sole, inaccessible
		// reference to an otherwise GC-able value during the next channel receive.
		// See issue 15281.
		body = append(body, ir.NewAssignStmt(base.Pos, hv1, nil))

	case types.TSTRING:
		// Transform string range statements like "for v1, v2 = range a" into
		//
		// ha := a
		// for hv1 := 0; hv1 < len(ha); {
		//   hv1t := hv1
		//   hv2 := rune(ha[hv1])
		//   if hv2 < utf8.RuneSelf {
		//      hv1++
		//   } else {
		//      hv2, hv1 = decoderune(ha, hv1)
		//   }
		//   v1, v2 = hv1t, hv2
		//   // original body
		// }

		// order.stmt arranged for a copy of the string variable.
		ha := a

		hv1 := typecheck.Temp(types.Types[types.TINT])
		hv1t := typecheck.Temp(types.Types[types.TINT])
		hv2 := typecheck.Temp(types.RuneType)

		// hv1 := 0
		init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))

		// hv1 < len(ha)
		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha))

		if v1 != nil {
			// hv1t = hv1
			body = append(body, ir.NewAssignStmt(base.Pos, hv1t, hv1))
		}

		// hv2 := rune(ha[hv1])
		nind := ir.NewIndexExpr(base.Pos, ha, hv1)
		nind.SetBounded(true)
		body = append(body, ir.NewAssignStmt(base.Pos, hv2, typecheck.Conv(nind, types.RuneType)))

		// if hv2 < utf8.RuneSelf
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, ir.NewInt(utf8.RuneSelf))

		// hv1++
		nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(1)))}

		// } else {
		// hv2, hv1 = decoderune(ha, hv1)
		fn := typecheck.LookupRuntime("decoderune")
		call := mkcall1(fn, fn.Type().Results(), &nif.Else, ha, hv1)
		a := ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{hv2, hv1}, []ir.Node{call})
		nif.Else.Append(a)

		body = append(body, nif)

		if v1 != nil {
			if v2 != nil {
				// v1, v2 = hv1t, hv2
				body = append(body, rangeAssign2(nrange, hv1t, hv2))
			} else {
				// v1 = hv1t
				body = append(body, rangeAssign(nrange, hv1t))
			}
		}
	}

	// Typecheck the generated statements and assemble the final loop:
	// init before the loop, body before the original range body.
	typecheck.Stmts(init)

	nfor.PtrInit().Append(init...)

	typecheck.Stmts(nfor.Cond.Init())

	nfor.Cond = typecheck.Expr(nfor.Cond)
	nfor.Cond = typecheck.DefaultLit(nfor.Cond, nil)
	nfor.Post = typecheck.Stmt(nfor.Post)
	typecheck.Stmts(body)
	nfor.Body.Append(body...)
	nfor.Body.Append(nrange.Body...)

	var n ir.Node = nfor

	n = walkStmt(n)

	base.Pos = lno
	return n
}
   365  
   366  // rangeAssign returns "n.Key = key".
   367  func rangeAssign(n *ir.RangeStmt, key ir.Node) ir.Node {
   368  	key = rangeConvert(n, n.Key.Type(), key, n.KeyTypeWord, n.KeySrcRType)
   369  	return ir.NewAssignStmt(n.Pos(), n.Key, key)
   370  }
   371  
   372  // rangeAssign2 returns "n.Key, n.Value = key, value".
   373  func rangeAssign2(n *ir.RangeStmt, key, value ir.Node) ir.Node {
   374  	// Use OAS2 to correctly handle assignments
   375  	// of the form "v1, a[v1] = range".
   376  	key = rangeConvert(n, n.Key.Type(), key, n.KeyTypeWord, n.KeySrcRType)
   377  	value = rangeConvert(n, n.Value.Type(), value, n.ValueTypeWord, n.ValueSrcRType)
   378  	return ir.NewAssignListStmt(n.Pos(), ir.OAS2, []ir.Node{n.Key, n.Value}, []ir.Node{key, value})
   379  }
   380  
   381  // rangeConvert returns src, converted to dst if necessary. If a
   382  // conversion is necessary, then typeWord and srcRType are copied to
   383  // their respective ConvExpr fields.
   384  func rangeConvert(nrange *ir.RangeStmt, dst *types.Type, src, typeWord, srcRType ir.Node) ir.Node {
   385  	src = typecheck.Expr(src)
   386  	if dst.Kind() == types.TBLANK || types.Identical(dst, src.Type()) {
   387  		return src
   388  	}
   389  
   390  	n := ir.NewConvExpr(nrange.Pos(), ir.OCONV, dst, src)
   391  	n.TypeWord = typeWord
   392  	n.SrcRType = srcRType
   393  	return typecheck.Expr(n)
   394  }
   395  
   396  // isMapClear checks if n is of the form:
   397  //
   398  //	for k := range m {
   399  //		delete(m, k)
   400  //	}
   401  //
   402  // where == for keys of map m is reflexive.
   403  func isMapClear(n *ir.RangeStmt) bool {
   404  	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
   405  		return false
   406  	}
   407  
   408  	t := n.X.Type()
   409  	if n.Op() != ir.ORANGE || t.Kind() != types.TMAP || n.Key == nil || n.Value != nil {
   410  		return false
   411  	}
   412  
   413  	k := n.Key
   414  	// Require k to be a new variable name.
   415  	if !ir.DeclaredBy(k, n) {
   416  		return false
   417  	}
   418  
   419  	if len(n.Body) != 1 {
   420  		return false
   421  	}
   422  
   423  	stmt := n.Body[0] // only stmt in body
   424  	if stmt == nil || stmt.Op() != ir.ODELETE {
   425  		return false
   426  	}
   427  
   428  	m := n.X
   429  	if delete := stmt.(*ir.CallExpr); !ir.SameSafeExpr(delete.Args[0], m) || !ir.SameSafeExpr(delete.Args[1], k) {
   430  		return false
   431  	}
   432  
   433  	// Keys where equality is not reflexive can not be deleted from maps.
   434  	if !types.IsReflexive(t.Key()) {
   435  		return false
   436  	}
   437  
   438  	return true
   439  }
   440  
   441  // mapClear constructs a call to runtime.mapclear for the map m.
   442  func mapClear(nrange *ir.RangeStmt) ir.Node {
   443  	m := nrange.X
   444  	origPos := ir.SetPos(m)
   445  	defer func() { base.Pos = origPos }()
   446  
   447  	t := m.Type()
   448  
   449  	// instantiate mapclear(typ *type, hmap map[any]any)
   450  	fn := typecheck.LookupRuntime("mapclear")
   451  	fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
   452  	n := mkcallstmt1(fn, reflectdata.RangeMapRType(base.Pos, nrange), m)
   453  	return walkStmt(typecheck.Stmt(n))
   454  }
   455  
// Lower n into runtime·memclr if possible, for
// fast zeroing of slices and arrays (issue 5373).
// Look for instances of
//
//	for i := range a {
//		a[i] = zero
//	}
//
// in which the evaluation of a is side-effect-free.
//
// Parameters are as in walkRange: "for v1, v2 = range a".
// Returns nil when the loop does not match the pattern.
func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
	// The rewrite is an optimization; skip it with -N or under instrumentation.
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return nil
	}

	// The pattern requires exactly one range variable (the index).
	if v1 == nil || v2 != nil {
		return nil
	}

	if len(loop.Body) != 1 || loop.Body[0] == nil {
		return nil
	}

	stmt1 := loop.Body[0] // only stmt in body
	if stmt1.Op() != ir.OAS {
		return nil
	}
	stmt := stmt1.(*ir.AssignStmt)
	if stmt.X.Op() != ir.OINDEX {
		return nil
	}
	lhs := stmt.X.(*ir.IndexExpr)
	x := lhs.X
	// For "range ptr-to-array", the body indexes through an implicit
	// dereference; unwrap it so x can be compared against a below.
	if a.Type().IsPtr() && a.Type().Elem().IsArray() {
		if s, ok := x.(*ir.StarExpr); ok && s.Op() == ir.ODEREF {
			x = s.X
		}
	}

	if !ir.SameSafeExpr(x, a) || !ir.SameSafeExpr(lhs.Index, v1) {
		return nil
	}

	// Only an assignment of zero to nonzero-size elements can become a memclr.
	elemsize := typecheck.RangeExprType(loop.X.Type()).Elem().Size()
	if elemsize <= 0 || !ir.IsZero(stmt.Y) {
		return nil
	}

	// Convert to
	// if len(a) != 0 {
	// 	hp = &a[0]
	// 	hn = len(a)*sizeof(elem(a))
	// 	memclr{NoHeap,Has}Pointers(hp, hn)
	// 	i = len(a) - 1
	// }
	n := ir.NewIfStmt(base.Pos, nil, nil, nil)
	n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0))

	// hp = &a[0]
	hp := typecheck.Temp(types.Types[types.TUNSAFEPTR])

	// The index is provably in range (guarded by len(a) != 0 above).
	ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(0))
	ix.SetBounded(true)
	addr := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
	n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))

	// hn = len(a) * sizeof(elem(a))
	hn := typecheck.Temp(types.Types[types.TUINTPTR])
	mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
	n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))

	var fn ir.Node
	if a.Type().Elem().HasPointers() {
		// memclrHasPointers(hp, hn)
		// Clearing pointers is a write-barrier-relevant operation.
		ir.CurFunc.SetWBPos(stmt.Pos())
		fn = mkcallstmt("memclrHasPointers", hp, hn)
	} else {
		// memclrNoHeapPointers(hp, hn)
		fn = mkcallstmt("memclrNoHeapPointers", hp, hn)
	}

	n.Body.Append(fn)

	// i = len(a) - 1
	// Leave the index variable with the value it would have after the loop.
	v1 = ir.NewAssignStmt(base.Pos, v1, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(1)))

	n.Body.Append(v1)

	n.Cond = typecheck.Expr(n.Cond)
	n.Cond = typecheck.DefaultLit(n.Cond, nil)
	typecheck.Stmts(n.Body)
	return walkStmt(n)
}
   550  
   551  // addptr returns (*T)(uintptr(p) + n).
   552  func addptr(p ir.Node, n int64) ir.Node {
   553  	t := p.Type()
   554  
   555  	p = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, p)
   556  	p.SetType(types.Types[types.TUINTPTR])
   557  
   558  	p = ir.NewBinaryExpr(base.Pos, ir.OADD, p, ir.NewInt(n))
   559  
   560  	p = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, p)
   561  	p.SetType(t)
   562  
   563  	return p
   564  }