github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"github.com/gagliardetto/golang-go/cmd/compile/internal/types"
     9  	"github.com/gagliardetto/golang-go/cmd/internal/objabi"
    10  	"github.com/gagliardetto/golang-go/cmd/internal/sys"
    11  	"encoding/binary"
    12  	"fmt"
    13  	"strings"
    14  )
    15  
     16  // This constant is known to the runtime.
    17  const tmpstringbufsize = 32
    18  const zeroValSize = 1024 // must match value of runtime/map.go:maxZero
    19  
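        // walk walks the body of fn: it reports any local variables that were
        // declared but not used, then rewrites the statements and expressions
        // of the body (via walkstmt and walkexpr) into the lower-level form
        // expected by the back end, finishing with zeroResults and heapmoves.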
    20  func walk(fn *Node) {
    21  	Curfn = fn
    22  
    23  	if Debug['W'] != 0 {
    24  		s := fmt.Sprintf("\nbefore walk %v", Curfn.Func.Nname.Sym)
    25  		dumplist(s, Curfn.Nbody)
    26  	}
    27  
    28  	lno := lineno
    29  
    30  	// Final typecheck for any unused variables.
    31  	for i, ln := range fn.Func.Dcl {
    32  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    33  			ln = typecheck(ln, ctxExpr|ctxAssign)
    34  			fn.Func.Dcl[i] = ln
    35  		}
    36  	}
    37  
    38  	// Propagate the used flag of each typeswitch variable up to the NONAME in its definition.
    39  	for _, ln := range fn.Func.Dcl {
    40  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    41  			ln.Name.Defn.Left.Name.SetUsed(true)
    42  		}
    43  	}
    44  
    45  	for _, ln := range fn.Func.Dcl {
    46  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    47  			continue
    48  		}
    49  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    50  			if defn.Left.Name.Used() {
    51  				continue
    52  			}
    53  			yyerrorl(defn.Left.Pos, "%v declared but not used", ln.Sym)
    54  			defn.Left.Name.SetUsed(true) // suppress repeats
    55  		} else {
    56  			yyerrorl(ln.Pos, "%v declared but not used", ln.Sym)
    57  		}
    58  	}
    59  
    60  	lineno = lno
    61  	if nerrors != 0 {
    62  		return
    63  	}
    64  	walkstmtlist(Curfn.Nbody.Slice())
    65  	if Debug['W'] != 0 {
    66  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    67  		dumplist(s, Curfn.Nbody)
    68  	}
    69  
    70  	zeroResults()
    71  	heapmoves()
    72  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    73  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    74  		dumplist(s, Curfn.Func.Enter)
    75  	}
    76  }
    77  
    78  func walkstmtlist(s []*Node) {
    79  	for i := range s {
    80  		s[i] = walkstmt(s[i])
    81  	}
    82  }
    83  
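        // samelist reports whether a and b contain exactly the same nodes in the same order.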
    84  func samelist(a, b []*Node) bool {
    85  	if len(a) != len(b) {
    86  		return false
    87  	}
    88  	for i, n := range a {
    89  		if n != b[i] {
    90  			return false
    91  		}
    92  	}
    93  	return true
    94  }
    95  
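        // paramoutheap reports whether fn has a result parameter that is a stack
        // copy of a heap-allocated parameter or whose address has been taken.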
    96  func paramoutheap(fn *Node) bool {
    97  	for _, ln := range fn.Func.Dcl {
    98  		switch ln.Class() {
    99  		case PPARAMOUT:
   100  			if ln.isParamStackCopy() || ln.Name.Addrtaken() {
   101  				return true
   102  			}
   103  
   104  		case PAUTO:
   105  			// stop early - parameters are over
   106  			return false
   107  		}
   108  	}
   109  
   110  	return false
   111  }
   112  
   113  // The result of walkstmt MUST be assigned back to n, e.g.
   114  // 	n.Left = walkstmt(n.Left)
   115  func walkstmt(n *Node) *Node {
   116  	if n == nil {
   117  		return n
   118  	}
   119  
   120  	setlineno(n)
   121  
   122  	walkstmtlist(n.Ninit.Slice())
   123  
   124  	switch n.Op {
   125  	default:
   126  		if n.Op == ONAME {
   127  			yyerror("%v is not a top level statement", n.Sym)
   128  		} else {
   129  			yyerror("%v is not a top level statement", n.Op)
   130  		}
   131  		Dump("nottop", n)
   132  
   133  	case OAS,
   134  		OASOP,
   135  		OAS2,
   136  		OAS2DOTTYPE,
   137  		OAS2RECV,
   138  		OAS2FUNC,
   139  		OAS2MAPR,
   140  		OCLOSE,
   141  		OCOPY,
   142  		OCALLMETH,
   143  		OCALLINTER,
   144  		OCALL,
   145  		OCALLFUNC,
   146  		ODELETE,
   147  		OSEND,
   148  		OPRINT,
   149  		OPRINTN,
   150  		OPANIC,
   151  		OEMPTY,
   152  		ORECOVER,
   153  		OGETG:
   154  		if n.Typecheck() == 0 {
   155  			Fatalf("missing typecheck: %+v", n)
   156  		}
   157  		wascopy := n.Op == OCOPY
   158  		init := n.Ninit
   159  		n.Ninit.Set(nil)
   160  		n = walkexpr(n, &init)
   161  		n = addinit(n, init.Slice())
   162  		if wascopy && n.Op == OCONVNOP {
   163  			n.Op = OEMPTY // don't leave plain values as statements.
   164  		}
   165  
   166  	// special case for a receive where we throw away
   167  	// the value received.
   168  	case ORECV:
   169  		if n.Typecheck() == 0 {
   170  			Fatalf("missing typecheck: %+v", n)
   171  		}
   172  		init := n.Ninit
   173  		n.Ninit.Set(nil)
   174  
   175  		n.Left = walkexpr(n.Left, &init)
   176  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   177  		n = walkexpr(n, &init)
   178  
   179  		n = addinit(n, init.Slice())
   180  
   181  	case OBREAK,
   182  		OCONTINUE,
   183  		OFALL,
   184  		OGOTO,
   185  		OLABEL,
   186  		ODCLCONST,
   187  		ODCLTYPE,
   188  		OCHECKNIL,
   189  		OVARDEF,
   190  		OVARKILL,
   191  		OVARLIVE:
   192  		break
   193  
   194  	case ODCL:
   195  		v := n.Left
   196  		if v.Class() == PAUTOHEAP {
   197  			if compiling_runtime {
   198  				yyerror("%v escapes to heap, not allowed in runtime", v)
   199  			}
   200  			if prealloc[v] == nil {
   201  				prealloc[v] = callnew(v.Type)
   202  			}
   203  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   204  			nn.SetColas(true)
   205  			nn = typecheck(nn, ctxStmt)
   206  			return walkstmt(nn)
   207  		}
   208  
   209  	case OBLOCK:
   210  		walkstmtlist(n.List.Slice())
   211  
   212  	case OCASE:
   213  		yyerror("case statement out of place")
   214  
   215  	case ODEFER:
   216  		Curfn.Func.SetHasDefer(true)
   217  		Curfn.Func.numDefers++
   218  		if Curfn.Func.numDefers > maxOpenDefers {
   219  			// Don't allow open-coded defers if there are more than
   220  			// 8 defers in the function, since we use a single
   221  			// byte to record active defers.
   222  			Curfn.Func.SetOpenCodedDeferDisallowed(true)
   223  		}
   224  		if n.Esc != EscNever {
   225  			// If n.Esc is not EscNever, then this defer occurs in a loop,
   226  			// so open-coded defers cannot be used in this function.
   227  			Curfn.Func.SetOpenCodedDeferDisallowed(true)
   228  		}
   229  		fallthrough
   230  	case OGO:
   231  		switch n.Left.Op {
   232  		case OPRINT, OPRINTN:
   233  			n.Left = wrapCall(n.Left, &n.Ninit)
   234  
   235  		case ODELETE:
   236  			if mapfast(n.Left.List.First().Type) == mapslow {
   237  				n.Left = wrapCall(n.Left, &n.Ninit)
   238  			} else {
   239  				n.Left = walkexpr(n.Left, &n.Ninit)
   240  			}
   241  
   242  		case OCOPY:
   243  			n.Left = copyany(n.Left, &n.Ninit, true)
   244  
   245  		default:
   246  			n.Left = walkexpr(n.Left, &n.Ninit)
   247  		}
   248  
   249  	case OFOR, OFORUNTIL:
   250  		if n.Left != nil {
   251  			walkstmtlist(n.Left.Ninit.Slice())
   252  			init := n.Left.Ninit
   253  			n.Left.Ninit.Set(nil)
   254  			n.Left = walkexpr(n.Left, &init)
   255  			n.Left = addinit(n.Left, init.Slice())
   256  		}
   257  
   258  		n.Right = walkstmt(n.Right)
   259  		if n.Op == OFORUNTIL {
   260  			walkstmtlist(n.List.Slice())
   261  		}
   262  		walkstmtlist(n.Nbody.Slice())
   263  
   264  	case OIF:
   265  		n.Left = walkexpr(n.Left, &n.Ninit)
   266  		walkstmtlist(n.Nbody.Slice())
   267  		walkstmtlist(n.Rlist.Slice())
   268  
   269  	case ORETURN:
   270  		Curfn.Func.numReturns++
   271  		if n.List.Len() == 0 {
   272  			break
   273  		}
   274  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   275  			// assign to the function out parameters,
   276  			// so that reorder3 can fix up conflicts
   277  			var rl []*Node
   278  
   279  			for _, ln := range Curfn.Func.Dcl {
   280  				cl := ln.Class()
   281  				if cl == PAUTO || cl == PAUTOHEAP {
   282  					break
   283  				}
   284  				if cl == PPARAMOUT {
   285  					if ln.isParamStackCopy() {
   286  						ln = walkexpr(typecheck(nod(ODEREF, ln.Name.Param.Heapaddr, nil), ctxExpr), nil)
   287  					}
   288  					rl = append(rl, ln)
   289  				}
   290  			}
   291  
   292  			if got, want := n.List.Len(), len(rl); got != want {
   293  				// order should have rewritten multi-value function calls
   294  				// with explicit OAS2FUNC nodes.
   295  				Fatalf("expected %v return arguments, have %v", want, got)
   296  			}
   297  
   298  			if samelist(rl, n.List.Slice()) {
   299  				// special return in disguise
   300  				// TODO(josharian, 1.12): is "special return" still relevant?
   301  				// Tests still pass w/o this. See comments on https://go-review.googlesource.com/c/go/+/118318
   302  				walkexprlist(n.List.Slice(), &n.Ninit)
   303  				n.List.Set(nil)
   304  
   305  				break
   306  			}
   307  
   308  			// move function calls out, to make reorder3's job easier.
   309  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   310  
   311  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   312  			n.List.Set(reorder3(ll))
   313  			break
   314  		}
   315  		walkexprlist(n.List.Slice(), &n.Ninit)
   316  
   317  		// For each return parameter (lhs), assign the corresponding result (rhs).
   318  		lhs := Curfn.Type.Results()
   319  		rhs := n.List.Slice()
   320  		res := make([]*Node, lhs.NumFields())
   321  		for i, nl := range lhs.FieldSlice() {
   322  			nname := asNode(nl.Nname)
   323  			if nname.isParamHeapCopy() {
   324  				nname = nname.Name.Param.Stackcopy
   325  			}
   326  			a := nod(OAS, nname, rhs[i])
   327  			res[i] = convas(a, &n.Ninit)
   328  		}
   329  		n.List.Set(res)
   330  
   331  	case ORETJMP:
   332  		break
   333  
   334  	case OINLMARK:
   335  		break
   336  
   337  	case OSELECT:
   338  		walkselect(n)
   339  
   340  	case OSWITCH:
   341  		walkswitch(n)
   342  
   343  	case ORANGE:
   344  		n = walkrange(n)
   345  	}
   346  
   347  	if n.Op == ONAME {
   348  		Fatalf("walkstmt ended up with name: %+v", n)
   349  	}
   350  	return n
   351  }
   352  
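        // isSmallMakeSlice reports whether n is a make([]T, len, cap) whose length
        // and capacity are small integer constants, so that the backing array stays
        // below maxImplicitStackVarSize and can be considered for stack allocation.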
   353  func isSmallMakeSlice(n *Node) bool {
   354  	if n.Op != OMAKESLICE {
   355  		return false
   356  	}
   357  	l := n.Left
   358  	r := n.Right
   359  	if r == nil {
   360  		r = l
   361  	}
   362  	t := n.Type
   363  
   364  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < maxImplicitStackVarSize/t.Elem().Width)
   365  }
   366  
   367  // walk the whole tree of the body of an
   368  // expression or simple statement.
    369  // the types of expressions are calculated.
   370  // compile-time constants are evaluated.
   371  // complex side effects like statements are appended to init
   372  func walkexprlist(s []*Node, init *Nodes) {
   373  	for i := range s {
   374  		s[i] = walkexpr(s[i], init)
   375  	}
   376  }
   377  
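        // walkexprlistsafe is like walkexprlist, but it first rewrites each
        // expression with safeexpr so that it can be referenced again later
        // without re-evaluating its side effects.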
   378  func walkexprlistsafe(s []*Node, init *Nodes) {
   379  	for i, n := range s {
   380  		s[i] = safeexpr(n, init)
   381  		s[i] = walkexpr(s[i], init)
   382  	}
   383  }
   384  
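        // walkexprlistcheap is like walkexprlist, but it first runs each
        // expression through cheapexpr, copying it into a temporary when it is
        // not already cheap to evaluate repeatedly.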
   385  func walkexprlistcheap(s []*Node, init *Nodes) {
   386  	for i, n := range s {
   387  		s[i] = cheapexpr(n, init)
   388  		s[i] = walkexpr(s[i], init)
   389  	}
   390  }
   391  
   392  // convFuncName builds the runtime function name for interface conversion.
   393  // It also reports whether the function expects the data by address.
   394  // Not all names are possible. For example, we never generate convE2E or convE2I.
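        // For example, a pointer-free 4-byte type with 4-byte alignment converts
        // via convT32, while a type containing pointers (other than a lone string
        // or slice) converts to an empty interface via convT2E, passed by address.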
   395  func convFuncName(from, to *types.Type) (fnname string, needsaddr bool) {
   396  	tkind := to.Tie()
   397  	switch from.Tie() {
   398  	case 'I':
   399  		if tkind == 'I' {
   400  			return "convI2I", false
   401  		}
   402  	case 'T':
   403  		switch {
   404  		case from.Size() == 2 && from.Align == 2:
   405  			return "convT16", false
   406  		case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   407  			return "convT32", false
   408  		case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   409  			return "convT64", false
   410  		}
   411  		if sc := from.SoleComponent(); sc != nil {
   412  			switch {
   413  			case sc.IsString():
   414  				return "convTstring", false
   415  			case sc.IsSlice():
   416  				return "convTslice", false
   417  			}
   418  		}
   419  
   420  		switch tkind {
   421  		case 'E':
   422  			if !types.Haspointers(from) {
   423  				return "convT2Enoptr", true
   424  			}
   425  			return "convT2E", true
   426  		case 'I':
   427  			if !types.Haspointers(from) {
   428  				return "convT2Inoptr", true
   429  			}
   430  			return "convT2I", true
   431  		}
   432  	}
   433  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   434  	panic("unreachable")
   435  }
   436  
   437  // The result of walkexpr MUST be assigned back to n, e.g.
   438  // 	n.Left = walkexpr(n.Left, init)
   439  func walkexpr(n *Node, init *Nodes) *Node {
   440  	if n == nil {
   441  		return n
   442  	}
   443  
   444  	// Eagerly checkwidth all expressions for the back end.
   445  	if n.Type != nil && !n.Type.WidthCalculated() {
   446  		switch n.Type.Etype {
   447  		case TBLANK, TNIL, TIDEAL:
   448  		default:
   449  			checkwidth(n.Type)
   450  		}
   451  	}
   452  
   453  	if init == &n.Ninit {
   454  		// not okay to use n->ninit when walking n,
   455  		// because we might replace n with some other node
   456  		// and would lose the init list.
   457  		Fatalf("walkexpr init == &n->ninit")
   458  	}
   459  
   460  	if n.Ninit.Len() != 0 {
   461  		walkstmtlist(n.Ninit.Slice())
   462  		init.AppendNodes(&n.Ninit)
   463  	}
   464  
   465  	lno := setlineno(n)
   466  
   467  	if Debug['w'] > 1 {
   468  		Dump("before walk expr", n)
   469  	}
   470  
   471  	if n.Typecheck() != 1 {
   472  		Fatalf("missed typecheck: %+v", n)
   473  	}
   474  
   475  	if n.Type.IsUntyped() {
   476  		Fatalf("expression has untyped type: %+v", n)
   477  	}
   478  
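        	// References to a local that has been moved to the heap are
        	// rewritten into a dereference of its heap address.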
   479  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   480  		nn := nod(ODEREF, n.Name.Param.Heapaddr, nil)
   481  		nn = typecheck(nn, ctxExpr)
   482  		nn = walkexpr(nn, init)
   483  		nn.Left.SetNonNil(true)
   484  		return nn
   485  	}
   486  
   487  opswitch:
   488  	switch n.Op {
   489  	default:
   490  		Dump("walk", n)
   491  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   492  
   493  	case ONONAME, OEMPTY, OGETG, ONEWOBJ:
   494  
   495  	case OTYPE, ONAME, OLITERAL:
   496  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   497  		// Perhaps refactor to use Node.mayBeShared for these instead.
   498  		// If these return early, make sure to still call
   499  		// stringsym for constant strings.
   500  
   501  	case ONOT, ONEG, OPLUS, OBITNOT, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   502  		ODEREF, OSPTR, OITAB, OIDATA, OADDR:
   503  		n.Left = walkexpr(n.Left, init)
   504  
   505  	case OEFACE, OAND, OSUB, OMUL, OADD, OOR, OXOR, OLSH, ORSH:
   506  		n.Left = walkexpr(n.Left, init)
   507  		n.Right = walkexpr(n.Right, init)
   508  
   509  	case ODOT, ODOTPTR:
   510  		usefield(n)
   511  		n.Left = walkexpr(n.Left, init)
   512  
   513  	case ODOTTYPE, ODOTTYPE2:
   514  		n.Left = walkexpr(n.Left, init)
   515  		// Set up interface type addresses for back end.
   516  		n.Right = typename(n.Type)
   517  		if n.Op == ODOTTYPE {
   518  			n.Right.Right = typename(n.Left.Type)
   519  		}
   520  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   521  			n.List.Set1(itabname(n.Type, n.Left.Type))
   522  		}
   523  
   524  	case OLEN, OCAP:
   525  		if isRuneCount(n) {
   526  			// Replace len([]rune(string)) with runtime.countrunes(string).
   527  			n = mkcall("countrunes", n.Type, init, conv(n.Left.Left, types.Types[TSTRING]))
   528  			break
   529  		}
   530  
   531  		n.Left = walkexpr(n.Left, init)
   532  
   533  		// replace len(*[10]int) with 10.
   534  		// delayed until now to preserve side effects.
   535  		t := n.Left.Type
   536  
   537  		if t.IsPtr() {
   538  			t = t.Elem()
   539  		}
   540  		if t.IsArray() {
   541  			safeexpr(n.Left, init)
   542  			setintconst(n, t.NumElem())
   543  			n.SetTypecheck(1)
   544  		}
   545  
   546  	case OCOMPLEX:
   547  		// Use results from call expression as arguments for complex.
   548  		if n.Left == nil && n.Right == nil {
   549  			n.Left = n.List.First()
   550  			n.Right = n.List.Second()
   551  		}
   552  		n.Left = walkexpr(n.Left, init)
   553  		n.Right = walkexpr(n.Right, init)
   554  
   555  	case OEQ, ONE, OLT, OLE, OGT, OGE:
   556  		n = walkcompare(n, init)
   557  
   558  	case OANDAND, OOROR:
   559  		n.Left = walkexpr(n.Left, init)
   560  
   561  		// cannot put side effects from n.Right on init,
   562  		// because they cannot run before n.Left is checked.
   563  		// save elsewhere and store on the eventual n.Right.
   564  		var ll Nodes
   565  
   566  		n.Right = walkexpr(n.Right, &ll)
   567  		n.Right = addinit(n.Right, ll.Slice())
   568  		n = walkinrange(n, init)
   569  
   570  	case OPRINT, OPRINTN:
   571  		n = walkprint(n, init)
   572  
   573  	case OPANIC:
   574  		n = mkcall("gopanic", nil, init, n.Left)
   575  
   576  	case ORECOVER:
   577  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   578  
   579  	case OCLOSUREVAR, OCFUNC:
   580  
   581  	case OCALLINTER, OCALLFUNC, OCALLMETH:
   582  		if n.Op == OCALLINTER {
   583  			usemethod(n)
   584  		}
   585  
   586  		if n.Op == OCALLFUNC && n.Left.Op == OCLOSURE {
   587  			// Transform direct call of a closure to call of a normal function.
   588  			// transformclosure already did all preparation work.
   589  
   590  			// Prepend captured variables to argument list.
   591  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   592  
   593  			n.Left.Func.Enter.Set(nil)
   594  
   595  			// Replace OCLOSURE with ONAME/PFUNC.
   596  			n.Left = n.Left.Func.Closure.Func.Nname
   597  
   598  			// Update type of OCALLFUNC node.
    599  			// Output arguments haven't changed, but their offsets could have.
   600  			if n.Left.Type.NumResults() == 1 {
   601  				n.Type = n.Left.Type.Results().Field(0).Type
   602  			} else {
   603  				n.Type = n.Left.Type.Results()
   604  			}
   605  		}
   606  
   607  		walkCall(n, init)
   608  
   609  	case OAS, OASOP:
   610  		init.AppendNodes(&n.Ninit)
   611  
   612  		// Recognize m[k] = append(m[k], ...) so we can reuse
   613  		// the mapassign call.
   614  		mapAppend := n.Left.Op == OINDEXMAP && n.Right.Op == OAPPEND
   615  		if mapAppend && !samesafeexpr(n.Left, n.Right.List.First()) {
   616  			Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First())
   617  		}
   618  
   619  		n.Left = walkexpr(n.Left, init)
   620  		n.Left = safeexpr(n.Left, init)
   621  
   622  		if mapAppend {
   623  			n.Right.List.SetFirst(n.Left)
   624  		}
   625  
   626  		if n.Op == OASOP {
   627  			// Rewrite x op= y into x = x op y.
   628  			n.Right = nod(n.SubOp(), n.Left, n.Right)
   629  			n.Right = typecheck(n.Right, ctxExpr)
   630  
   631  			n.Op = OAS
   632  			n.ResetAux()
   633  		}
   634  
   635  		if oaslit(n, init) {
   636  			break
   637  		}
   638  
   639  		if n.Right == nil {
   640  			// TODO(austin): Check all "implicit zeroing"
   641  			break
   642  		}
   643  
   644  		if !instrumenting && isZero(n.Right) {
   645  			break
   646  		}
   647  
   648  		switch n.Right.Op {
   649  		default:
   650  			n.Right = walkexpr(n.Right, init)
   651  
   652  		case ORECV:
   653  			// x = <-c; n.Left is x, n.Right.Left is c.
   654  			// order.stmt made sure x is addressable.
   655  			n.Right.Left = walkexpr(n.Right.Left, init)
   656  
   657  			n1 := nod(OADDR, n.Left, nil)
   658  			r := n.Right.Left // the channel
   659  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   660  			n = walkexpr(n, init)
   661  			break opswitch
   662  
   663  		case OAPPEND:
   664  			// x = append(...)
   665  			r := n.Right
   666  			if r.Type.Elem().NotInHeap() {
   667  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   668  			}
   669  			switch {
   670  			case isAppendOfMake(r):
   671  				// x = append(y, make([]T, y)...)
   672  				r = extendslice(r, init)
   673  			case r.IsDDD():
   674  				r = appendslice(r, init) // also works for append(slice, string).
   675  			default:
   676  				r = walkappend(r, init, n)
   677  			}
   678  			n.Right = r
   679  			if r.Op == OAPPEND {
   680  				// Left in place for back end.
   681  				// Do not add a new write barrier.
   682  				// Set up address of type for back end.
   683  				r.Left = typename(r.Type.Elem())
   684  				break opswitch
   685  			}
   686  			// Otherwise, lowered for race detector.
   687  			// Treat as ordinary assignment.
   688  		}
   689  
   690  		if n.Left != nil && n.Right != nil {
   691  			n = convas(n, init)
   692  		}
   693  
   694  	case OAS2:
   695  		init.AppendNodes(&n.Ninit)
   696  		walkexprlistsafe(n.List.Slice(), init)
   697  		walkexprlistsafe(n.Rlist.Slice(), init)
   698  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   699  		ll = reorder3(ll)
   700  		n = liststmt(ll)
   701  
   702  	// a,b,... = fn()
   703  	case OAS2FUNC:
   704  		init.AppendNodes(&n.Ninit)
   705  
   706  		r := n.Right
   707  		walkexprlistsafe(n.List.Slice(), init)
   708  		r = walkexpr(r, init)
   709  
   710  		if isIntrinsicCall(r) {
   711  			n.Right = r
   712  			break
   713  		}
   714  		init.Append(r)
   715  
   716  		ll := ascompatet(n.List, r.Type)
   717  		n = liststmt(ll)
   718  
   719  	// x, y = <-c
   720  	// order.stmt made sure x is addressable or blank.
   721  	case OAS2RECV:
   722  		init.AppendNodes(&n.Ninit)
   723  
   724  		r := n.Right
   725  		walkexprlistsafe(n.List.Slice(), init)
   726  		r.Left = walkexpr(r.Left, init)
   727  		var n1 *Node
   728  		if n.List.First().isBlank() {
   729  			n1 = nodnil()
   730  		} else {
   731  			n1 = nod(OADDR, n.List.First(), nil)
   732  		}
   733  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   734  		ok := n.List.Second()
   735  		call := mkcall1(fn, types.Types[TBOOL], init, r.Left, n1)
   736  		n = nod(OAS, ok, call)
   737  		n = typecheck(n, ctxStmt)
   738  
   739  	// a,b = m[i]
   740  	case OAS2MAPR:
   741  		init.AppendNodes(&n.Ninit)
   742  
   743  		r := n.Right
   744  		walkexprlistsafe(n.List.Slice(), init)
   745  		r.Left = walkexpr(r.Left, init)
   746  		r.Right = walkexpr(r.Right, init)
   747  		t := r.Left.Type
   748  
   749  		fast := mapfast(t)
   750  		var key *Node
   751  		if fast != mapslow {
   752  			// fast versions take key by value
   753  			key = r.Right
   754  		} else {
   755  			// standard version takes key by reference
   756  			// order.expr made sure key is addressable.
   757  			key = nod(OADDR, r.Right, nil)
   758  		}
   759  
   760  		// from:
   761  		//   a,b = m[i]
   762  		// to:
   763  		//   var,b = mapaccess2*(t, m, i)
   764  		//   a = *var
   765  		a := n.List.First()
   766  
   767  		if w := t.Elem().Width; w <= zeroValSize {
   768  			fn := mapfn(mapaccess2[fast], t)
   769  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   770  		} else {
   771  			fn := mapfn("mapaccess2_fat", t)
   772  			z := zeroaddr(w)
   773  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   774  		}
   775  
   776  		// mapaccess2* returns a typed bool, but due to spec changes,
   777  		// the boolean result of i.(T) is now untyped so we make it the
   778  		// same type as the variable on the lhs.
   779  		if ok := n.List.Second(); !ok.isBlank() && ok.Type.IsBoolean() {
   780  			r.Type.Field(1).Type = ok.Type
   781  		}
   782  		n.Right = r
   783  		n.Op = OAS2FUNC
   784  
   785  		// don't generate a = *var if a is _
   786  		if !a.isBlank() {
   787  			var_ := temp(types.NewPtr(t.Elem()))
   788  			var_.SetTypecheck(1)
   789  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   790  			n.List.SetFirst(var_)
   791  			n = walkexpr(n, init)
   792  			init.Append(n)
   793  			n = nod(OAS, a, nod(ODEREF, var_, nil))
   794  		}
   795  
   796  		n = typecheck(n, ctxStmt)
   797  		n = walkexpr(n, init)
   798  
   799  	case ODELETE:
   800  		init.AppendNodes(&n.Ninit)
   801  		map_ := n.List.First()
   802  		key := n.List.Second()
   803  		map_ = walkexpr(map_, init)
   804  		key = walkexpr(key, init)
   805  
   806  		t := map_.Type
   807  		fast := mapfast(t)
   808  		if fast == mapslow {
   809  			// order.stmt made sure key is addressable.
   810  			key = nod(OADDR, key, nil)
   811  		}
   812  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   813  
   814  	case OAS2DOTTYPE:
   815  		walkexprlistsafe(n.List.Slice(), init)
   816  		n.Right = walkexpr(n.Right, init)
   817  
   818  	case OCONVIFACE:
   819  		n.Left = walkexpr(n.Left, init)
   820  
   821  		fromType := n.Left.Type
   822  		toType := n.Type
   823  
   824  		// typeword generates the type word of the interface value.
   825  		typeword := func() *Node {
   826  			if toType.IsEmptyInterface() {
   827  				return typename(fromType)
   828  			}
   829  			return itabname(fromType, toType)
   830  		}
   831  
   832  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
   833  		if isdirectiface(fromType) {
   834  			l := nod(OEFACE, typeword(), n.Left)
   835  			l.Type = toType
   836  			l.SetTypecheck(n.Typecheck())
   837  			n = l
   838  			break
   839  		}
   840  
   841  		if staticbytes == nil {
   842  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   843  			staticbytes.SetClass(PEXTERN)
   844  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   845  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   846  			zerobase.SetClass(PEXTERN)
   847  			zerobase.Type = types.Types[TUINTPTR]
   848  		}
   849  
   850  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   851  		// by using an existing addressable value identical to n.Left
   852  		// or creating one on the stack.
   853  		var value *Node
   854  		switch {
   855  		case fromType.Size() == 0:
   856  			// n.Left is zero-sized. Use zerobase.
   857  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   858  			value = zerobase
   859  		case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
   860  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   861  			n.Left = cheapexpr(n.Left, init)
   862  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   863  			value.SetBounded(true)
   864  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   865  			// n.Left is a readonly global; use it directly.
   866  			value = n.Left
   867  		case !fromType.IsInterface() && n.Esc == EscNone && fromType.Width <= 1024:
   868  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   869  			value = temp(fromType)
   870  			init.Append(typecheck(nod(OAS, value, n.Left), ctxStmt))
   871  		}
   872  
   873  		if value != nil {
   874  			// Value is identical to n.Left.
   875  			// Construct the interface directly: {type/itab, &value}.
   876  			l := nod(OEFACE, typeword(), typecheck(nod(OADDR, value, nil), ctxExpr))
   877  			l.Type = toType
   878  			l.SetTypecheck(n.Typecheck())
   879  			n = l
   880  			break
   881  		}
   882  
   883  		// Implement interface to empty interface conversion.
   884  		// tmp = i.itab
   885  		// if tmp != nil {
   886  		//    tmp = tmp.type
   887  		// }
   888  		// e = iface{tmp, i.data}
   889  		if toType.IsEmptyInterface() && fromType.IsInterface() && !fromType.IsEmptyInterface() {
   890  			// Evaluate the input interface.
   891  			c := temp(fromType)
   892  			init.Append(nod(OAS, c, n.Left))
   893  
   894  			// Get the itab out of the interface.
   895  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   896  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), ctxExpr)))
   897  
   898  			// Get the type out of the itab.
   899  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), ctxExpr), nil)
   900  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   901  			init.Append(nif)
   902  
   903  			// Build the result.
   904  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   905  			e.Type = toType // assign type manually, typecheck doesn't understand OEFACE.
   906  			e.SetTypecheck(1)
   907  			n = e
   908  			break
   909  		}
   910  
   911  		fnname, needsaddr := convFuncName(fromType, toType)
   912  
   913  		if !needsaddr && !fromType.IsInterface() {
   914  			// Use a specialized conversion routine that only returns a data pointer.
   915  			// ptr = convT2X(val)
   916  			// e = iface{typ/tab, ptr}
   917  			fn := syslook(fnname)
   918  			dowidth(fromType)
   919  			fn = substArgTypes(fn, fromType)
   920  			dowidth(fn.Type)
   921  			call := nod(OCALL, fn, nil)
   922  			call.List.Set1(n.Left)
   923  			call = typecheck(call, ctxExpr)
   924  			call = walkexpr(call, init)
   925  			call = safeexpr(call, init)
   926  			e := nod(OEFACE, typeword(), call)
   927  			e.Type = toType
   928  			e.SetTypecheck(1)
   929  			n = e
   930  			break
   931  		}
   932  
   933  		var tab *Node
   934  		if fromType.IsInterface() {
   935  			// convI2I
   936  			tab = typename(toType)
   937  		} else {
   938  			// convT2x
   939  			tab = typeword()
   940  		}
   941  
   942  		v := n.Left
   943  		if needsaddr {
   944  			// Types of large or unknown size are passed by reference.
   945  			// Orderexpr arranged for n.Left to be a temporary for all
   946  			// the conversions it could see. Comparison of an interface
   947  			// with a non-interface, especially in a switch on interface value
   948  			// with non-interface cases, is not visible to order.stmt, so we
   949  			// have to fall back on allocating a temp here.
   950  			if !islvalue(v) {
   951  				v = copyexpr(v, v.Type, init)
   952  			}
   953  			v = nod(OADDR, v, nil)
   954  		}
   955  
   956  		dowidth(fromType)
   957  		fn := syslook(fnname)
   958  		fn = substArgTypes(fn, fromType, toType)
   959  		dowidth(fn.Type)
   960  		n = nod(OCALL, fn, nil)
   961  		n.List.Set2(tab, v)
   962  		n = typecheck(n, ctxExpr)
   963  		n = walkexpr(n, init)
   964  
   965  	case OCONV, OCONVNOP:
   966  		n.Left = walkexpr(n.Left, init)
   967  		if n.Op == OCONVNOP && checkPtr(Curfn, 1) {
   968  			if n.Type.IsPtr() && n.Left.Type.Etype == TUNSAFEPTR { // unsafe.Pointer to *T
   969  				n = walkCheckPtrAlignment(n, init, nil)
   970  				break
   971  			}
   972  			if n.Type.Etype == TUNSAFEPTR && n.Left.Type.Etype == TUINTPTR { // uintptr to unsafe.Pointer
   973  				n = walkCheckPtrArithmetic(n, init)
   974  				break
   975  			}
   976  		}
   977  		param, result := rtconvfn(n.Left.Type, n.Type)
   978  		if param == Txxx {
   979  			break
   980  		}
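        		// The runtime conversion helper is named after the two types involved,
        		// e.g. on 386 an int64-to-float64 conversion becomes a call to int64tofloat64.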
   981  		fn := basicnames[param] + "to" + basicnames[result]
   982  		n = conv(mkcall(fn, types.Types[result], init, conv(n.Left, types.Types[param])), n.Type)
   983  
   984  	case OANDNOT:
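        		// Rewrite x &^ y into x & ^y.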
   985  		n.Left = walkexpr(n.Left, init)
   986  		n.Op = OAND
   987  		n.Right = nod(OBITNOT, n.Right, nil)
   988  		n.Right = typecheck(n.Right, ctxExpr)
   989  		n.Right = walkexpr(n.Right, init)
   990  
   991  	case ODIV, OMOD:
   992  		n.Left = walkexpr(n.Left, init)
   993  		n.Right = walkexpr(n.Right, init)
   994  
   995  		// rewrite complex div into function call.
   996  		et := n.Left.Type.Etype
   997  
   998  		if isComplex[et] && n.Op == ODIV {
   999  			t := n.Type
  1000  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1001  			n = conv(n, t)
  1002  			break
  1003  		}
  1004  
  1005  		// Nothing to do for float divisions.
  1006  		if isFloat[et] {
  1007  			break
  1008  		}
  1009  
  1010  		// rewrite 64-bit div and mod on 32-bit architectures.
  1011  		// TODO: Remove this code once we can introduce
  1012  		// runtime calls late in SSA processing.
  1013  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1014  			if n.Right.Op == OLITERAL {
  1015  				// Leave div/mod by constant powers of 2.
  1016  				// The SSA backend will handle those.
  1017  				switch et {
  1018  				case TINT64:
  1019  					c := n.Right.Int64()
  1020  					if c < 0 {
  1021  						c = -c
  1022  					}
  1023  					if c != 0 && c&(c-1) == 0 {
  1024  						break opswitch
  1025  					}
  1026  				case TUINT64:
  1027  					c := uint64(n.Right.Int64())
  1028  					if c != 0 && c&(c-1) == 0 {
  1029  						break opswitch
  1030  					}
  1031  				}
  1032  			}
  1033  			var fn string
  1034  			if et == TINT64 {
  1035  				fn = "int64"
  1036  			} else {
  1037  				fn = "uint64"
  1038  			}
  1039  			if n.Op == ODIV {
  1040  				fn += "div"
  1041  			} else {
  1042  				fn += "mod"
  1043  			}
  1044  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1045  		}
  1046  
  1047  	case OINDEX:
  1048  		n.Left = walkexpr(n.Left, init)
  1049  
  1050  		// save the original node for bounds checking elision.
   1051  		// If it was an ODIV/OMOD, walk might rewrite it.
  1052  		r := n.Right
  1053  
  1054  		n.Right = walkexpr(n.Right, init)
  1055  
  1056  		// if range of type cannot exceed static array bound,
  1057  		// disable bounds check.
  1058  		if n.Bounded() {
  1059  			break
  1060  		}
  1061  		t := n.Left.Type
  1062  		if t != nil && t.IsPtr() {
  1063  			t = t.Elem()
  1064  		}
  1065  		if t.IsArray() {
  1066  			n.SetBounded(bounded(r, t.NumElem()))
  1067  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1068  				Warn("index bounds check elided")
  1069  			}
  1070  			if smallintconst(n.Right) && !n.Bounded() {
  1071  				yyerror("index out of bounds")
  1072  			}
  1073  		} else if Isconst(n.Left, CTSTR) {
  1074  			n.SetBounded(bounded(r, int64(len(strlit(n.Left)))))
  1075  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1076  				Warn("index bounds check elided")
  1077  			}
  1078  			if smallintconst(n.Right) && !n.Bounded() {
  1079  				yyerror("index out of bounds")
  1080  			}
  1081  		}
  1082  
  1083  		if Isconst(n.Right, CTINT) {
  1084  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1085  				yyerror("index out of bounds")
  1086  			}
  1087  		}
  1088  
  1089  	case OINDEXMAP:
  1090  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
  1091  		n.Left = walkexpr(n.Left, init)
  1092  		n.Right = walkexpr(n.Right, init)
  1093  		map_ := n.Left
  1094  		key := n.Right
  1095  		t := map_.Type
  1096  		if n.IndexMapLValue() {
  1097  			// This m[k] expression is on the left-hand side of an assignment.
  1098  			fast := mapfast(t)
  1099  			if fast == mapslow {
  1100  				// standard version takes key by reference.
  1101  				// order.expr made sure key is addressable.
  1102  				key = nod(OADDR, key, nil)
  1103  			}
  1104  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1105  		} else {
  1106  			// m[k] is not the target of an assignment.
  1107  			fast := mapfast(t)
  1108  			if fast == mapslow {
  1109  				// standard version takes key by reference.
  1110  				// order.expr made sure key is addressable.
  1111  				key = nod(OADDR, key, nil)
  1112  			}
  1113  
  1114  			if w := t.Elem().Width; w <= zeroValSize {
  1115  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Elem()), init, typename(t), map_, key)
  1116  			} else {
  1117  				z := zeroaddr(w)
  1118  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Elem()), init, typename(t), map_, key, z)
  1119  			}
  1120  		}
  1121  		n.Type = types.NewPtr(t.Elem())
  1122  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1123  		n = nod(ODEREF, n, nil)
  1124  		n.Type = t.Elem()
  1125  		n.SetTypecheck(1)
  1126  
  1127  	case ORECV:
  1128  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1129  
  1130  	case OSLICEHEADER:
  1131  		n.Left = walkexpr(n.Left, init)
  1132  		n.List.SetFirst(walkexpr(n.List.First(), init))
  1133  		n.List.SetSecond(walkexpr(n.List.Second(), init))
  1134  
  1135  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1136  		checkSlice := checkPtr(Curfn, 1) && n.Op == OSLICE3ARR && n.Left.Op == OCONVNOP && n.Left.Left.Type.Etype == TUNSAFEPTR
  1137  		if checkSlice {
  1138  			n.Left.Left = walkexpr(n.Left.Left, init)
  1139  		} else {
  1140  			n.Left = walkexpr(n.Left, init)
  1141  		}
  1142  		low, high, max := n.SliceBounds()
  1143  		low = walkexpr(low, init)
  1144  		if low != nil && isZero(low) {
  1145  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1146  			low = nil
  1147  		}
  1148  		high = walkexpr(high, init)
  1149  		max = walkexpr(max, init)
  1150  		n.SetSliceBounds(low, high, max)
  1151  		if checkSlice {
  1152  			n.Left = walkCheckPtrAlignment(n.Left, init, max)
  1153  		}
  1154  		if n.Op.IsSlice3() {
  1155  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1156  				// Reduce x[i:j:cap(x)] to x[i:j].
  1157  				if n.Op == OSLICE3 {
  1158  					n.Op = OSLICE
  1159  				} else {
  1160  					n.Op = OSLICEARR
  1161  				}
  1162  				n = reduceSlice(n)
  1163  			}
  1164  		} else {
  1165  			n = reduceSlice(n)
  1166  		}
  1167  
  1168  	case ONEW:
  1169  		if n.Esc == EscNone {
  1170  			if n.Type.Elem().Width >= maxImplicitStackVarSize {
  1171  				Fatalf("large ONEW with EscNone: %v", n)
  1172  			}
  1173  			r := temp(n.Type.Elem())
  1174  			r = nod(OAS, r, nil) // zero temp
  1175  			r = typecheck(r, ctxStmt)
  1176  			init.Append(r)
  1177  			r = nod(OADDR, r.Left, nil)
  1178  			r = typecheck(r, ctxExpr)
  1179  			n = r
  1180  		} else {
  1181  			n = callnew(n.Type.Elem())
  1182  		}
  1183  
  1184  	case OADDSTR:
  1185  		n = addstr(n, init)
  1186  
  1187  	case OAPPEND:
  1188  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1189  		Fatalf("append outside assignment")
  1190  
  1191  	case OCOPY:
  1192  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1193  
  1194  		// cannot use chanfn - closechan takes any, not chan any
  1195  	case OCLOSE:
  1196  		fn := syslook("closechan")
  1197  
  1198  		fn = substArgTypes(fn, n.Left.Type)
  1199  		n = mkcall1(fn, nil, init, n.Left)
  1200  
  1201  	case OMAKECHAN:
  1202  		// When size fits into int, use makechan instead of
  1203  		// makechan64, which is faster and shorter on 32 bit platforms.
  1204  		size := n.Left
  1205  		fnname := "makechan64"
  1206  		argtype := types.Types[TINT64]
  1207  
  1208  		// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1209  		// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1210  		// will be handled by the negative range checks in makechan during runtime.
  1211  		if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1212  			fnname = "makechan"
  1213  			argtype = types.Types[TINT]
  1214  		}
  1215  
  1216  		n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
  1217  
  1218  	case OMAKEMAP:
  1219  		t := n.Type
  1220  		hmapType := hmap(t)
  1221  		hint := n.Left
  1222  
  1223  		// var h *hmap
  1224  		var h *Node
  1225  		if n.Esc == EscNone {
  1226  			// Allocate hmap on stack.
  1227  
  1228  			// var hv hmap
  1229  			hv := temp(hmapType)
  1230  			zero := nod(OAS, hv, nil)
  1231  			zero = typecheck(zero, ctxStmt)
  1232  			init.Append(zero)
  1233  			// h = &hv
  1234  			h = nod(OADDR, hv, nil)
  1235  
  1236  			// Allocate one bucket pointed to by hmap.buckets on stack if hint
  1237  			// is not larger than BUCKETSIZE. In case hint is larger than
  1238  			// BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
  1239  			// Maximum key and elem size is 128 bytes, larger objects
  1240  			// are stored with an indirection. So max bucket size is 2048+eps.
  1241  			if !Isconst(hint, CTINT) ||
  1242  				hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1243  				// var bv bmap
  1244  				bv := temp(bmap(t))
  1245  
  1246  				zero = nod(OAS, bv, nil)
  1247  				zero = typecheck(zero, ctxStmt)
  1248  				init.Append(zero)
  1249  
  1250  				// b = &bv
  1251  				b := nod(OADDR, bv, nil)
  1252  
  1253  				// h.buckets = b
  1254  				bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
  1255  				na := nod(OAS, nodSym(ODOT, h, bsym), b)
  1256  				na = typecheck(na, ctxStmt)
  1257  				init.Append(na)
  1258  			}
  1259  		}
  1260  
  1261  		if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1262  			// Handling make(map[any]any) and
  1263  			// make(map[any]any, hint) where hint <= BUCKETSIZE
   1264  			// specially allows for faster map initialization and
  1265  			// improves binary size by using calls with fewer arguments.
  1266  			// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
  1267  			// and no buckets will be allocated by makemap. Therefore,
  1268  			// no buckets need to be allocated in this code path.
  1269  			if n.Esc == EscNone {
  1270  				// Only need to initialize h.hash0 since
  1271  				// hmap h has been allocated on the stack already.
  1272  				// h.hash0 = fastrand()
  1273  				rand := mkcall("fastrand", types.Types[TUINT32], init)
  1274  				hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
  1275  				a := nod(OAS, nodSym(ODOT, h, hashsym), rand)
  1276  				a = typecheck(a, ctxStmt)
  1277  				a = walkexpr(a, init)
  1278  				init.Append(a)
  1279  				n = convnop(h, t)
  1280  			} else {
  1281  				// Call runtime.makehmap to allocate an
  1282  				// hmap on the heap and initialize hmap's hash0 field.
  1283  				fn := syslook("makemap_small")
  1284  				fn = substArgTypes(fn, t.Key(), t.Elem())
  1285  				n = mkcall1(fn, n.Type, init)
  1286  			}
  1287  		} else {
  1288  			if n.Esc != EscNone {
  1289  				h = nodnil()
  1290  			}
  1291  			// Map initialization with a variable or large hint is
  1292  			// more complicated. We therefore generate a call to
  1293  			// runtime.makemap to initialize hmap and allocate the
  1294  			// map buckets.
  1295  
  1296  			// When hint fits into int, use makemap instead of
  1297  			// makemap64, which is faster and shorter on 32 bit platforms.
  1298  			fnname := "makemap64"
  1299  			argtype := types.Types[TINT64]
  1300  
  1301  			// Type checking guarantees that TIDEAL hint is positive and fits in an int.
  1302  			// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
  1303  			// The case of hint overflow when converting TUINT or TUINTPTR to TINT
  1304  			// will be handled by the negative range checks in makemap during runtime.
  1305  			if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1306  				fnname = "makemap"
  1307  				argtype = types.Types[TINT]
  1308  			}
  1309  
  1310  			fn := syslook(fnname)
  1311  			fn = substArgTypes(fn, hmapType, t.Key(), t.Elem())
  1312  			n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h)
  1313  		}
  1314  
  1315  	case OMAKESLICE:
  1316  		l := n.Left
  1317  		r := n.Right
  1318  		if r == nil {
  1319  			r = safeexpr(l, init)
  1320  			l = r
  1321  		}
  1322  		t := n.Type
  1323  		if n.Esc == EscNone {
  1324  			if !isSmallMakeSlice(n) {
  1325  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1326  			}
  1327  			// var arr [r]T
  1328  			// n = arr[:l]
  1329  			i := indexconst(r)
  1330  			if i < 0 {
  1331  				Fatalf("walkexpr: invalid index %v", r)
  1332  			}
  1333  			t = types.NewArray(t.Elem(), i) // [r]T
  1334  			var_ := temp(t)
  1335  			a := nod(OAS, var_, nil) // zero temp
  1336  			a = typecheck(a, ctxStmt)
  1337  			init.Append(a)
  1338  			r := nod(OSLICE, var_, nil) // arr[:l]
  1339  			r.SetSliceBounds(nil, l, nil)
  1340  			r = conv(r, n.Type) // in case n.Type is named.
  1341  			r = typecheck(r, ctxExpr)
  1342  			r = walkexpr(r, init)
  1343  			n = r
  1344  		} else {
  1345  			// n escapes; set up a call to makeslice.
  1346  			// When len and cap can fit into int, use makeslice instead of
  1347  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1348  
  1349  			if t.Elem().NotInHeap() {
  1350  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1351  			}
  1352  
  1353  			len, cap := l, r
  1354  
  1355  			fnname := "makeslice64"
  1356  			argtype := types.Types[TINT64]
  1357  
  1358  			// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
  1359  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1360  			// will be handled by the negative range checks in makeslice during runtime.
  1361  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1362  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1363  				fnname = "makeslice"
  1364  				argtype = types.Types[TINT]
  1365  			}
  1366  
  1367  			m := nod(OSLICEHEADER, nil, nil)
  1368  			m.Type = t
  1369  
  1370  			fn := syslook(fnname)
  1371  			m.Left = mkcall1(fn, types.Types[TUNSAFEPTR], init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1372  			m.Left.SetNonNil(true)
  1373  			m.List.Set2(conv(len, types.Types[TINT]), conv(cap, types.Types[TINT]))
  1374  
  1375  			m = typecheck(m, ctxExpr)
  1376  			m = walkexpr(m, init)
  1377  			n = m
  1378  		}
  1379  
  1380  	case ORUNESTR:
  1381  		a := nodnil()
  1382  		if n.Esc == EscNone {
  1383  			t := types.NewArray(types.Types[TUINT8], 4)
  1384  			a = nod(OADDR, temp(t), nil)
  1385  		}
  1386  		// intstring(*[4]byte, rune)
  1387  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1388  
  1389  	case OBYTES2STR, ORUNES2STR:
  1390  		a := nodnil()
  1391  		if n.Esc == EscNone {
  1392  			// Create temporary buffer for string on stack.
  1393  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1394  			a = nod(OADDR, temp(t), nil)
  1395  		}
  1396  		fn := "slicebytetostring"
  1397  		if n.Op == ORUNES2STR {
  1398  			fn = "slicerunetostring"
  1399  		}
  1400  		// slicebytetostring(*[32]byte, []byte) string
  1401  		// slicerunetostring(*[32]byte, []rune) string
  1402  		n = mkcall(fn, n.Type, init, a, n.Left)
  1403  
  1404  	case OBYTES2STRTMP:
  1405  		n.Left = walkexpr(n.Left, init)
  1406  		if !instrumenting {
  1407  			// Let the backend handle OBYTES2STRTMP directly
  1408  			// to avoid a function call to slicebytetostringtmp.
  1409  			break
  1410  		}
  1411  		// slicebytetostringtmp([]byte) string
  1412  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1413  
  1414  	case OSTR2BYTES:
  1415  		s := n.Left
  1416  		if Isconst(s, CTSTR) {
  1417  			sc := strlit(s)
  1418  
  1419  			// Allocate a [n]byte of the right size.
  1420  			t := types.NewArray(types.Types[TUINT8], int64(len(sc)))
  1421  			var a *Node
  1422  			if n.Esc == EscNone && len(sc) <= int(maxImplicitStackVarSize) {
  1423  				a = nod(OADDR, temp(t), nil)
  1424  			} else {
  1425  				a = callnew(t)
  1426  			}
  1427  			p := temp(t.PtrTo()) // *[n]byte
  1428  			init.Append(typecheck(nod(OAS, p, a), ctxStmt))
  1429  
  1430  			// Copy from the static string data to the [n]byte.
  1431  			if len(sc) > 0 {
  1432  				as := nod(OAS,
  1433  					nod(ODEREF, p, nil),
  1434  					nod(ODEREF, convnop(nod(OSPTR, s, nil), t.PtrTo()), nil))
  1435  				as = typecheck(as, ctxStmt)
  1436  				as = walkstmt(as)
  1437  				init.Append(as)
  1438  			}
  1439  
  1440  			// Slice the [n]byte to a []byte.
  1441  			n.Op = OSLICEARR
  1442  			n.Left = p
  1443  			n = walkexpr(n, init)
  1444  			break
  1445  		}
  1446  
  1447  		a := nodnil()
  1448  		if n.Esc == EscNone {
  1449  			// Create temporary buffer for slice on stack.
  1450  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1451  			a = nod(OADDR, temp(t), nil)
  1452  		}
   1453  		// stringtoslicebyte(*[32]byte, string) []byte
  1454  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(s, types.Types[TSTRING]))
  1455  
  1456  	case OSTR2BYTESTMP:
  1457  		// []byte(string) conversion that creates a slice
  1458  		// referring to the actual string bytes.
  1459  		// This conversion is handled later by the backend and
  1460  		// is only for use by internal compiler optimizations
  1461  		// that know that the slice won't be mutated.
  1462  		// The only such case today is:
  1463  		// for i, c := range []byte(string)
  1464  		n.Left = walkexpr(n.Left, init)
  1465  
  1466  	case OSTR2RUNES:
  1467  		a := nodnil()
  1468  		if n.Esc == EscNone {
  1469  			// Create temporary buffer for slice on stack.
  1470  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1471  			a = nod(OADDR, temp(t), nil)
  1472  		}
  1473  		// stringtoslicerune(*[32]rune, string) []rune
  1474  		n = mkcall("stringtoslicerune", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1475  
  1476  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1477  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1478  			// n can be directly represented in the read-only data section.
  1479  			// Make direct reference to the static data. See issue 12841.
  1480  			vstat := staticname(n.Type)
  1481  			vstat.Name.SetReadonly(true)
  1482  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1483  			n = vstat
  1484  			n = typecheck(n, ctxExpr)
  1485  			break
  1486  		}
  1487  		var_ := temp(n.Type)
  1488  		anylit(n, var_, init)
  1489  		n = var_
  1490  
  1491  	case OSEND:
  1492  		n1 := n.Right
  1493  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1494  		n1 = walkexpr(n1, init)
  1495  		n1 = nod(OADDR, n1, nil)
  1496  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1497  
  1498  	case OCLOSURE:
  1499  		n = walkclosure(n, init)
  1500  
  1501  	case OCALLPART:
  1502  		n = walkpartialcall(n, init)
  1503  	}
  1504  
  1505  	// Expressions that are constant at run time but not
  1506  	// considered const by the language spec are not turned into
  1507  	// constants until walk. For example, if n is y%1 == 0, the
  1508  	// walk of y%1 may have replaced it by 0.
  1509  	// Check whether n with its updated args is itself now a constant.
  1510  	t := n.Type
  1511  	evconst(n)
  1512  	if n.Type != t {
  1513  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1514  	}
  1515  	if n.Op == OLITERAL {
  1516  		n = typecheck(n, ctxExpr)
  1517  		// Emit string symbol now to avoid emitting
  1518  		// any concurrently during the backend.
  1519  		if s, ok := n.Val().U.(string); ok {
  1520  			_ = stringsym(n.Pos, s)
  1521  		}
  1522  	}
  1523  
  1524  	updateHasCall(n)
  1525  
  1526  	if Debug['w'] != 0 && n != nil {
  1527  		Dump("after walk expr", n)
  1528  	}
  1529  
  1530  	lineno = lno
  1531  	return n
  1532  }
  1533  
  1534  // rtconvfn returns the parameter and result types that will be used by a
  1535  // runtime function to convert from type src to type dst. The runtime function
  1536  // name can be derived from the names of the returned types.
  1537  //
  1538  // If no such function is necessary, it returns (Txxx, Txxx).
  1539  func rtconvfn(src, dst *types.Type) (param, result types.EType) {
  1540  	if thearch.SoftFloat {
  1541  		return Txxx, Txxx
  1542  	}
  1543  
  1544  	switch thearch.LinkArch.Family {
  1545  	case sys.ARM, sys.MIPS:
  1546  		if src.IsFloat() {
  1547  			switch dst.Etype {
  1548  			case TINT64, TUINT64:
  1549  				return TFLOAT64, dst.Etype
  1550  			}
  1551  		}
  1552  		if dst.IsFloat() {
  1553  			switch src.Etype {
  1554  			case TINT64, TUINT64:
  1555  				return src.Etype, TFLOAT64
  1556  			}
  1557  		}
  1558  
  1559  	case sys.I386:
  1560  		if src.IsFloat() {
  1561  			switch dst.Etype {
  1562  			case TINT64, TUINT64:
  1563  				return TFLOAT64, dst.Etype
  1564  			case TUINT32, TUINT, TUINTPTR:
  1565  				return TFLOAT64, TUINT32
  1566  			}
  1567  		}
  1568  		if dst.IsFloat() {
  1569  			switch src.Etype {
  1570  			case TINT64, TUINT64:
  1571  				return src.Etype, TFLOAT64
  1572  			case TUINT32, TUINT, TUINTPTR:
  1573  				return TUINT32, TFLOAT64
  1574  			}
  1575  		}
  1576  	}
  1577  	return Txxx, Txxx
  1578  }
  1579  
  1580  // TODO(josharian): combine this with its caller and simplify
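        // reduceSlice drops slice bounds that have no effect: x[i:len(x)] becomes
        // x[i:], and a bare x[:] of a slice or string reduces to x itself.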
  1581  func reduceSlice(n *Node) *Node {
  1582  	low, high, max := n.SliceBounds()
  1583  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1584  		// Reduce x[i:len(x)] to x[i:].
  1585  		high = nil
  1586  	}
  1587  	n.SetSliceBounds(low, high, max)
  1588  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1589  		// Reduce x[:] to x.
  1590  		if Debug_slice > 0 {
  1591  			Warn("slice: omit slice operation")
  1592  		}
  1593  		return n.Left
  1594  	}
  1595  	return n
  1596  }
  1597  
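        // ascompatee1 builds the single assignment l = r, running it through
        // convas unless l is a map index expression (see the comment below).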
  1598  func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1599  	// convas will turn map assigns into function calls,
  1600  	// making it impossible for reorder3 to work.
  1601  	n := nod(OAS, l, r)
  1602  
  1603  	if l.Op == OINDEXMAP {
  1604  		return n
  1605  	}
  1606  
  1607  	return convas(n, init)
  1608  }
  1609  
  1610  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
   1611  	// check assignment of an expression list to
   1612  	// an expression list. called for
  1613  	//	expr-list = expr-list
  1614  
  1615  	// ensure order of evaluation for function calls
  1616  	for i := range nl {
  1617  		nl[i] = safeexpr(nl[i], init)
  1618  	}
  1619  	for i1 := range nr {
  1620  		nr[i1] = safeexpr(nr[i1], init)
  1621  	}
  1622  
  1623  	var nn []*Node
  1624  	i := 0
  1625  	for ; i < len(nl); i++ {
  1626  		if i >= len(nr) {
  1627  			break
  1628  		}
  1629  		// Do not generate 'x = x' during return. See issue 4014.
  1630  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1631  			continue
  1632  		}
  1633  		nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1634  	}
  1635  
  1636  	// cannot happen: caller checked that lists had same length
  1637  	if i < len(nl) || i < len(nr) {
  1638  		var nln, nrn Nodes
  1639  		nln.Set(nl)
  1640  		nrn.Set(nr)
  1641  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1642  	}
  1643  	return nn
  1644  }
  1645  
  1646  // fncall reports whether assigning an rvalue of type rt to an lvalue l might involve a function call.
  1647  func fncall(l *Node, rt *types.Type) bool {
  1648  	if l.HasCall() || l.Op == OINDEXMAP {
  1649  		return true
  1650  	}
  1651  	if types.Identical(l.Type, rt) {
  1652  		return false
  1653  	}
  1654  	// There might be a conversion required, which might involve a runtime call.
  1655  	return true
  1656  }
  1657  
   1658  // check assignment of a function's result type list to
   1659  // an expression list. called for
  1660  //	expr-list = func()
  1661  func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1662  	if nl.Len() != nr.NumFields() {
  1663  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1664  	}
  1665  
  1666  	var nn, mm Nodes
  1667  	for i, l := range nl.Slice() {
  1668  		if l.isBlank() {
  1669  			continue
  1670  		}
  1671  		r := nr.Field(i)
  1672  
  1673  		// Any assignment to an lvalue that might cause a function call must be
  1674  		// deferred until all the returned values have been read.
  1675  		if fncall(l, r.Type) {
  1676  			tmp := temp(r.Type)
  1677  			tmp = typecheck(tmp, ctxExpr)
  1678  			a := nod(OAS, l, tmp)
  1679  			a = convas(a, &mm)
  1680  			mm.Append(a)
  1681  			l = tmp
  1682  		}
  1683  
  1684  		res := nod(ORESULT, nil, nil)
  1685  		res.Xoffset = Ctxt.FixedFrameSize() + r.Offset
  1686  		res.Type = r.Type
  1687  		res.SetTypecheck(1)
  1688  
  1689  		a := nod(OAS, l, res)
  1690  		a = convas(a, &nn)
  1691  		updateHasCall(a)
  1692  		if a.HasCall() {
  1693  			Dump("ascompatet ucount", a)
  1694  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1695  		}
  1696  
  1697  		nn.Append(a)
  1698  	}
  1699  	return append(nn.Slice(), mm.Slice()...)
  1700  }
  1701  
  1702  // package all the arguments that match a ... T parameter into a []T.
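        // For example, for a function declared func f(prefix string, xs ...int)
        // and a call f("p", 1, 2, 3) written without ..., the trailing arguments
        // become the slice literal []int{1, 2, 3}; with no trailing arguments a
        // nil []int is passed instead.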
  1703  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1704  	esc := uint16(EscUnknown)
  1705  	if ddd != nil {
  1706  		esc = ddd.Esc
  1707  	}
  1708  	if len(args) == 0 {
  1709  		n := nodnil()
  1710  		n.Type = typ
  1711  		return n
  1712  	}
  1713  
  1714  	n := nod(OCOMPLIT, nil, typenod(typ))
  1715  	if ddd != nil && prealloc[ddd] != nil {
  1716  		prealloc[n] = prealloc[ddd] // temporary to use
  1717  	}
  1718  	n.List.Set(args)
  1719  	n.Esc = esc
  1720  	n = typecheck(n, ctxExpr)
  1721  	if n.Type == nil {
  1722  		Fatalf("mkdotargslice: typecheck failed")
  1723  	}
  1724  	n = walkexpr(n, init)
  1725  	return n
  1726  }
  1727  
  1728  func walkCall(n *Node, init *Nodes) {
  1729  	if n.Rlist.Len() != 0 {
  1730  		return // already walked
  1731  	}
  1732  	n.Left = walkexpr(n.Left, init)
  1733  	walkexprlist(n.List.Slice(), init)
  1734  
  1735  	params := n.Left.Type.Params()
  1736  	args := n.List.Slice()
  1737  	// If there's a ... parameter (which is only valid as the final
  1738  	// parameter) and this is not a ... call expression,
  1739  	// then assign the remaining arguments as a slice.
  1740  	if nf := params.NumFields(); nf > 0 {
  1741  		if last := params.Field(nf - 1); last.IsDDD() && !n.IsDDD() {
  1742  			// The callsite does not use a ..., but the called function is declared
  1743  			// with a final argument that has a ... . Build the slice that we will
  1744  			// pass as the ... argument.
  1745  			tail := args[nf-1:]
  1746  			slice := mkdotargslice(last.Type, tail, init, n.Right)
  1747  			// Allow immediate GC.
  1748  			for i := range tail {
  1749  				tail[i] = nil
  1750  			}
  1751  			args = append(args[:nf-1], slice)
  1752  		}
  1753  	}
  1754  
  1755  	// If this is a method call, add the receiver at the beginning of the args.
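        	// For example, x.M(a, b) is handled with the argument list (x, a, b),
        	// so the receiver gets the same stage-through-a-temporary treatment as
        	// any other argument below.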
  1756  	if n.Op == OCALLMETH {
  1757  		withRecv := make([]*Node, len(args)+1)
  1758  		withRecv[0] = n.Left.Left
  1759  		n.Left.Left = nil
  1760  		copy(withRecv[1:], args)
  1761  		args = withRecv
  1762  	}
  1763  
  1764  	// For any argument whose evaluation might require a function call,
  1765  	// store that argument into a temporary variable,
  1766  	// to prevent such calls from clobbering arguments already on the stack.
  1767  	// When instrumenting, all arguments might require function calls.
  1768  	var tempAssigns []*Node
  1769  	for i, arg := range args {
  1770  		updateHasCall(arg)
  1771  		// Determine param type.
  1772  		var t *types.Type
  1773  		if n.Op == OCALLMETH {
  1774  			if i == 0 {
  1775  				t = n.Left.Type.Recv().Type
  1776  			} else {
  1777  				t = params.Field(i - 1).Type
  1778  			}
  1779  		} else {
  1780  			t = params.Field(i).Type
  1781  		}
  1782  		if instrumenting || fncall(arg, t) {
  1783  			// make assignment of fncall to tempAt
  1784  			tmp := temp(t)
  1785  			a := nod(OAS, tmp, arg)
  1786  			a = convas(a, init)
  1787  			tempAssigns = append(tempAssigns, a)
  1788  			// replace arg with temp
  1789  			args[i] = tmp
  1790  		}
  1791  	}
  1792  
  1793  	n.List.Set(tempAssigns)
  1794  	n.Rlist.Set(args)
  1795  }
  1796  
  1797  // generate code for print
  1798  func walkprint(nn *Node, init *Nodes) *Node {
  1799  	// Hoist all the argument evaluation up before the lock.
  1800  	walkexprlistcheap(nn.List.Slice(), init)
  1801  
  1802  	// For println, add " " between elements and "\n" at the end.
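        	// For example, println(a, b) becomes print(a, " ", b, "\n").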
  1803  	if nn.Op == OPRINTN {
  1804  		s := nn.List.Slice()
  1805  		t := make([]*Node, 0, len(s)*2)
  1806  		for i, n := range s {
  1807  			if i != 0 {
  1808  				t = append(t, nodstr(" "))
  1809  			}
  1810  			t = append(t, n)
  1811  		}
  1812  		t = append(t, nodstr("\n"))
  1813  		nn.List.Set(t)
  1814  	}
  1815  
  1816  	// Collapse runs of constant strings.
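        	// For example, println("x =", x) was rewritten above to
        	// print("x =", " ", x, "\n"); the two adjacent literals collapse into
        	// the single constant "x = ", so fewer printstring calls are emitted.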
  1817  	s := nn.List.Slice()
  1818  	t := make([]*Node, 0, len(s))
  1819  	for i := 0; i < len(s); {
  1820  		var strs []string
  1821  		for i < len(s) && Isconst(s[i], CTSTR) {
  1822  			strs = append(strs, strlit(s[i]))
  1823  			i++
  1824  		}
  1825  		if len(strs) > 0 {
  1826  			t = append(t, nodstr(strings.Join(strs, "")))
  1827  		}
  1828  		if i < len(s) {
  1829  			t = append(t, s[i])
  1830  			i++
  1831  		}
  1832  	}
  1833  	nn.List.Set(t)
  1834  
  1835  	calls := []*Node{mkcall("printlock", nil, init)}
  1836  	for i, n := range nn.List.Slice() {
  1837  		if n.Op == OLITERAL {
  1838  			switch n.Val().Ctype() {
  1839  			case CTRUNE:
  1840  				n = defaultlit(n, types.Runetype)
  1841  
  1842  			case CTINT:
  1843  				n = defaultlit(n, types.Types[TINT64])
  1844  
  1845  			case CTFLT:
  1846  				n = defaultlit(n, types.Types[TFLOAT64])
  1847  			}
  1848  		}
  1849  
  1850  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  1851  			n = defaultlit(n, types.Types[TINT64])
  1852  		}
  1853  		n = defaultlit(n, nil)
  1854  		nn.List.SetIndex(i, n)
  1855  		if n.Type == nil || n.Type.Etype == TFORW {
  1856  			continue
  1857  		}
  1858  
  1859  		var on *Node
  1860  		switch n.Type.Etype {
  1861  		case TINTER:
  1862  			if n.Type.IsEmptyInterface() {
  1863  				on = syslook("printeface")
  1864  			} else {
  1865  				on = syslook("printiface")
  1866  			}
  1867  			on = substArgTypes(on, n.Type) // any-1
  1868  		case TPTR, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  1869  			on = syslook("printpointer")
  1870  			on = substArgTypes(on, n.Type) // any-1
  1871  		case TSLICE:
  1872  			on = syslook("printslice")
  1873  			on = substArgTypes(on, n.Type) // any-1
  1874  		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
  1875  			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  1876  				on = syslook("printhex")
  1877  			} else {
  1878  				on = syslook("printuint")
  1879  			}
  1880  		case TINT, TINT8, TINT16, TINT32, TINT64:
  1881  			on = syslook("printint")
  1882  		case TFLOAT32, TFLOAT64:
  1883  			on = syslook("printfloat")
  1884  		case TCOMPLEX64, TCOMPLEX128:
  1885  			on = syslook("printcomplex")
  1886  		case TBOOL:
  1887  			on = syslook("printbool")
  1888  		case TSTRING:
  1889  			cs := ""
  1890  			if Isconst(n, CTSTR) {
  1891  				cs = strlit(n)
  1892  			}
  1893  			switch cs {
  1894  			case " ":
  1895  				on = syslook("printsp")
  1896  			case "\n":
  1897  				on = syslook("printnl")
  1898  			default:
  1899  				on = syslook("printstring")
  1900  			}
  1901  		default:
  1902  			badtype(OPRINT, n.Type, nil)
  1903  			continue
  1904  		}
  1905  
  1906  		r := nod(OCALL, on, nil)
  1907  		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  1908  			t := params[0].Type
  1909  			if !types.Identical(t, n.Type) {
  1910  				n = nod(OCONV, n, nil)
  1911  				n.Type = t
  1912  			}
  1913  			r.List.Append(n)
  1914  		}
  1915  		calls = append(calls, r)
  1916  	}
  1917  
  1918  	calls = append(calls, mkcall("printunlock", nil, init))
  1919  
  1920  	typecheckslice(calls, ctxStmt)
  1921  	walkexprlist(calls, init)
  1922  
  1923  	r := nod(OEMPTY, nil, nil)
  1924  	r = typecheck(r, ctxStmt)
  1925  	r = walkexpr(r, init)
  1926  	r.Ninit.Set(calls)
  1927  	return r
  1928  }
  1929  
  1930  func callnew(t *types.Type) *Node {
  1931  	if t.NotInHeap() {
  1932  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  1933  	}
  1934  	dowidth(t)
  1935  	n := nod(ONEWOBJ, typename(t), nil)
  1936  	n.Type = types.NewPtr(t)
  1937  	n.SetTypecheck(1)
  1938  	n.SetNonNil(true)
  1939  	return n
  1940  }
  1941  
  1942  // isReflectHeaderDataField reports whether l is an expression p.Data
  1943  // where p has type reflect.SliceHeader or reflect.StringHeader.
  1944  func isReflectHeaderDataField(l *Node) bool {
  1945  	if l.Type != types.Types[TUINTPTR] {
  1946  		return false
  1947  	}
  1948  
  1949  	var tsym *types.Sym
  1950  	switch l.Op {
  1951  	case ODOT:
  1952  		tsym = l.Left.Type.Sym
  1953  	case ODOTPTR:
  1954  		tsym = l.Left.Type.Elem().Sym
  1955  	default:
  1956  		return false
  1957  	}
  1958  
  1959  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  1960  		return false
  1961  	}
  1962  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  1963  }
  1964  
  1965  func convas(n *Node, init *Nodes) *Node {
  1966  	if n.Op != OAS {
  1967  		Fatalf("convas: not OAS %v", n.Op)
  1968  	}
  1969  	defer updateHasCall(n)
  1970  
  1971  	n.SetTypecheck(1)
  1972  
  1973  	if n.Left == nil || n.Right == nil {
  1974  		return n
  1975  	}
  1976  
  1977  	lt := n.Left.Type
  1978  	rt := n.Right.Type
  1979  	if lt == nil || rt == nil {
  1980  		return n
  1981  	}
  1982  
  1983  	if n.Left.isBlank() {
  1984  		n.Right = defaultlit(n.Right, nil)
  1985  		return n
  1986  	}
  1987  
  1988  	if !types.Identical(lt, rt) {
  1989  		n.Right = assignconv(n.Right, lt, "assignment")
  1990  		n.Right = walkexpr(n.Right, init)
  1991  	}
  1992  	dowidth(n.Right.Type)
  1993  
  1994  	return n
  1995  }
  1996  
  1997  // from ascompat[ee]
  1998  //	a,b = c,d
  1999  // simultaneous assignment. there cannot
  2000  // be later use of an earlier lvalue.
  2001  //
  2002  // function calls have been removed.
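        //
        // For example, in
        //	x, y = y, x
        // the right-hand side of the second assignment (x) is written by the
        // first assignment, so a copy of the original x is made up front and y
        // is assigned from that copy, preserving simultaneous-assignment
        // semantics.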
  2003  func reorder3(all []*Node) []*Node {
  2004  	// If a needed expression may be affected by an
  2005  	// earlier assignment, make an early copy of that
  2006  	// expression and use the copy instead.
  2007  	var early []*Node
  2008  
  2009  	var mapinit Nodes
  2010  	for i, n := range all {
  2011  		l := n.Left
  2012  
  2013  		// Save subexpressions needed on left side.
  2014  		// Drill through non-dereferences.
  2015  		for {
  2016  			if l.Op == ODOT || l.Op == OPAREN {
  2017  				l = l.Left
  2018  				continue
  2019  			}
  2020  
  2021  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2022  				l.Right = reorder3save(l.Right, all, i, &early)
  2023  				l = l.Left
  2024  				continue
  2025  			}
  2026  
  2027  			break
  2028  		}
  2029  
  2030  		switch l.Op {
  2031  		default:
  2032  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2033  
  2034  		case ONAME:
  2035  			break
  2036  
  2037  		case OINDEX, OINDEXMAP:
  2038  			l.Left = reorder3save(l.Left, all, i, &early)
  2039  			l.Right = reorder3save(l.Right, all, i, &early)
  2040  			if l.Op == OINDEXMAP {
  2041  				all[i] = convas(all[i], &mapinit)
  2042  			}
  2043  
  2044  		case ODEREF, ODOTPTR:
  2045  			l.Left = reorder3save(l.Left, all, i, &early)
  2046  		}
  2047  
  2048  		// Save expression on right side.
  2049  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2050  	}
  2051  
  2052  	early = append(mapinit.Slice(), early...)
  2053  	return append(early, all...)
  2054  }
  2055  
  2056  // If the evaluation of n would be affected by the assignments in all up
  2057  // to but not including the ith assignment, reorder3save copies n into a
  2058  // temporary, appends that copy's initialization to *early, and returns
  2059  // the temporary; otherwise it returns n unchanged.
  2060  // The result of reorder3save MUST be assigned back to n, e.g.
  2061  // 	n.Left = reorder3save(n.Left, all, i, early)
  2062  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2063  	if !aliased(n, all, i) {
  2064  		return n
  2065  	}
  2066  
  2067  	q := temp(n.Type)
  2068  	q = nod(OAS, q, n)
  2069  	q = typecheck(q, ctxStmt)
  2070  	*early = append(*early, q)
  2071  	return q.Left
  2072  }
  2073  
  2074  // what's the outer value that a write to n affects?
  2075  // outer value means containing struct or array.
  2076  func outervalue(n *Node) *Node {
  2077  	for {
  2078  		switch n.Op {
  2079  		case OXDOT:
  2080  			Fatalf("OXDOT in walk")
  2081  		case ODOT, OPAREN, OCONVNOP:
  2082  			n = n.Left
  2083  			continue
  2084  		case OINDEX:
  2085  			if n.Left.Type != nil && n.Left.Type.IsArray() {
  2086  				n = n.Left
  2087  				continue
  2088  			}
  2089  		}
  2090  
  2091  		return n
  2092  	}
  2093  }
  2094  
  2095  // Is it possible that the computation of n might be
  2096  // affected by writes in all up to but not including the ith element?
  2097  func aliased(n *Node, all []*Node, i int) bool {
  2098  	if n == nil {
  2099  		return false
  2100  	}
  2101  
  2102  	// Treat all fields of a struct as referring to the whole struct.
  2103  	// We could do better but we would have to keep track of the fields.
  2104  	for n.Op == ODOT {
  2105  		n = n.Left
  2106  	}
  2107  
  2108  	// Look for obvious aliasing: a variable being assigned
  2109  	// during the all list and appearing in n.
  2110  	// Also record whether there are any writes to main memory.
  2111  	// Also record whether there are any writes to variables
  2112  	// whose addresses have been taken.
  2113  	memwrite := false
  2114  	varwrite := false
  2115  	for _, an := range all[:i] {
  2116  		a := outervalue(an.Left)
  2117  
  2118  		for a.Op == ODOT {
  2119  			a = a.Left
  2120  		}
  2121  
  2122  		if a.Op != ONAME {
  2123  			memwrite = true
  2124  			continue
  2125  		}
  2126  
  2127  		switch n.Class() {
  2128  		default:
  2129  			varwrite = true
  2130  			continue
  2131  
  2132  		case PAUTO, PPARAM, PPARAMOUT:
  2133  			if n.Name.Addrtaken() {
  2134  				varwrite = true
  2135  				continue
  2136  			}
  2137  
  2138  			if vmatch2(a, n) {
  2139  				// Direct hit.
  2140  				return true
  2141  			}
  2142  		}
  2143  	}
  2144  
  2145  	// The variables being written do not appear in n.
  2146  	// However, n might refer to computed addresses
  2147  	// that are being written.
  2148  
  2149  	// If no computed addresses are affected by the writes, no aliasing.
  2150  	if !memwrite && !varwrite {
  2151  		return false
  2152  	}
  2153  
  2154  	// If n does not refer to computed addresses
  2155  	// (that is, if n only refers to variables whose addresses
  2156  	// have not been taken), no aliasing.
  2157  	if varexpr(n) {
  2158  		return false
  2159  	}
  2160  
  2161  	// Otherwise, both the writes and n refer to computed memory addresses.
  2162  	// Assume that they might conflict.
  2163  	return true
  2164  }
  2165  
  2166  // does the evaluation of n only refer to variables
  2167  // whose addresses have not been taken?
  2168  // (and no other memory)
  2169  func varexpr(n *Node) bool {
  2170  	if n == nil {
  2171  		return true
  2172  	}
  2173  
  2174  	switch n.Op {
  2175  	case OLITERAL:
  2176  		return true
  2177  
  2178  	case ONAME:
  2179  		switch n.Class() {
  2180  		case PAUTO, PPARAM, PPARAMOUT:
  2181  			if !n.Name.Addrtaken() {
  2182  				return true
  2183  			}
  2184  		}
  2185  
  2186  		return false
  2187  
  2188  	case OADD,
  2189  		OSUB,
  2190  		OOR,
  2191  		OXOR,
  2192  		OMUL,
  2193  		ODIV,
  2194  		OMOD,
  2195  		OLSH,
  2196  		ORSH,
  2197  		OAND,
  2198  		OANDNOT,
  2199  		OPLUS,
  2200  		ONEG,
  2201  		OBITNOT,
  2202  		OPAREN,
  2203  		OANDAND,
  2204  		OOROR,
  2205  		OCONV,
  2206  		OCONVNOP,
  2207  		OCONVIFACE,
  2208  		ODOTTYPE:
  2209  		return varexpr(n.Left) && varexpr(n.Right)
  2210  
  2211  	case ODOT: // but not ODOTPTR
  2212  		// Should have been handled in aliased.
  2213  		Fatalf("varexpr unexpected ODOT")
  2214  	}
  2215  
  2216  	// Be conservative.
  2217  	return false
  2218  }
  2219  
  2220  // is the name l mentioned in r?
  2221  func vmatch2(l *Node, r *Node) bool {
  2222  	if r == nil {
  2223  		return false
  2224  	}
  2225  	switch r.Op {
  2226  	// match each right given left
  2227  	case ONAME:
  2228  		return l == r
  2229  
  2230  	case OLITERAL:
  2231  		return false
  2232  	}
  2233  
  2234  	if vmatch2(l, r.Left) {
  2235  		return true
  2236  	}
  2237  	if vmatch2(l, r.Right) {
  2238  		return true
  2239  	}
  2240  	for _, n := range r.List.Slice() {
  2241  		if vmatch2(l, n) {
  2242  			return true
  2243  		}
  2244  	}
  2245  	return false
  2246  }
  2247  
  2248  // is any name mentioned in l also mentioned in r?
  2249  // called by sinit.go
  2250  func vmatch1(l *Node, r *Node) bool {
  2251  	// isolate all left sides
  2252  	if l == nil || r == nil {
  2253  		return false
  2254  	}
  2255  	switch l.Op {
  2256  	case ONAME:
  2257  		switch l.Class() {
  2258  		case PPARAM, PAUTO:
  2259  			break
  2260  
  2261  		default:
  2262  			// assignment to non-stack variable must be
  2263  			// delayed if right has function calls.
  2264  			if r.HasCall() {
  2265  				return true
  2266  			}
  2267  		}
  2268  
  2269  		return vmatch2(l, r)
  2270  
  2271  	case OLITERAL:
  2272  		return false
  2273  	}
  2274  
  2275  	if vmatch1(l.Left, r) {
  2276  		return true
  2277  	}
  2278  	if vmatch1(l.Right, r) {
  2279  		return true
  2280  	}
  2281  	for _, n := range l.List.Slice() {
  2282  		if vmatch1(n, r) {
  2283  			return true
  2284  		}
  2285  	}
  2286  	return false
  2287  }
  2288  
  2289  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2290  // and to copy non-result parameters' values from the stack.
  2291  func paramstoheap(params *types.Type) []*Node {
  2292  	var nn []*Node
  2293  	for _, t := range params.Fields().Slice() {
  2294  		v := asNode(t.Nname)
  2295  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2296  			v = nil
  2297  		}
  2298  		if v == nil {
  2299  			continue
  2300  		}
  2301  
  2302  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2303  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2304  			if stackcopy.Class() == PPARAM {
  2305  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), ctxStmt)))
  2306  			}
  2307  		}
  2308  	}
  2309  
  2310  	return nn
  2311  }
  2312  
  2313  // zeroResults zeros the return values at the start of the function.
  2314  // We need to do this very early in the function.  Defer might stop a
  2315  // panic and show the return values as they exist at the time of
  2316  // panic.  For precise stacks, the garbage collector assumes results
  2317  // are always live, so we need to zero them before any allocations,
  2318  // even allocations to move params/results to the heap.
  2319  // The generated code is added to Curfn's Enter list.
  2320  func zeroResults() {
  2321  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2322  		v := asNode(f.Nname)
  2323  		if v != nil && v.Name.Param.Heapaddr != nil {
  2324  			// The local which points to the return value is the
  2325  			// thing that needs zeroing. This is already handled
  2326  			// by a Needzero annotation in plive.go:livenessepilogue.
  2327  			continue
  2328  		}
  2329  		if v.isParamHeapCopy() {
  2330  			// TODO(josharian/khr): Investigate whether we can switch to "continue" here,
  2331  			// and document more in either case.
  2332  			// In the review of CL 114797, Keith wrote (roughly):
  2333  			// I don't think the zeroing below matters.
  2334  			// The stack return value will never be marked as live anywhere in the function.
  2335  			// It is not written to until deferreturn returns.
  2336  			v = v.Name.Param.Stackcopy
  2337  		}
  2338  		// Zero the stack location containing f.
  2339  		Curfn.Func.Enter.Append(nodl(Curfn.Pos, OAS, v, nil))
  2340  	}
  2341  }
  2342  
  2343  // returnsfromheap returns code to copy values for heap-escaped parameters
  2344  // back to the stack.
  2345  func returnsfromheap(params *types.Type) []*Node {
  2346  	var nn []*Node
  2347  	for _, t := range params.Fields().Slice() {
  2348  		v := asNode(t.Nname)
  2349  		if v == nil {
  2350  			continue
  2351  		}
  2352  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2353  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), ctxStmt)))
  2354  		}
  2355  	}
  2356  
  2357  	return nn
  2358  }
  2359  
  2360  // heapmoves generates code to handle migrating heap-escaped parameters
  2361  // between the stack and the heap. The generated code is added to Curfn's
  2362  // Enter and Exit lists.
  2363  func heapmoves() {
  2364  	lno := lineno
  2365  	lineno = Curfn.Pos
  2366  	nn := paramstoheap(Curfn.Type.Recvs())
  2367  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2368  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2369  	Curfn.Func.Enter.Append(nn...)
  2370  	lineno = Curfn.Func.Endlineno
  2371  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2372  	lineno = lno
  2373  }
  2374  
  2375  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2376  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2377  		Fatalf("mkcall %v %v", fn, fn.Type)
  2378  	}
  2379  
  2380  	n := fn.Type.NumParams()
  2381  	if n != len(va) {
  2382  		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
  2383  	}
  2384  
  2385  	r := nod(OCALL, fn, nil)
  2386  	r.List.Set(va)
  2387  	if fn.Type.NumResults() > 0 {
  2388  		r = typecheck(r, ctxExpr|ctxMultiOK)
  2389  	} else {
  2390  		r = typecheck(r, ctxStmt)
  2391  	}
  2392  	r = walkexpr(r, init)
  2393  	r.Type = t
  2394  	return r
  2395  }
  2396  
  2397  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2398  	return vmkcall(syslook(name), t, init, args)
  2399  }
  2400  
  2401  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2402  	return vmkcall(fn, t, init, args)
  2403  }
  2404  
  2405  func conv(n *Node, t *types.Type) *Node {
  2406  	if types.Identical(n.Type, t) {
  2407  		return n
  2408  	}
  2409  	n = nod(OCONV, n, nil)
  2410  	n.Type = t
  2411  	n = typecheck(n, ctxExpr)
  2412  	return n
  2413  }
  2414  
  2415  // convnop converts node n to type t using the OCONVNOP op
  2416  // and typechecks the result with ctxExpr.
  2417  func convnop(n *Node, t *types.Type) *Node {
  2418  	if types.Identical(n.Type, t) {
  2419  		return n
  2420  	}
  2421  	n = nod(OCONVNOP, n, nil)
  2422  	n.Type = t
  2423  	n = typecheck(n, ctxExpr)
  2424  	return n
  2425  }
  2426  
  2427  // byteindex converts n, which is byte-sized, to a uint8.
  2428  // We cannot use conv, because we allow converting bool to uint8 here,
  2429  // which is forbidden in user code.
  2430  func byteindex(n *Node) *Node {
  2431  	if types.Identical(n.Type, types.Types[TUINT8]) {
  2432  		return n
  2433  	}
  2434  	n = nod(OCONV, n, nil)
  2435  	n.Type = types.Types[TUINT8]
  2436  	n.SetTypecheck(1)
  2437  	return n
  2438  }
  2439  
  2440  func chanfn(name string, n int, t *types.Type) *Node {
  2441  	if !t.IsChan() {
  2442  		Fatalf("chanfn %v", t)
  2443  	}
  2444  	fn := syslook(name)
  2445  	switch n {
  2446  	default:
  2447  		Fatalf("chanfn %d", n)
  2448  	case 1:
  2449  		fn = substArgTypes(fn, t.Elem())
  2450  	case 2:
  2451  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2452  	}
  2453  	return fn
  2454  }
  2455  
  2456  func mapfn(name string, t *types.Type) *Node {
  2457  	if !t.IsMap() {
  2458  		Fatalf("mapfn %v", t)
  2459  	}
  2460  	fn := syslook(name)
  2461  	fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
  2462  	return fn
  2463  }
  2464  
  2465  func mapfndel(name string, t *types.Type) *Node {
  2466  	if !t.IsMap() {
  2467  		Fatalf("mapfn %v", t)
  2468  	}
  2469  	fn := syslook(name)
  2470  	fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key())
  2471  	return fn
  2472  }
  2473  
  2474  const (
  2475  	mapslow = iota
  2476  	mapfast32
  2477  	mapfast32ptr
  2478  	mapfast64
  2479  	mapfast64ptr
  2480  	mapfaststr
  2481  	nmapfast
  2482  )
  2483  
  2484  type mapnames [nmapfast]string
  2485  
  2486  func mkmapnames(base string, ptr string) mapnames {
  2487  	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2488  }
  2489  
  2490  var mapaccess1 = mkmapnames("mapaccess1", "")
  2491  var mapaccess2 = mkmapnames("mapaccess2", "")
  2492  var mapassign = mkmapnames("mapassign", "ptr")
  2493  var mapdelete = mkmapnames("mapdelete", "")
  2494  
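        // mapfast selects which specialized runtime map helpers (the _fast32,
        // _fast64, and _faststr variants above) can be used for map type t.
        // For example, map[string]T uses the _faststr variants and map[int64]T
        // the _fast64 variants, while any map whose element is larger than
        // 128 bytes falls back to the generic (slow) routines.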
  2495  func mapfast(t *types.Type) int {
  2496  	// Check runtime/map.go:maxElemSize before changing.
  2497  	if t.Elem().Width > 128 {
  2498  		return mapslow
  2499  	}
  2500  	switch algtype(t.Key()) {
  2501  	case AMEM32:
  2502  		if !t.Key().HasHeapPointer() {
  2503  			return mapfast32
  2504  		}
  2505  		if Widthptr == 4 {
  2506  			return mapfast32ptr
  2507  		}
  2508  		Fatalf("small pointer %v", t.Key())
  2509  	case AMEM64:
  2510  		if !t.Key().HasHeapPointer() {
  2511  			return mapfast64
  2512  		}
  2513  		if Widthptr == 8 {
  2514  			return mapfast64ptr
  2515  		}
  2516  		// Two-word object, at least one of which is a pointer.
  2517  		// Use the slow path.
  2518  	case ASTRING:
  2519  		return mapfaststr
  2520  	}
  2521  	return mapslow
  2522  }
  2523  
  2524  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2525  	fn := syslook(name)
  2526  	fn = substArgTypes(fn, l, r)
  2527  	return fn
  2528  }
  2529  
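        // addstr lowers an OADDSTR string concatenation into a runtime call.
        // For example, a + b + c becomes roughly concatstring3(buf, a, b, c),
        // where buf points at a small stack buffer when the result does not
        // escape; with more than five operands the strings are passed to
        // concatstrings as a []string instead.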
  2530  func addstr(n *Node, init *Nodes) *Node {
  2531  	// order.expr rewrote OADDSTR to have a list of strings.
  2532  	c := n.List.Len()
  2533  
  2534  	if c < 2 {
  2535  		Fatalf("addstr count %d too small", c)
  2536  	}
  2537  
  2538  	buf := nodnil()
  2539  	if n.Esc == EscNone {
  2540  		sz := int64(0)
  2541  		for _, n1 := range n.List.Slice() {
  2542  			if n1.Op == OLITERAL {
  2543  				sz += int64(len(strlit(n1)))
  2544  			}
  2545  		}
  2546  
  2547  		// Don't allocate the buffer if the result won't fit.
  2548  		if sz < tmpstringbufsize {
  2549  			// Create temporary buffer for result string on stack.
  2550  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2551  			buf = nod(OADDR, temp(t), nil)
  2552  		}
  2553  	}
  2554  
  2555  	// build list of string arguments
  2556  	args := []*Node{buf}
  2557  	for _, n2 := range n.List.Slice() {
  2558  		args = append(args, conv(n2, types.Types[TSTRING]))
  2559  	}
  2560  
  2561  	var fn string
  2562  	if c <= 5 {
  2563  		// small numbers of strings use direct runtime helpers.
  2564  		// note: order.expr knows this cutoff too.
  2565  		fn = fmt.Sprintf("concatstring%d", c)
  2566  	} else {
  2567  		// large numbers of strings are passed to the runtime as a slice.
  2568  		fn = "concatstrings"
  2569  
  2570  		t := types.NewSlice(types.Types[TSTRING])
  2571  		slice := nod(OCOMPLIT, nil, typenod(t))
  2572  		if prealloc[n] != nil {
  2573  			prealloc[slice] = prealloc[n]
  2574  		}
  2575  		slice.List.Set(args[1:]) // skip buf arg
  2576  		args = []*Node{buf, slice}
  2577  		slice.Esc = EscNone
  2578  	}
  2579  
  2580  	cat := syslook(fn)
  2581  	r := nod(OCALL, cat, nil)
  2582  	r.List.Set(args)
  2583  	r = typecheck(r, ctxExpr)
  2584  	r = walkexpr(r, init)
  2585  	r.Type = n.Type
  2586  
  2587  	return r
  2588  }
  2589  
  2590  func walkAppendArgs(n *Node, init *Nodes) {
  2591  	walkexprlistsafe(n.List.Slice(), init)
  2592  
  2593  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2594  	// and n are name or literal, but those may index the slice we're
  2595  	// modifying here. Fix explicitly.
  2596  	ls := n.List.Slice()
  2597  	for i1, n1 := range ls {
  2598  		ls[i1] = cheapexpr(n1, init)
  2599  	}
  2600  }
  2601  
  2602  // expand append(l1, l2...) to
  2603  //   init {
  2604  //     s := l1
  2605  //     n := len(s) + len(l2)
  2606  //     // Compare as uint so growslice can panic on overflow.
  2607  //     if uint(n) > uint(cap(s)) {
  2608  //       s = growslice(T, s, n)
  2609  //     }
  2610  //     s = s[:n]
  2611  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2612  //   }
  2613  //   s
  2614  //
  2615  // l2 is allowed to be a string.
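        // For example, append(dst, src...) with dst, src []byte takes the memmove
        // path above (when not instrumenting), while an element type containing
        // heap pointers is copied with typedslicecopy so the write barrier is
        // applied.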
  2616  func appendslice(n *Node, init *Nodes) *Node {
  2617  	walkAppendArgs(n, init)
  2618  
  2619  	l1 := n.List.First()
  2620  	l2 := n.List.Second()
  2621  
  2622  	var nodes Nodes
  2623  
  2624  	// var s []T
  2625  	s := temp(l1.Type)
  2626  	nodes.Append(nod(OAS, s, l1)) // s = l1
  2627  
  2628  	elemtype := s.Type.Elem()
  2629  
  2630  	// n := len(s) + len(l2)
  2631  	nn := temp(types.Types[TINT])
  2632  	nodes.Append(nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2633  
  2634  	// if uint(n) > uint(cap(s))
  2635  	nif := nod(OIF, nil, nil)
  2636  	nuint := conv(nn, types.Types[TUINT])
  2637  	scapuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2638  	nif.Left = nod(OGT, nuint, scapuint)
  2639  
  2640  	// instantiate growslice(typ *type, []any, int) []any
  2641  	fn := syslook("growslice")
  2642  	fn = substArgTypes(fn, elemtype, elemtype)
  2643  
  2644  	// s = growslice(T, s, n)
  2645  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2646  	nodes.Append(nif)
  2647  
  2648  	// s = s[:n]
  2649  	nt := nod(OSLICE, s, nil)
  2650  	nt.SetSliceBounds(nil, nn, nil)
  2651  	nt.SetBounded(true)
  2652  	nodes.Append(nod(OAS, s, nt))
  2653  
  2654  	var ncopy *Node
  2655  	if elemtype.HasHeapPointer() {
  2656  		// copy(s[len(l1):], l2)
  2657  		nptr1 := nod(OSLICE, s, nil)
  2658  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2659  
  2660  		nptr2 := l2
  2661  
  2662  		Curfn.Func.setWBPos(n.Pos)
  2663  
  2664  		// instantiate typedslicecopy(typ *type, dst any, src any) int
  2665  		fn := syslook("typedslicecopy")
  2666  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2667  		ncopy = mkcall1(fn, types.Types[TINT], &nodes, typename(elemtype), nptr1, nptr2)
  2668  
  2669  	} else if instrumenting && !compiling_runtime {
  2670  		// rely on runtime to instrument copy.
  2671  		// copy(s[len(l1):], l2)
  2672  		nptr1 := nod(OSLICE, s, nil)
  2673  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2674  
  2675  		nptr2 := l2
  2676  
  2677  		if l2.Type.IsString() {
  2678  			// instantiate func slicestringcopy(to any, fr any) int
  2679  			fn := syslook("slicestringcopy")
  2680  			fn = substArgTypes(fn, l1.Type, l2.Type)
  2681  			ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2)
  2682  		} else {
  2683  			// instantiate func slicecopy(to any, fr any, wid uintptr) int
  2684  			fn := syslook("slicecopy")
  2685  			fn = substArgTypes(fn, l1.Type, l2.Type)
  2686  			ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2, nodintconst(elemtype.Width))
  2687  		}
  2688  
  2689  	} else {
  2690  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2691  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  2692  		nptr1.SetBounded(true)
  2693  		nptr1 = nod(OADDR, nptr1, nil)
  2694  
  2695  		nptr2 := nod(OSPTR, l2, nil)
  2696  
  2697  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &nodes)
  2698  		nwid = nod(OMUL, nwid, nodintconst(elemtype.Width))
  2699  
  2700  		// instantiate func memmove(to *any, frm *any, length uintptr)
  2701  		fn := syslook("memmove")
  2702  		fn = substArgTypes(fn, elemtype, elemtype)
  2703  		ncopy = mkcall1(fn, nil, &nodes, nptr1, nptr2, nwid)
  2704  	}
  2705  	ln := append(nodes.Slice(), ncopy)
  2706  
  2707  	typecheckslice(ln, ctxStmt)
  2708  	walkstmtlist(ln)
  2709  	init.Append(ln...)
  2710  	return s
  2711  }
  2712  
  2713  // isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
  2714  // isAppendOfMake assumes n has already been typechecked.
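        // For example, with optimizations enabled append(b, make([]byte, n)...)
        // matches, allowing the caller to rewrite it via extendslice rather than
        // materializing the temporary slice from make.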
  2715  func isAppendOfMake(n *Node) bool {
  2716  	if Debug['N'] != 0 || instrumenting {
  2717  		return false
  2718  	}
  2719  
  2720  	if n.Typecheck() == 0 {
  2721  		Fatalf("missing typecheck: %+v", n)
  2722  	}
  2723  
  2724  	if n.Op != OAPPEND || !n.IsDDD() || n.List.Len() != 2 {
  2725  		return false
  2726  	}
  2727  
  2728  	second := n.List.Second()
  2729  	if second.Op != OMAKESLICE || second.Right != nil {
  2730  		return false
  2731  	}
  2732  
  2733  	// y must be either an integer constant, or a variable whose largest
  2734  	// possible positive value fits into a uint.
  2735  
  2736  	// typecheck made sure that constant arguments to make are not negative and fit into an int.
  2737  
  2738  	// Overflow of the len argument to make is handled by an explicit check of int(len) < 0 at runtime.
  2739  	y := second.Left
  2740  	if !Isconst(y, CTINT) && maxintval[y.Type.Etype].Cmp(maxintval[TUINT]) > 0 {
  2741  		return false
  2742  	}
  2743  
  2744  	return true
  2745  }
  2746  
  2747  // extendslice rewrites append(l1, make([]T, l2)...) to
  2748  //   init {
  2749  //     if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
  2750  //     } else {
  2751  //       panicmakeslicelen()
  2752  //     }
  2753  //     s := l1
  2754  //     n := len(s) + l2
  2755  //     // Compare n and cap(s) as uint so growslice can panic on overflow of len(s) + l2.
  2756  //     // cap is a positive int and n can become negative when len(s) + l2
  2757  //     // overflows int. Interpreting n when negative as uint makes it larger
  2758  //     // than cap(s). growslice will check the int n arg and panic if n is
  2759  //     // negative. This prevents the overflow from being undetected.
  2760  //     if uint(n) > uint(cap(s)) {
  2761  //       s = growslice(T, s, n)
  2762  //     }
  2763  //     s = s[:n]
  2764  //     lptr := &l1[0]
  2765  //     sptr := &s[0]
  2766  //     if lptr == sptr || !hasPointers(T) {
  2767  //       // growslice did not clear the whole underlying array (or did not get called)
  2768  //       hp := &s[len(l1)]
  2769  //       hn := l2 * sizeof(T)
  2770  //       memclr(hp, hn)
  2771  //     }
  2772  //   }
  2773  //   s
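        //
        // For example, append(b, make([]byte, n)...) extends b by n zero bytes,
        // growing only when the capacity is exceeded and clearing just the newly
        // exposed elements with memclrNoHeapPointers (or memclrHasPointers when
        // the element type contains pointers).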
  2774  func extendslice(n *Node, init *Nodes) *Node {
  2775  	// isAppendOfMake made sure all possible positive values of l2 fit into an uint.
  2776  	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
  2777  	// check of l2 < 0 at runtime which is generated below.
  2778  	l2 := conv(n.List.Second().Left, types.Types[TINT])
  2779  	l2 = typecheck(l2, ctxExpr)
  2780  	n.List.SetSecond(l2) // walkAppendArgs expects l2 in n.List.Second().
  2781  
  2782  	walkAppendArgs(n, init)
  2783  
  2784  	l1 := n.List.First()
  2785  	l2 = n.List.Second() // re-read l2, as it may have been updated by walkAppendArgs
  2786  
  2787  	var nodes []*Node
  2788  
  2789  	// if l2 >= 0 (likely happens), do nothing
  2790  	nifneg := nod(OIF, nod(OGE, l2, nodintconst(0)), nil)
  2791  	nifneg.SetLikely(true)
  2792  
  2793  	// else panicmakeslicelen()
  2794  	nifneg.Rlist.Set1(mkcall("panicmakeslicelen", nil, init))
  2795  	nodes = append(nodes, nifneg)
  2796  
  2797  	// s := l1
  2798  	s := temp(l1.Type)
  2799  	nodes = append(nodes, nod(OAS, s, l1))
  2800  
  2801  	elemtype := s.Type.Elem()
  2802  
  2803  	// n := len(s) + l2
  2804  	nn := temp(types.Types[TINT])
  2805  	nodes = append(nodes, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), l2)))
  2806  
  2807  	// if uint(n) > uint(cap(s))
  2808  	nuint := conv(nn, types.Types[TUINT])
  2809  	capuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2810  	nif := nod(OIF, nod(OGT, nuint, capuint), nil)
  2811  
  2812  	// instantiate growslice(typ *type, old []any, newcap int) []any
  2813  	fn := syslook("growslice")
  2814  	fn = substArgTypes(fn, elemtype, elemtype)
  2815  
  2816  	// s = growslice(T, s, n)
  2817  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2818  	nodes = append(nodes, nif)
  2819  
  2820  	// s = s[:n]
  2821  	nt := nod(OSLICE, s, nil)
  2822  	nt.SetSliceBounds(nil, nn, nil)
  2823  	nt.SetBounded(true)
  2824  	nodes = append(nodes, nod(OAS, s, nt))
  2825  
  2826  	// lptr := &l1[0]
  2827  	l1ptr := temp(l1.Type.Elem().PtrTo())
  2828  	tmp := nod(OSPTR, l1, nil)
  2829  	nodes = append(nodes, nod(OAS, l1ptr, tmp))
  2830  
  2831  	// sptr := &s[0]
  2832  	sptr := temp(elemtype.PtrTo())
  2833  	tmp = nod(OSPTR, s, nil)
  2834  	nodes = append(nodes, nod(OAS, sptr, tmp))
  2835  
  2836  	// hp := &s[len(l1)]
  2837  	hp := nod(OINDEX, s, nod(OLEN, l1, nil))
  2838  	hp.SetBounded(true)
  2839  	hp = nod(OADDR, hp, nil)
  2840  	hp = convnop(hp, types.Types[TUNSAFEPTR])
  2841  
  2842  	// hn := l2 * sizeof(elem(s))
  2843  	hn := nod(OMUL, l2, nodintconst(elemtype.Width))
  2844  	hn = conv(hn, types.Types[TUINTPTR])
  2845  
  2846  	clrname := "memclrNoHeapPointers"
  2847  	hasPointers := types.Haspointers(elemtype)
  2848  	if hasPointers {
  2849  		clrname = "memclrHasPointers"
  2850  		Curfn.Func.setWBPos(n.Pos)
  2851  	}
  2852  
  2853  	var clr Nodes
  2854  	clrfn := mkcall(clrname, nil, &clr, hp, hn)
  2855  	clr.Append(clrfn)
  2856  
  2857  	if hasPointers {
  2858  		// if l1ptr == sptr
  2859  		nifclr := nod(OIF, nod(OEQ, l1ptr, sptr), nil)
  2860  		nifclr.Nbody = clr
  2861  		nodes = append(nodes, nifclr)
  2862  	} else {
  2863  		nodes = append(nodes, clr.Slice()...)
  2864  	}
  2865  
  2866  	typecheckslice(nodes, ctxStmt)
  2867  	walkstmtlist(nodes)
  2868  	init.Append(nodes...)
  2869  	return s
  2870  }
  2871  
  2872  // Rewrite append(src, x, y, z) so that any side effects in
  2873  // x, y, z (including runtime panics) are evaluated in
  2874  // initialization statements before the append.
  2875  // For normal code generation, stop there and leave the
  2876  // rest to cgen_append.
  2877  //
  2878  // For race detector, expand append(src, a [, b]* ) to
  2879  //
  2880  //   init {
  2881  //     s := src
  2882  //     const argc = len(args) - 1
  2883  //     if cap(s) - len(s) < argc {
  2884  //	    s = growslice(s, len(s)+argc)
  2885  //     }
  2886  //     n := len(s)
  2887  //     s = s[:n+argc]
  2888  //     s[n] = a
  2889  //     s[n+1] = b
  2890  //     ...
  2891  //   }
  2892  //   s
  2893  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  2894  	if !samesafeexpr(dst, n.List.First()) {
  2895  		n.List.SetFirst(safeexpr(n.List.First(), init))
  2896  		n.List.SetFirst(walkexpr(n.List.First(), init))
  2897  	}
  2898  	walkexprlistsafe(n.List.Slice()[1:], init)
  2899  
  2900  	nsrc := n.List.First()
  2901  
  2902  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2903  	// and n are name or literal, but those may index the slice we're
  2904  	// modifying here. Fix explicitly.
  2905  	// Using cheapexpr also makes sure that the evaluation
  2906  	// of all arguments (and especially any panics) happen
  2907  	// before we begin to modify the slice in a visible way.
  2908  	ls := n.List.Slice()[1:]
  2909  	for i, n := range ls {
  2910  		n = cheapexpr(n, init)
  2911  		if !types.Identical(n.Type, nsrc.Type.Elem()) {
  2912  			n = assignconv(n, nsrc.Type.Elem(), "append")
  2913  			n = walkexpr(n, init)
  2914  		}
  2915  		ls[i] = n
  2916  	}
  2917  
  2918  	argc := n.List.Len() - 1
  2919  	if argc < 1 {
  2920  		return nsrc
  2921  	}
  2922  
  2923  	// General case, with no function calls left as arguments.
  2924  	// Leave for gen, except that instrumentation requires old form.
  2925  	if !instrumenting || compiling_runtime {
  2926  		return n
  2927  	}
  2928  
  2929  	var l []*Node
  2930  
  2931  	ns := temp(nsrc.Type)
  2932  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  2933  
  2934  	na := nodintconst(int64(argc)) // const argc
  2935  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  2936  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  2937  
  2938  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  2939  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  2940  
  2941  	nx.Nbody.Set1(nod(OAS, ns,
  2942  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  2943  			nod(OADD, nod(OLEN, ns, nil), na))))
  2944  
  2945  	l = append(l, nx)
  2946  
  2947  	nn := temp(types.Types[TINT])
  2948  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  2949  
  2950  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  2951  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  2952  	nx.SetBounded(true)
  2953  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  2954  
  2955  	ls = n.List.Slice()[1:]
  2956  	for i, n := range ls {
  2957  		nx = nod(OINDEX, ns, nn) // s[n] ...
  2958  		nx.SetBounded(true)
  2959  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  2960  		if i+1 < len(ls) {
  2961  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  2962  		}
  2963  	}
  2964  
  2965  	typecheckslice(l, ctxStmt)
  2966  	walkstmtlist(l)
  2967  	init.Append(l...)
  2968  	return ns
  2969  }
  2970  
  2971  // Lower copy(a, b) to a memmove call or a runtime call.
  2972  //
  2973  // init {
  2974  //   n := len(a)
  2975  //   if n > len(b) { n = len(b) }
  2976  //   if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
  2977  // }
  2978  // n;
  2979  //
  2980  // Also works if b is a string.
  2981  //
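        // For example, copy(dst, src) with dst, src []byte expands to the length
        // clamp and memmove shown above; element types with heap pointers go
        // through typedslicecopy instead, and when runtimecall is set (e.g. when
        // instrumenting) the slicecopy/slicestringcopy runtime helpers are used
        // so the copy is visible to the instrumentation.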
  2982  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  2983  	if n.Left.Type.Elem().HasHeapPointer() {
  2984  		Curfn.Func.setWBPos(n.Pos)
  2985  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  2986  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  2987  	}
  2988  
  2989  	if runtimecall {
  2990  		if n.Right.Type.IsString() {
  2991  			fn := syslook("slicestringcopy")
  2992  			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2993  			return mkcall1(fn, n.Type, init, n.Left, n.Right)
  2994  		}
  2995  
  2996  		fn := syslook("slicecopy")
  2997  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2998  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  2999  	}
  3000  
  3001  	n.Left = walkexpr(n.Left, init)
  3002  	n.Right = walkexpr(n.Right, init)
  3003  	nl := temp(n.Left.Type)
  3004  	nr := temp(n.Right.Type)
  3005  	var l []*Node
  3006  	l = append(l, nod(OAS, nl, n.Left))
  3007  	l = append(l, nod(OAS, nr, n.Right))
  3008  
  3009  	nfrm := nod(OSPTR, nr, nil)
  3010  	nto := nod(OSPTR, nl, nil)
  3011  
  3012  	nlen := temp(types.Types[TINT])
  3013  
  3014  	// n = len(to)
  3015  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3016  
  3017  	// if n > len(frm) { n = len(frm) }
  3018  	nif := nod(OIF, nil, nil)
  3019  
  3020  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3021  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3022  	l = append(l, nif)
  3023  
  3024  	// if to.ptr != frm.ptr { memmove( ... ) }
  3025  	ne := nod(OIF, nod(ONE, nto, nfrm), nil)
  3026  	ne.SetLikely(true)
  3027  	l = append(l, ne)
  3028  
  3029  	fn := syslook("memmove")
  3030  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3031  	nwid := temp(types.Types[TUINTPTR])
  3032  	setwid := nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR]))
  3033  	ne.Nbody.Append(setwid)
  3034  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3035  	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
  3036  	ne.Nbody.Append(call)
  3037  
  3038  	typecheckslice(l, ctxStmt)
  3039  	walkstmtlist(l)
  3040  	init.Append(l...)
  3041  	return nlen
  3042  }
  3043  
  3044  func eqfor(t *types.Type) (n *Node, needsize bool) {
  3045  	// Should only arrive here with large memory or
  3046  	// a struct/array containing a non-memory field/element.
  3047  	// Small memory is handled inline, and single non-memory
  3048  	// is handled by walkcompare.
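        	// For example, a large struct type T is compared with its generated
        	// .eq function, called roughly as eq(&a, &b); plain (large) memory uses
        	// memequal(&a, &b, size), and needsize reports whether that trailing
        	// size argument must be supplied by the caller.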
  3049  	switch a, _ := algtype1(t); a {
  3050  	case AMEM:
  3051  		n := syslook("memequal")
  3052  		n = substArgTypes(n, t, t)
  3053  		return n, true
  3054  	case ASPECIAL:
  3055  		sym := typesymprefix(".eq", t)
  3056  		n := newname(sym)
  3057  		n.SetClass(PFUNC)
  3058  		n.Sym.SetFunc(true)
  3059  		n.Type = functype(nil, []*Node{
  3060  			anonfield(types.NewPtr(t)),
  3061  			anonfield(types.NewPtr(t)),
  3062  		}, []*Node{
  3063  			anonfield(types.Types[TBOOL]),
  3064  		})
  3065  		return n, false
  3066  	}
  3067  	Fatalf("eqfor %v", t)
  3068  	return nil, false
  3069  }
  3070  
  3071  // The result of walkcompare MUST be assigned back to n, e.g.
  3072  // 	n.Left = walkcompare(n.Left, init)
  3073  func walkcompare(n *Node, init *Nodes) *Node {
  3074  	if n.Left.Type.IsInterface() && n.Right.Type.IsInterface() && n.Left.Op != OLITERAL && n.Right.Op != OLITERAL {
  3075  		return walkcompareInterface(n, init)
  3076  	}
  3077  
  3078  	if n.Left.Type.IsString() && n.Right.Type.IsString() {
  3079  		return walkcompareString(n, init)
  3080  	}
  3081  
  3082  	n.Left = walkexpr(n.Left, init)
  3083  	n.Right = walkexpr(n.Right, init)
  3084  
  3085  	// Given mixed interface/concrete comparison,
  3086  	// rewrite into types-equal && data-equal.
  3087  	// This is efficient, avoids allocations, and avoids runtime calls.
  3088  	if n.Left.Type.IsInterface() != n.Right.Type.IsInterface() {
  3089  		// Preserve side-effects in case of short-circuiting; see #32187.
  3090  		l := cheapexpr(n.Left, init)
  3091  		r := cheapexpr(n.Right, init)
  3092  		// Swap so that l is the interface value and r is the concrete value.
  3093  		if n.Right.Type.IsInterface() {
  3094  			l, r = r, l
  3095  		}
  3096  
  3097  		// Handle both == and !=.
  3098  		eq := n.Op
  3099  		andor := OOROR
  3100  		if eq == OEQ {
  3101  			andor = OANDAND
  3102  		}
  3103  		// Check for types equal.
  3104  		// For empty interface, this is:
  3105  		//   l.tab == type(r)
  3106  		// For non-empty interface, this is:
  3107  		//   l.tab != nil && l.tab._type == type(r)
  3108  		var eqtype *Node
  3109  		tab := nod(OITAB, l, nil)
  3110  		rtyp := typename(r.Type)
  3111  		if l.Type.IsEmptyInterface() {
  3112  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3113  			tab.SetTypecheck(1)
  3114  			eqtype = nod(eq, tab, rtyp)
  3115  		} else {
  3116  			nonnil := nod(brcom(eq), nodnil(), tab)
  3117  			match := nod(eq, itabType(tab), rtyp)
  3118  			eqtype = nod(andor, nonnil, match)
  3119  		}
  3120  		// Check for data equal.
  3121  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3122  		// Put it all together.
  3123  		expr := nod(andor, eqtype, eqdata)
  3124  		n = finishcompare(n, expr, init)
  3125  		return n
  3126  	}
  3127  
  3128  	// Must be comparison of array or struct.
  3129  	// Otherwise back end handles it.
  3130  	// While we're here, decide whether to
  3131  	// inline or call an eq alg.
  3132  	t := n.Left.Type
  3133  	var inline bool
  3134  
  3135  	maxcmpsize := int64(4)
  3136  	unalignedLoad := canMergeLoads()
  3137  	if unalignedLoad {
  3138  		// Keep this low enough to generate less code than a function call.
  3139  		maxcmpsize = 2 * int64(thearch.LinkArch.RegSize)
  3140  	}
  3141  
  3142  	switch t.Etype {
  3143  	default:
  3144  		if Debug_libfuzzer != 0 && t.IsInteger() {
  3145  			n.Left = cheapexpr(n.Left, init)
  3146  			n.Right = cheapexpr(n.Right, init)
  3147  
  3148  			// If exactly one comparison operand is
  3149  			// constant, invoke the constcmp functions
  3150  			// instead, and arrange for the constant
  3151  			// operand to be the first argument.
  3152  			l, r := n.Left, n.Right
  3153  			if r.Op == OLITERAL {
  3154  				l, r = r, l
  3155  			}
  3156  			constcmp := l.Op == OLITERAL && r.Op != OLITERAL
  3157  
  3158  			var fn string
  3159  			var paramType *types.Type
  3160  			switch t.Size() {
  3161  			case 1:
  3162  				fn = "libfuzzerTraceCmp1"
  3163  				if constcmp {
  3164  					fn = "libfuzzerTraceConstCmp1"
  3165  				}
  3166  				paramType = types.Types[TUINT8]
  3167  			case 2:
  3168  				fn = "libfuzzerTraceCmp2"
  3169  				if constcmp {
  3170  					fn = "libfuzzerTraceConstCmp2"
  3171  				}
  3172  				paramType = types.Types[TUINT16]
  3173  			case 4:
  3174  				fn = "libfuzzerTraceCmp4"
  3175  				if constcmp {
  3176  					fn = "libfuzzerTraceConstCmp4"
  3177  				}
  3178  				paramType = types.Types[TUINT32]
  3179  			case 8:
  3180  				fn = "libfuzzerTraceCmp8"
  3181  				if constcmp {
  3182  					fn = "libfuzzerTraceConstCmp8"
  3183  				}
  3184  				paramType = types.Types[TUINT64]
  3185  			default:
  3186  				Fatalf("unexpected integer size %d for %v", t.Size(), t)
  3187  			}
  3188  			init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init)))
  3189  		}
  3190  		return n
  3191  	case TARRAY:
  3192  		// We can compare several elements at once with 2/4/8 byte integer compares
  3193  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3194  	case TSTRUCT:
  3195  		inline = t.NumComponents(types.IgnoreBlankFields) <= 4
  3196  	}
  3197  
  3198  	cmpl := n.Left
  3199  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3200  		cmpl = cmpl.Left
  3201  	}
  3202  	cmpr := n.Right
  3203  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3204  		cmpr = cmpr.Left
  3205  	}
  3206  
  3207  	// Chose not to inline. Call equality function directly.
  3208  	if !inline {
  3209  		// eq algs take pointers; cmpl and cmpr must be addressable
  3210  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3211  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3212  		}
  3213  
  3214  		fn, needsize := eqfor(t)
  3215  		call := nod(OCALL, fn, nil)
  3216  		call.List.Append(nod(OADDR, cmpl, nil))
  3217  		call.List.Append(nod(OADDR, cmpr, nil))
  3218  		if needsize {
  3219  			call.List.Append(nodintconst(t.Width))
  3220  		}
  3221  		res := call
  3222  		if n.Op != OEQ {
  3223  			res = nod(ONOT, res, nil)
  3224  		}
  3225  		n = finishcompare(n, res, init)
  3226  		return n
  3227  	}
  3228  
  3229  	// inline: build boolean expression comparing element by element
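        	// For example, a == b with a, b of type struct{ X, Y int } becomes
        	// a.X == b.X && a.Y == b.Y, and for != the pieces are joined with ||.
        	// Arrays are compared element by element, with runs of small integer
        	// elements merged into wider unsigned comparisons below when the
        	// target supports unaligned loads.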
  3230  	andor := OANDAND
  3231  	if n.Op == ONE {
  3232  		andor = OOROR
  3233  	}
  3234  	var expr *Node
  3235  	compare := func(el, er *Node) {
  3236  		a := nod(n.Op, el, er)
  3237  		if expr == nil {
  3238  			expr = a
  3239  		} else {
  3240  			expr = nod(andor, expr, a)
  3241  		}
  3242  	}
  3243  	cmpl = safeexpr(cmpl, init)
  3244  	cmpr = safeexpr(cmpr, init)
  3245  	if t.IsStruct() {
  3246  		for _, f := range t.Fields().Slice() {
  3247  			sym := f.Sym
  3248  			if sym.IsBlank() {
  3249  				continue
  3250  			}
  3251  			compare(
  3252  				nodSym(OXDOT, cmpl, sym),
  3253  				nodSym(OXDOT, cmpr, sym),
  3254  			)
  3255  		}
  3256  	} else {
  3257  		step := int64(1)
  3258  		remains := t.NumElem() * t.Elem().Width
  3259  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3260  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3261  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3262  		for i := int64(0); remains > 0; {
  3263  			var convType *types.Type
  3264  			switch {
  3265  			case remains >= 8 && combine64bit:
  3266  				convType = types.Types[TINT64]
  3267  				step = 8 / t.Elem().Width
  3268  			case remains >= 4 && combine32bit:
  3269  				convType = types.Types[TUINT32]
  3270  				step = 4 / t.Elem().Width
  3271  			case remains >= 2 && combine16bit:
  3272  				convType = types.Types[TUINT16]
  3273  				step = 2 / t.Elem().Width
  3274  			default:
  3275  				step = 1
  3276  			}
  3277  			if step == 1 {
  3278  				compare(
  3279  					nod(OINDEX, cmpl, nodintconst(i)),
  3280  					nod(OINDEX, cmpr, nodintconst(i)),
  3281  				)
  3282  				i++
  3283  				remains -= t.Elem().Width
  3284  			} else {
  3285  				elemType := t.Elem().ToUnsigned()
  3286  				cmplw := nod(OINDEX, cmpl, nodintconst(i))
  3287  				cmplw = conv(cmplw, elemType) // convert to unsigned
  3288  				cmplw = conv(cmplw, convType) // widen
  3289  				cmprw := nod(OINDEX, cmpr, nodintconst(i))
  3290  				cmprw = conv(cmprw, elemType)
  3291  				cmprw = conv(cmprw, convType)
  3292  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3293  				// ssa will generate a single large load.
  3294  				for offset := int64(1); offset < step; offset++ {
  3295  					lb := nod(OINDEX, cmpl, nodintconst(i+offset))
  3296  					lb = conv(lb, elemType)
  3297  					lb = conv(lb, convType)
  3298  					lb = nod(OLSH, lb, nodintconst(8*t.Elem().Width*offset))
  3299  					cmplw = nod(OOR, cmplw, lb)
  3300  					rb := nod(OINDEX, cmpr, nodintconst(i+offset))
  3301  					rb = conv(rb, elemType)
  3302  					rb = conv(rb, convType)
  3303  					rb = nod(OLSH, rb, nodintconst(8*t.Elem().Width*offset))
  3304  					cmprw = nod(OOR, cmprw, rb)
  3305  				}
  3306  				compare(cmplw, cmprw)
  3307  				i += step
  3308  				remains -= step * t.Elem().Width
  3309  			}
  3310  		}
  3311  	}
  3312  	if expr == nil {
  3313  		expr = nodbool(n.Op == OEQ)
  3314  		// We still need to use cmpl and cmpr, in case they contain
  3315  		// an expression which might panic. See issue 23837.
  3316  		t := temp(cmpl.Type)
  3317  		a1 := nod(OAS, t, cmpl)
  3318  		a1 = typecheck(a1, ctxStmt)
  3319  		a2 := nod(OAS, t, cmpr)
  3320  		a2 = typecheck(a2, ctxStmt)
  3321  		init.Append(a1, a2)
  3322  	}
  3323  	n = finishcompare(n, expr, init)
  3324  	return n
  3325  }
  3326  
  3327  func tracecmpArg(n *Node, t *types.Type, init *Nodes) *Node {
  3328  	// Ugly hack to avoid "constant -1 overflows uintptr" errors, etc.
  3329  	if n.Op == OLITERAL && n.Type.IsSigned() && n.Int64() < 0 {
  3330  		n = copyexpr(n, n.Type, init)
  3331  	}
  3332  
  3333  	return conv(n, t)
  3334  }
  3335  
  3336  func walkcompareInterface(n *Node, init *Nodes) *Node {
  3337  	// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  3338  	if !types.Identical(n.Left.Type, n.Right.Type) {
  3339  		Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  3340  	}
  3341  	var fn *Node
  3342  	if n.Left.Type.IsEmptyInterface() {
  3343  		fn = syslook("efaceeq")
  3344  	} else {
  3345  		fn = syslook("ifaceeq")
  3346  	}
  3347  
  3348  	n.Right = cheapexpr(n.Right, init)
  3349  	n.Left = cheapexpr(n.Left, init)
  3350  	lt := nod(OITAB, n.Left, nil)
  3351  	rt := nod(OITAB, n.Right, nil)
  3352  	ld := nod(OIDATA, n.Left, nil)
  3353  	rd := nod(OIDATA, n.Right, nil)
  3354  	ld.Type = types.Types[TUNSAFEPTR]
  3355  	rd.Type = types.Types[TUNSAFEPTR]
  3356  	ld.SetTypecheck(1)
  3357  	rd.SetTypecheck(1)
  3358  	call := mkcall1(fn, n.Type, init, lt, ld, rd)
  3359  
  3360  	// Check itable/type before full compare.
  3361  	// Note: short-circuited because order matters.
  3362  	var cmp *Node
  3363  	if n.Op == OEQ {
  3364  		cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  3365  	} else {
  3366  		cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  3367  	}
  3368  	return finishcompare(n, cmp, init)
  3369  }
  3370  
  3371  func walkcompareString(n *Node, init *Nodes) *Node {
  3372  	// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
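        	// For example, s == "ab" becomes len(s) == 2 && s[0] == 'a' && s[1] == 'b';
        	// on architectures that can merge loads, the byte comparisons are
        	// combined into wider word-sized comparisons.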
  3373  	var cs, ncs *Node // const string, non-const string
  3374  	switch {
  3375  	case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  3376  		// ignore; will be constant evaluated
  3377  	case Isconst(n.Left, CTSTR):
  3378  		cs = n.Left
  3379  		ncs = n.Right
  3380  	case Isconst(n.Right, CTSTR):
  3381  		cs = n.Right
  3382  		ncs = n.Left
  3383  	}
  3384  	if cs != nil {
  3385  		cmp := n.Op
  3386  		// Our comparison below assumes that the non-constant string
  3387  		// is on the left hand side, so rewrite "" cmp x to x cmp "".
  3388  		// See issue 24817.
  3389  		if Isconst(n.Left, CTSTR) {
  3390  			cmp = brrev(cmp)
  3391  		}
  3392  
  3393  		// maxRewriteLen was chosen empirically.
  3394  		// It is the value that minimizes cmd/go file size
  3395  		// across most architectures.
  3396  		// See the commit description for CL 26758 for details.
  3397  		maxRewriteLen := 6
  3398  		// Some architectures can load an unaligned byte sequence as one word.
  3399  		// So we can cover longer strings with the same amount of code.
  3400  		canCombineLoads := canMergeLoads()
  3401  		combine64bit := false
  3402  		if canCombineLoads {
  3403  			// Keep this low enough to generate less code than a function call.
  3404  			maxRewriteLen = 2 * thearch.LinkArch.RegSize
  3405  			combine64bit = thearch.LinkArch.RegSize >= 8
  3406  		}
  3407  
  3408  		var and Op
  3409  		switch cmp {
  3410  		case OEQ:
  3411  			and = OANDAND
  3412  		case ONE:
  3413  			and = OOROR
  3414  		default:
  3415  			// Don't do byte-wise comparisons for <, <=, etc.
  3416  			// They're fairly complicated.
  3417  			// Length-only checks are ok, though.
  3418  			maxRewriteLen = 0
  3419  		}
  3420  		if s := strlit(cs); len(s) <= maxRewriteLen {
  3421  			if len(s) > 0 {
  3422  				ncs = safeexpr(ncs, init)
  3423  			}
  3424  			r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  3425  			remains := len(s)
  3426  			for i := 0; remains > 0; {
  3427  				if remains == 1 || !canCombineLoads {
  3428  					cb := nodintconst(int64(s[i]))
  3429  					ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  3430  					r = nod(and, r, nod(cmp, ncb, cb))
  3431  					remains--
  3432  					i++
  3433  					continue
  3434  				}
  3435  				var step int
  3436  				var convType *types.Type
  3437  				switch {
  3438  				case remains >= 8 && combine64bit:
  3439  					convType = types.Types[TINT64]
  3440  					step = 8
  3441  				case remains >= 4:
  3442  					convType = types.Types[TUINT32]
  3443  					step = 4
  3444  				case remains >= 2:
  3445  					convType = types.Types[TUINT16]
  3446  					step = 2
  3447  				}
  3448  				ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  3449  				ncsubstr = conv(ncsubstr, convType)
  3450  				csubstr := int64(s[i])
  3451  				// Calculate the large constant from bytes as a sequence of shifts and ors.
  3452  				// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3453  				// ssa will combine this into a single large load.
  3454  				for offset := 1; offset < step; offset++ {
  3455  					b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  3456  					b = conv(b, convType)
  3457  					b = nod(OLSH, b, nodintconst(int64(8*offset)))
  3458  					ncsubstr = nod(OOR, ncsubstr, b)
  3459  					csubstr |= int64(s[i+offset]) << uint8(8*offset)
  3460  				}
  3461  				csubstrPart := nodintconst(csubstr)
  3462  				// Compare "step" bytes at once.
  3463  				r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  3464  				remains -= step
  3465  				i += step
  3466  			}
  3467  			return finishcompare(n, r, init)
  3468  		}
  3469  	}
  3470  
  3471  	var r *Node
  3472  	if n.Op == OEQ || n.Op == ONE {
  3473  		// prepare for rewrite below
  3474  		n.Left = cheapexpr(n.Left, init)
  3475  		n.Right = cheapexpr(n.Right, init)
  3476  
  3477  		lstr := conv(n.Left, types.Types[TSTRING])
  3478  		rstr := conv(n.Right, types.Types[TSTRING])
  3479  		lptr := nod(OSPTR, lstr, nil)
  3480  		rptr := nod(OSPTR, rstr, nil)
  3481  		llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR])
  3482  		rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR])
  3483  
  3484  		fn := syslook("memequal")
  3485  		fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
  3486  		r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen)
  3487  
  3488  		// quick check of len before full compare for == or !=.
  3489  		// memequal then tests equality up to length len.
  3490  		if n.Op == OEQ {
  3491  			// len(left) == len(right) && memequal(left, right, len)
  3492  			r = nod(OANDAND, nod(OEQ, llen, rlen), r)
  3493  		} else {
  3494  			// len(left) != len(right) || !memequal(left, right, len)
  3495  			r = nod(ONOT, r, nil)
  3496  			r = nod(OOROR, nod(ONE, llen, rlen), r)
  3497  		}
  3498  	} else {
  3499  		// Ordered comparison: compare cmpstring(s1, s2) against 0 using n.Op.
  3500  		r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  3501  		r = nod(n.Op, r, nodintconst(0))
  3502  	}
  3503  
  3504  	return finishcompare(n, r, init)
  3505  }
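
// Illustrative examples of the rewrites above (hypothetical user code):
//
//	s == "ab"  // len(s) == 2 && s[0] == 'a' && s[1] == 'b', with the byte
//	           // comparisons merged into one 16-bit compare when
//	           // canMergeLoads reports true
//	s == t     // len(s) == len(t) && memequal(ptr(s), ptr(t), len(s))
//	s < t      // cmpstring(s, t) < 0
//
// Here ptr stands for the OSPTR node; memequal and cmpstring are the runtime
// helpers called above.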
  3506  
  3507  // The result of finishcompare MUST be assigned back to n, e.g.
  3508  // 	n.Left = finishcompare(n.Left, x, init)
  3509  func finishcompare(n, r *Node, init *Nodes) *Node {
  3510  	r = typecheck(r, ctxExpr)
  3511  	r = conv(r, n.Type)
  3512  	r = walkexpr(r, init)
  3513  	return r
  3514  }
  3515  
  3516  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3517  func (n *Node) isIntOrdering() bool {
  3518  	switch n.Op {
  3519  	case OLE, OLT, OGE, OGT:
  3520  	default:
  3521  		return false
  3522  	}
  3523  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3524  }
  3525  
  3526  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3527  // n must be an OANDAND or OOROR node.
  3528  // The result of walkinrange MUST be assigned back to n, e.g.
  3529  // 	n.Left = walkinrange(n.Left, init)
  3530  func walkinrange(n *Node, init *Nodes) *Node {
  3531  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3532  	// * a, b, and c have integer type
  3533  	// * b is side-effect-free
  3534  	// * opl and opr are each < or ≤
  3535  	// * OP is &&
  3536  	l := n.Left
  3537  	r := n.Right
  3538  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3539  		return n
  3540  	}
  3541  
  3542  	// Find b, if it exists, and rename appropriately.
  3543  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3544  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3545  	a, opl, b := l.Left, l.Op, l.Right
  3546  	x, opr, c := r.Left, r.Op, r.Right
  3547  	for i := 0; ; i++ {
  3548  		if samesafeexpr(b, x) {
  3549  			break
  3550  		}
  3551  		if i == 3 {
  3552  			// Tried all permutations and couldn't find an appropriate b == x.
  3553  			return n
  3554  		}
  3555  		if i&1 == 0 {
  3556  			a, opl, b = b, brrev(opl), a
  3557  		} else {
  3558  			x, opr, c = c, brrev(opr), x
  3559  		}
  3560  	}
  3561  
  3562  	// If n.Op is ||, apply de Morgan.
  3563  	// Negate the internal ops now; we'll negate the top level op at the end.
  3564  	// Henceforth assume &&.
  3565  	negateResult := n.Op == OOROR
  3566  	if negateResult {
  3567  		opl = brcom(opl)
  3568  		opr = brcom(opr)
  3569  	}
  3570  
  3571  	cmpdir := func(o Op) int {
  3572  		switch o {
  3573  		case OLE, OLT:
  3574  			return -1
  3575  		case OGE, OGT:
  3576  			return +1
  3577  		}
  3578  		Fatalf("walkinrange cmpdir %v", o)
  3579  		return 0
  3580  	}
  3581  	if cmpdir(opl) != cmpdir(opr) {
  3582  		// Not a range check; something like b < a && b < c.
  3583  		return n
  3584  	}
  3585  
  3586  	switch opl {
  3587  	case OGE, OGT:
  3588  		// We have something like a > b && b ≥ c.
  3589  		// Switch and reverse ops and rename constants,
  3590  		// to make it look like a ≤ b && b < c.
  3591  		a, c = c, a
  3592  		opl, opr = brrev(opr), brrev(opl)
  3593  	}
  3594  
  3595  	// We must ensure that c-a is non-negative.
  3596  	// For now, require a and c to be constants.
  3597  	// In the future, we could also support a == 0 and c == len/cap(...).
  3598  	// Unfortunately, by this point, most len/cap expressions have been
  3599  	// stored into temporary variables.
  3600  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3601  		return n
  3602  	}
  3603  
  3604  	// Ensure that Int64() does not overflow on a and c (it'll happen
  3605  	// for any const above 2**63; see issue #27143).
  3606  	if !a.CanInt64() || !c.CanInt64() {
  3607  		return n
  3608  	}
  3609  
  3610  	if opl == OLT {
  3611  		// We have a < b && ...
  3612  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3613  		// If a is not the maximum constant for b's type,
  3614  		// we can increment a and switch to ≤.
  3615  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3616  			return n
  3617  		}
  3618  		a = nodintconst(a.Int64() + 1)
  3619  		opl = OLE
  3620  	}
  3621  
  3622  	bound := c.Int64() - a.Int64()
  3623  	if bound < 0 {
  3624  		// Bad news. Something like 5 <= x && x < 3.
  3625  		// Rare in practice, and we still need to generate side-effects,
  3626  		// so just leave it alone.
  3627  		return n
  3628  	}
  3629  
  3630  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3631  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3632  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3633  	// which is equivalent to uint(b-a) < uint(c-a).
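	// For instance (illustrative only), 4 ≤ x && x < 10 becomes uint(x-4) < 6,
	// and x < 4 || 10 ≤ x becomes uint(x-4) ≥ 6 once the top-level negation
	// below is applied.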
  3634  	ut := b.Type.ToUnsigned()
  3635  	lhs := conv(nod(OSUB, b, a), ut)
  3636  	rhs := nodintconst(bound)
  3637  	if negateResult {
  3638  		// Negate top level.
  3639  		opr = brcom(opr)
  3640  	}
  3641  	cmp := nod(opr, lhs, rhs)
  3642  	cmp.Pos = n.Pos
  3643  	cmp = addinit(cmp, l.Ninit.Slice())
  3644  	cmp = addinit(cmp, r.Ninit.Slice())
  3645  	// Typecheck the AST rooted at cmp...
  3646  	cmp = typecheck(cmp, ctxExpr)
  3647  	// ...but then reset cmp's type to match n's type.
  3648  	cmp.Type = n.Type
  3649  	cmp = walkexpr(cmp, init)
  3650  	return cmp
  3651  }
  3652  
  3653  // bounded reports whether the integer expression n must be in the range [0, max).
  3654  func bounded(n *Node, max int64) bool {
  3655  	if n.Type == nil || !n.Type.IsInteger() {
  3656  		return false
  3657  	}
  3658  
  3659  	sign := n.Type.IsSigned()
  3660  	bits := int32(8 * n.Type.Width)
  3661  
  3662  	if smallintconst(n) {
  3663  		v := n.Int64()
  3664  		return 0 <= v && v < max
  3665  	}
  3666  
  3667  	switch n.Op {
  3668  	case OAND:
  3669  		v := int64(-1)
  3670  		if smallintconst(n.Left) {
  3671  			v = n.Left.Int64()
  3672  		} else if smallintconst(n.Right) {
  3673  			v = n.Right.Int64()
  3674  		}
  3675  
  3676  		if 0 <= v && v < max {
  3677  			return true
  3678  		}
  3679  
  3680  	case OMOD:
  3681  		if !sign && smallintconst(n.Right) {
  3682  			v := n.Right.Int64()
  3683  			if 0 <= v && v <= max {
  3684  				return true
  3685  			}
  3686  		}
  3687  
  3688  	case ODIV:
  3689  		if !sign && smallintconst(n.Right) {
  3690  			v := n.Right.Int64()
  3691  			for bits > 0 && v >= 2 {
  3692  				bits--
  3693  				v >>= 1
  3694  			}
  3695  		}
  3696  
  3697  	case ORSH:
  3698  		if !sign && smallintconst(n.Right) {
  3699  			v := n.Right.Int64()
  3700  			if v > int64(bits) {
  3701  				return true
  3702  			}
  3703  			bits -= int32(v)
  3704  		}
  3705  	}
  3706  
  3707  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3708  		return true
  3709  	}
  3710  
  3711  	return false
  3712  }
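
// A few illustrative cases: for an unsigned variable x, bounded(x&15, 16),
// bounded(x%8, 8) and bounded(x>>60, 16) all report true (assuming x is
// 64 bits wide in the last case), as does bounded(c, max) for a small
// constant c with 0 <= c < max.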
  3713  
  3714  // usemethod checks interface method calls for uses of reflect.Type.Method.
  3715  func usemethod(n *Node) {
  3716  	t := n.Left.Type
  3717  
  3718  	// Looking for either of:
  3719  	//	Method(int) reflect.Method
  3720  	//	MethodByName(string) (reflect.Method, bool)
  3721  	//
  3722  	// TODO(crawshaw): improve precision of match by working out
  3723  	//                 how to check the method name.
  3724  	if n := t.NumParams(); n != 1 {
  3725  		return
  3726  	}
  3727  	if n := t.NumResults(); n != 1 && n != 2 {
  3728  		return
  3729  	}
  3730  	p0 := t.Params().Field(0)
  3731  	res0 := t.Results().Field(0)
  3732  	var res1 *types.Field
  3733  	if t.NumResults() == 2 {
  3734  		res1 = t.Results().Field(1)
  3735  	}
  3736  
  3737  	if res1 == nil {
  3738  		if p0.Type.Etype != TINT {
  3739  			return
  3740  		}
  3741  	} else {
  3742  		if !p0.Type.IsString() {
  3743  			return
  3744  		}
  3745  		if !res1.Type.IsBoolean() {
  3746  			return
  3747  		}
  3748  	}
  3749  
  3750  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3751  	//       (including global variables such as numImports - was issue #19028).
  3752  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3753  		Curfn.Func.SetReflectMethod(true)
  3754  	}
  3755  }
  3756  
  3757  func usefield(n *Node) {
  3758  	if objabi.Fieldtrack_enabled == 0 {
  3759  		return
  3760  	}
  3761  
  3762  	switch n.Op {
  3763  	default:
  3764  		Fatalf("usefield %v", n.Op)
  3765  
  3766  	case ODOT, ODOTPTR:
  3767  		break
  3768  	}
  3769  	if n.Sym == nil {
  3770  		// No field name.  This DOTPTR was built by the compiler for access
  3771  		// to runtime data structures.  Ignore.
  3772  		return
  3773  	}
  3774  
  3775  	t := n.Left.Type
  3776  	if t.IsPtr() {
  3777  		t = t.Elem()
  3778  	}
  3779  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3780  	if field == nil {
  3781  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3782  	}
  3783  	if !strings.Contains(field.Note, "go:\"track\"") {
  3784  		return
  3785  	}
  3786  
  3787  	outer := n.Left.Type
  3788  	if outer.IsPtr() {
  3789  		outer = outer.Elem()
  3790  	}
  3791  	if outer.Sym == nil {
  3792  		yyerror("tracked field must be in named struct type")
  3793  	}
  3794  	if !types.IsExported(field.Sym.Name) {
  3795  		yyerror("tracked field must be exported (upper case)")
  3796  	}
  3797  
  3798  	sym := tracksym(outer, field)
  3799  	if Curfn.Func.FieldTrack == nil {
  3800  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3801  	}
  3802  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3803  }
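
// Illustrative example (only relevant when field tracking is enabled, i.e.
// objabi.Fieldtrack_enabled != 0): a tracked field is an exported field of a
// named struct type whose tag contains go:"track", for example
//
//	type T struct {
//		F int `go:"track"`
//	}
//
// Each ODOT/ODOTPTR use of such a field records its symbol in
// Curfn.Func.FieldTrack.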
  3804  
  3805  func candiscardlist(l Nodes) bool {
  3806  	for _, n := range l.Slice() {
  3807  		if !candiscard(n) {
  3808  			return false
  3809  		}
  3810  	}
  3811  	return true
  3812  }
  3813  
  3814  func candiscard(n *Node) bool {
  3815  	if n == nil {
  3816  		return true
  3817  	}
  3818  
  3819  	switch n.Op {
  3820  	default:
  3821  		return false
  3822  
  3823  		// Discardable as long as the subpieces are.
  3824  	case ONAME,
  3825  		ONONAME,
  3826  		OTYPE,
  3827  		OPACK,
  3828  		OLITERAL,
  3829  		OADD,
  3830  		OSUB,
  3831  		OOR,
  3832  		OXOR,
  3833  		OADDSTR,
  3834  		OADDR,
  3835  		OANDAND,
  3836  		OBYTES2STR,
  3837  		ORUNES2STR,
  3838  		OSTR2BYTES,
  3839  		OSTR2RUNES,
  3840  		OCAP,
  3841  		OCOMPLIT,
  3842  		OMAPLIT,
  3843  		OSTRUCTLIT,
  3844  		OARRAYLIT,
  3845  		OSLICELIT,
  3846  		OPTRLIT,
  3847  		OCONV,
  3848  		OCONVIFACE,
  3849  		OCONVNOP,
  3850  		ODOT,
  3851  		OEQ,
  3852  		ONE,
  3853  		OLT,
  3854  		OLE,
  3855  		OGT,
  3856  		OGE,
  3857  		OKEY,
  3858  		OSTRUCTKEY,
  3859  		OLEN,
  3860  		OMUL,
  3861  		OLSH,
  3862  		ORSH,
  3863  		OAND,
  3864  		OANDNOT,
  3865  		ONEW,
  3866  		ONOT,
  3867  		OBITNOT,
  3868  		OPLUS,
  3869  		ONEG,
  3870  		OOROR,
  3871  		OPAREN,
  3872  		ORUNESTR,
  3873  		OREAL,
  3874  		OIMAG,
  3875  		OCOMPLEX:
  3876  		break
  3877  
  3878  		// Discardable as long as we know it's not division by zero.
  3879  	case ODIV, OMOD:
  3880  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3881  			break
  3882  		}
  3883  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3884  			break
  3885  		}
  3886  		return false
  3887  
  3888  		// Discardable as long as we know it won't fail because of a bad size.
  3889  	case OMAKECHAN, OMAKEMAP:
  3890  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3891  			break
  3892  		}
  3893  		return false
  3894  
  3895  		// Difficult to tell what sizes are okay.
  3896  	case OMAKESLICE:
  3897  		return false
  3898  	}
  3899  
  3900  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3901  		return false
  3902  	}
  3903  
  3904  	return true
  3905  }
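
// For example (illustrative), x+y, len(s), and a/2 are discardable, while
// f(), <-ch, and a/b (possible division by zero) are not.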
  3906  
  3907  // Rewrite
  3908  //	go builtin(x, y, z)
  3909  // into
  3910  //	go func(a1, a2, a3) {
  3911  //		builtin(a1, a2, a3)
  3912  //	}(x, y, z)
  3913  // for print, println, and delete.
  3914  
  3915  var wrapCall_prgen int
  3916  
  3917  // The result of wrapCall MUST be assigned back to n, e.g.
  3918  // 	n.Left = wrapCall(n.Left, init)
  3919  func wrapCall(n *Node, init *Nodes) *Node {
  3920  	if n.Ninit.Len() != 0 {
  3921  		walkstmtlist(n.Ninit.Slice())
  3922  		init.AppendNodes(&n.Ninit)
  3923  	}
  3924  
  3925  	t := nod(OTFUNC, nil, nil)
  3926  	for i, arg := range n.List.Slice() {
  3927  		s := lookupN("a", i)
  3928  		t.List.Append(symfield(s, arg.Type))
  3929  	}
  3930  
  3931  	wrapCall_prgen++
  3932  	sym := lookupN("wrap·", wrapCall_prgen)
  3933  	fn := dclfunc(sym, t)
  3934  
  3935  	a := nod(n.Op, nil, nil)
  3936  	a.List.Set(paramNnames(t.Type))
  3937  	a = typecheck(a, ctxStmt)
  3938  	fn.Nbody.Set1(a)
  3939  
  3940  	funcbody()
  3941  
  3942  	fn = typecheck(fn, ctxStmt)
  3943  	typecheckslice(fn.Nbody.Slice(), ctxStmt)
  3944  	xtop = append(xtop, fn)
  3945  
  3946  	a = nod(OCALL, nil, nil)
  3947  	a.Left = fn.Func.Nname
  3948  	a.List.Set(n.List.Slice())
  3949  	a = typecheck(a, ctxStmt)
  3950  	a = walkexpr(a, init)
  3951  	return a
  3952  }
  3953  
  3954  // substArgTypes substitutes the given list of types for
  3955  // successive occurrences of the "any" placeholder in the
  3956  // type syntax expression n.Type.
  3957  // The result of substArgTypes MUST be assigned back to old, e.g.
  3958  // 	n.Left = substArgTypes(n.Left, t1, t2)
  3959  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3960  	n := old.copy()
  3961  
  3962  	for _, t := range types_ {
  3963  		dowidth(t)
  3964  	}
  3965  	n.Type = types.SubstAny(n.Type, &types_)
  3966  	if len(types_) > 0 {
  3967  		Fatalf("substArgTypes: too many argument types")
  3968  	}
  3969  	return n
  3970  }
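
// For example, walkcompareString above does
//
//	fn := syslook("memequal")
//	fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
//
// so that both "any" placeholders in memequal's declared signature are
// replaced by uint8 before the call is built.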
  3971  
  3972  // canMergeLoads reports whether the backend optimization passes for
  3973  // the current architecture can combine adjacent loads into a single
  3974  // larger, possibly unaligned, load. Note that currently the
  3975  // optimizations must be able to handle little endian byte order.
  3976  func canMergeLoads() bool {
  3977  	switch thearch.LinkArch.Family {
  3978  	case sys.ARM64, sys.AMD64, sys.I386, sys.S390X:
  3979  		return true
  3980  	case sys.PPC64:
  3981  		// Load combining only supported on ppc64le.
  3982  		return thearch.LinkArch.ByteOrder == binary.LittleEndian
  3983  	}
  3984  	return false
  3985  }
  3986  
  3987  // isRuneCount reports whether n is of the form len([]rune(string)).
  3988  // These are optimized into a call to runtime.countrunes.
  3989  func isRuneCount(n *Node) bool {
  3990  	return Debug['N'] == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES
  3991  }
  3992  
  3993  func walkCheckPtrAlignment(n *Node, init *Nodes, count *Node) *Node {
  3994  	if !n.Type.IsPtr() {
  3995  		Fatalf("expected pointer type: %v", n.Type)
  3996  	}
  3997  	elem := n.Type.Elem()
  3998  	if count != nil {
  3999  		if !elem.IsArray() {
  4000  			Fatalf("expected array type: %v", elem)
  4001  		}
  4002  		elem = elem.Elem()
  4003  	}
  4004  
  4005  	size := elem.Size()
  4006  	if elem.Alignment() == 1 && (size == 0 || size == 1 && count == nil) {
  4007  		return n
  4008  	}
  4009  
  4010  	if count == nil {
  4011  		count = nodintconst(1)
  4012  	}
  4013  
  4014  	n.Left = cheapexpr(n.Left, init)
  4015  	init.Append(mkcall("checkptrAlignment", nil, init, convnop(n.Left, types.Types[TUNSAFEPTR]), typename(elem), conv(count, types.Types[TUINTPTR])))
  4016  	return n
  4017  }
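
// Illustrative example (user code, not compiler API): with checkptr
// instrumentation enabled, a conversion such as
//
//	q := (*int64)(unsafe.Pointer(p))
//
// is augmented with a call to the runtime's checkptrAlignment, which checks,
// roughly, that p is suitably aligned for an int64 and that the converted
// region does not straddle multiple allocations.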
  4018  
  4019  var walkCheckPtrArithmeticMarker byte
  4020  
  4021  func walkCheckPtrArithmetic(n *Node, init *Nodes) *Node {
  4022  	// Calling cheapexpr(n, init) below leads to a recursive call
  4023  	// to walkexpr, which leads us back here again. Use n.Opt to
  4024  	// prevent infinite loops.
  4025  	if opt := n.Opt(); opt == &walkCheckPtrArithmeticMarker {
  4026  		return n
  4027  	} else if opt != nil {
  4028  		// We use n.Opt() here because today it's not used for OCONVNOP. If that changes,
  4029  		// there's no guarantee that temporarily replacing it is safe, so just hard fail here.
  4030  		Fatalf("unexpected Opt: %v", opt)
  4031  	}
  4032  	n.SetOpt(&walkCheckPtrArithmeticMarker)
  4033  	defer n.SetOpt(nil)
  4034  
  4035  	// TODO(mdempsky): Make stricter. We only need to exempt
  4036  	// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
  4037  	switch n.Left.Op {
  4038  	case OCALLFUNC, OCALLMETH, OCALLINTER:
  4039  		return n
  4040  	}
  4041  
  4042  	if n.Left.Op == ODOTPTR && isReflectHeaderDataField(n.Left) {
  4043  		return n
  4044  	}
  4045  
  4046  	// Find original unsafe.Pointer operands involved in this
  4047  	// arithmetic expression.
  4048  	//
  4049  	// "It is valid both to add and to subtract offsets from a
  4050  	// pointer in this way. It is also valid to use &^ to round
  4051  	// pointers, usually for alignment."
  4052  	var originals []*Node
  4053  	var walk func(n *Node)
  4054  	walk = func(n *Node) {
  4055  		switch n.Op {
  4056  		case OADD:
  4057  			walk(n.Left)
  4058  			walk(n.Right)
  4059  		case OSUB, OANDNOT:
  4060  			walk(n.Left)
  4061  		case OCONVNOP:
  4062  			if n.Left.Type.Etype == TUNSAFEPTR {
  4063  				n.Left = cheapexpr(n.Left, init)
  4064  				originals = append(originals, convnop(n.Left, types.Types[TUNSAFEPTR]))
  4065  			}
  4066  		}
  4067  	}
  4068  	walk(n.Left)
  4069  
  4070  	n = cheapexpr(n, init)
  4071  
  4072  	ddd := nodl(n.Pos, ODDDARG, nil, nil)
  4073  	ddd.Type = types.NewPtr(types.NewArray(types.Types[TUNSAFEPTR], int64(len(originals))))
  4074  	ddd.Esc = EscNone
  4075  	slice := mkdotargslice(types.NewSlice(types.Types[TUNSAFEPTR]), originals, init, ddd)
  4076  
  4077  	init.Append(mkcall("checkptrArithmetic", nil, init, convnop(n, types.Types[TUNSAFEPTR]), slice))
  4078  	// TODO(khr): Mark backing store of slice as dead. This will allow us to reuse
  4079  	// the backing store for multiple calls to checkptrArithmetic.
  4080  
  4081  	return n
  4082  }
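
// Illustrative example (user code): under checkptr instrumentation, pointer
// arithmetic such as
//
//	p := unsafe.Pointer(uintptr(unsafe.Pointer(&x)) + off)
//
// is augmented, roughly, with
//
//	checkptrArithmetic(p, []unsafe.Pointer{unsafe.Pointer(&x)})
//
// so the runtime can check that the result still points into one of the
// original allocations.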
  4083  
  4084  // checkPtr reports whether pointer checking should be enabled for
  4085  // function fn at a given level. See debugHelpFooter for defined
  4086  // levels.
  4087  func checkPtr(fn *Node, level int) bool {
  4088  	return Debug_checkptr >= level && fn.Func.Pragma&NoCheckPtr == 0
  4089  }