github.com/hikaru7719/go@v0.0.0-20181025140707-c8b2ac68906a/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"encoding/binary"
    12  	"fmt"
    13  	"strings"
    14  )
    15  
     16  // The constant is known to the runtime; it must stay in sync with the size of the runtime's temporary string buffers.
    17  const tmpstringbufsize = 32
    18  
    19  func walk(fn *Node) {
    20  	Curfn = fn
    21  
    22  	if Debug['W'] != 0 {
    23  		s := fmt.Sprintf("\nbefore walk %v", Curfn.Func.Nname.Sym)
    24  		dumplist(s, Curfn.Nbody)
    25  	}
    26  
    27  	lno := lineno
    28  
    29  	// Final typecheck for any unused variables.
    30  	for i, ln := range fn.Func.Dcl {
    31  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    32  			ln = typecheck(ln, Erv|Easgn)
    33  			fn.Func.Dcl[i] = ln
    34  		}
    35  	}
    36  
    37  	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    38  	for _, ln := range fn.Func.Dcl {
    39  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    40  			ln.Name.Defn.Left.Name.SetUsed(true)
    41  		}
    42  	}
    43  
    44  	for _, ln := range fn.Func.Dcl {
    45  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    46  			continue
    47  		}
    48  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    49  			if defn.Left.Name.Used() {
    50  				continue
    51  			}
    52  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    53  			defn.Left.Name.SetUsed(true) // suppress repeats
    54  		} else {
    55  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    56  		}
    57  	}
    58  
    59  	lineno = lno
    60  	if nerrors != 0 {
    61  		return
    62  	}
    63  	walkstmtlist(Curfn.Nbody.Slice())
    64  	if Debug['W'] != 0 {
    65  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    66  		dumplist(s, Curfn.Nbody)
    67  	}
    68  
    69  	zeroResults()
    70  	heapmoves()
    71  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    72  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    73  		dumplist(s, Curfn.Func.Enter)
    74  	}
    75  }
    76  
    77  func walkstmtlist(s []*Node) {
    78  	for i := range s {
    79  		s[i] = walkstmt(s[i])
    80  	}
    81  }
    82  
    83  func samelist(a, b []*Node) bool {
    84  	if len(a) != len(b) {
    85  		return false
    86  	}
    87  	for i, n := range a {
    88  		if n != b[i] {
    89  			return false
    90  		}
    91  	}
    92  	return true
    93  }
    94  
    95  func paramoutheap(fn *Node) bool {
    96  	for _, ln := range fn.Func.Dcl {
    97  		switch ln.Class() {
    98  		case PPARAMOUT:
    99  			if ln.isParamStackCopy() || ln.Addrtaken() {
   100  				return true
   101  			}
   102  
   103  		case PAUTO:
   104  			// stop early - parameters are over
   105  			return false
   106  		}
   107  	}
   108  
   109  	return false
   110  }
   111  
   112  // The result of walkstmt MUST be assigned back to n, e.g.
   113  // 	n.Left = walkstmt(n.Left)
   114  func walkstmt(n *Node) *Node {
   115  	if n == nil {
   116  		return n
   117  	}
   118  
   119  	setlineno(n)
   120  
   121  	walkstmtlist(n.Ninit.Slice())
   122  
   123  	switch n.Op {
   124  	default:
   125  		if n.Op == ONAME {
   126  			yyerror("%v is not a top level statement", n.Sym)
   127  		} else {
   128  			yyerror("%v is not a top level statement", n.Op)
   129  		}
   130  		Dump("nottop", n)
   131  
   132  	case OAS,
   133  		OASOP,
   134  		OAS2,
   135  		OAS2DOTTYPE,
   136  		OAS2RECV,
   137  		OAS2FUNC,
   138  		OAS2MAPR,
   139  		OCLOSE,
   140  		OCOPY,
   141  		OCALLMETH,
   142  		OCALLINTER,
   143  		OCALL,
   144  		OCALLFUNC,
   145  		ODELETE,
   146  		OSEND,
   147  		OPRINT,
   148  		OPRINTN,
   149  		OPANIC,
   150  		OEMPTY,
   151  		ORECOVER,
   152  		OGETG:
   153  		if n.Typecheck() == 0 {
   154  			Fatalf("missing typecheck: %+v", n)
   155  		}
   156  		wascopy := n.Op == OCOPY
   157  		init := n.Ninit
   158  		n.Ninit.Set(nil)
   159  		n = walkexpr(n, &init)
   160  		n = addinit(n, init.Slice())
   161  		if wascopy && n.Op == OCONVNOP {
   162  			n.Op = OEMPTY // don't leave plain values as statements.
   163  		}
   164  
   165  	// special case for a receive where we throw away
   166  	// the value received.
   167  	case ORECV:
   168  		if n.Typecheck() == 0 {
   169  			Fatalf("missing typecheck: %+v", n)
   170  		}
   171  		init := n.Ninit
   172  		n.Ninit.Set(nil)
   173  
   174  		n.Left = walkexpr(n.Left, &init)
   175  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   176  		n = walkexpr(n, &init)
   177  
   178  		n = addinit(n, init.Slice())
   179  
   180  	case OBREAK,
   181  		OCONTINUE,
   182  		OFALL,
   183  		OGOTO,
   184  		OLABEL,
   185  		ODCLCONST,
   186  		ODCLTYPE,
   187  		OCHECKNIL,
   188  		OVARDEF,
   189  		OVARKILL,
   190  		OVARLIVE:
   191  		break
   192  
   193  	case ODCL:
   194  		v := n.Left
   195  		if v.Class() == PAUTOHEAP {
   196  			if compiling_runtime {
   197  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   198  			}
   199  			if prealloc[v] == nil {
   200  				prealloc[v] = callnew(v.Type)
   201  			}
   202  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   203  			nn.SetColas(true)
   204  			nn = typecheck(nn, Etop)
   205  			return walkstmt(nn)
   206  		}
   207  
   208  	case OBLOCK:
   209  		walkstmtlist(n.List.Slice())
   210  
   211  	case OXCASE:
   212  		yyerror("case statement out of place")
   213  		n.Op = OCASE
   214  		fallthrough
   215  
   216  	case OCASE:
   217  		n.Right = walkstmt(n.Right)
   218  
   219  	case ODEFER:
   220  		Curfn.Func.SetHasDefer(true)
   221  		fallthrough
   222  	case OPROC:
   223  		switch n.Left.Op {
   224  		case OPRINT, OPRINTN:
   225  			n.Left = wrapCall(n.Left, &n.Ninit)
   226  
   227  		case ODELETE:
   228  			if mapfast(n.Left.List.First().Type) == mapslow {
   229  				n.Left = wrapCall(n.Left, &n.Ninit)
   230  			} else {
   231  				n.Left = walkexpr(n.Left, &n.Ninit)
   232  			}
   233  
   234  		case OCOPY:
   235  			n.Left = copyany(n.Left, &n.Ninit, true)
   236  
   237  		default:
   238  			n.Left = walkexpr(n.Left, &n.Ninit)
   239  		}
   240  
   241  	case OFOR, OFORUNTIL:
   242  		if n.Left != nil {
   243  			walkstmtlist(n.Left.Ninit.Slice())
   244  			init := n.Left.Ninit
   245  			n.Left.Ninit.Set(nil)
   246  			n.Left = walkexpr(n.Left, &init)
   247  			n.Left = addinit(n.Left, init.Slice())
   248  		}
   249  
   250  		n.Right = walkstmt(n.Right)
   251  		if n.Op == OFORUNTIL {
   252  			walkstmtlist(n.List.Slice())
   253  		}
   254  		walkstmtlist(n.Nbody.Slice())
   255  
   256  	case OIF:
   257  		n.Left = walkexpr(n.Left, &n.Ninit)
   258  		walkstmtlist(n.Nbody.Slice())
   259  		walkstmtlist(n.Rlist.Slice())
   260  
   261  	case ORETURN:
   262  		if n.List.Len() == 0 {
   263  			break
   264  		}
   265  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   266  			// assign to the function out parameters,
   267  			// so that reorder3 can fix up conflicts
   268  			var rl []*Node
   269  
   270  			for _, ln := range Curfn.Func.Dcl {
   271  				cl := ln.Class()
   272  				if cl == PAUTO || cl == PAUTOHEAP {
   273  					break
   274  				}
   275  				if cl == PPARAMOUT {
   276  					if ln.isParamStackCopy() {
   277  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   278  					}
   279  					rl = append(rl, ln)
   280  				}
   281  			}
   282  
   283  			if got, want := n.List.Len(), len(rl); got != want {
   284  				// order should have rewritten multi-value function calls
   285  				// with explicit OAS2FUNC nodes.
   286  				Fatalf("expected %v return arguments, have %v", want, got)
   287  			}
   288  
   289  			if samelist(rl, n.List.Slice()) {
   290  				// special return in disguise
   291  				// TODO(josharian, 1.12): is "special return" still relevant?
   292  				// Tests still pass w/o this. See comments on https://go-review.googlesource.com/c/go/+/118318
   293  				walkexprlist(n.List.Slice(), &n.Ninit)
   294  				n.List.Set(nil)
   295  
   296  				break
   297  			}
   298  
   299  			// move function calls out, to make reorder3's job easier.
   300  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   301  
   302  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   303  			n.List.Set(reorder3(ll))
   304  			break
   305  		}
   306  		walkexprlist(n.List.Slice(), &n.Ninit)
   307  
   308  		// For each return parameter (lhs), assign the corresponding result (rhs).
   309  		lhs := Curfn.Type.Results()
   310  		rhs := n.List.Slice()
   311  		res := make([]*Node, lhs.NumFields())
   312  		for i, nl := range lhs.FieldSlice() {
   313  			nname := asNode(nl.Nname)
   314  			if nname.isParamHeapCopy() {
   315  				nname = nname.Name.Param.Stackcopy
   316  			}
   317  			a := nod(OAS, nname, rhs[i])
   318  			res[i] = convas(a, &n.Ninit)
   319  		}
   320  		n.List.Set(res)
   321  
   322  	case ORETJMP:
   323  		break
   324  
   325  	case OSELECT:
   326  		walkselect(n)
   327  
   328  	case OSWITCH:
   329  		walkswitch(n)
   330  
   331  	case ORANGE:
   332  		n = walkrange(n)
   333  	}
   334  
   335  	if n.Op == ONAME {
   336  		Fatalf("walkstmt ended up with name: %+v", n)
   337  	}
   338  	return n
   339  }
   340  
   341  func isSmallMakeSlice(n *Node) bool {
   342  	if n.Op != OMAKESLICE {
   343  		return false
   344  	}
   345  	l := n.Left
   346  	r := n.Right
   347  	if r == nil {
   348  		r = l
   349  	}
   350  	t := n.Type
   351  
   352  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < maxImplicitStackVarSize/t.Elem().Width)
   353  }
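
         // For example, make([]byte, 0, 64) qualifies: both length and capacity are small
         // integer constants and the total size stays under maxImplicitStackVarSize, so the
         // OMAKESLICE case in walkexpr below may allocate the backing array on the stack
         // when the slice does not escape.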
   354  
   355  // walk the whole tree of the body of an
   356  // expression or simple statement.
    357  // the types of expressions are calculated.
   358  // compile-time constants are evaluated.
   359  // complex side effects like statements are appended to init
   360  func walkexprlist(s []*Node, init *Nodes) {
   361  	for i := range s {
   362  		s[i] = walkexpr(s[i], init)
   363  	}
   364  }
   365  
   366  func walkexprlistsafe(s []*Node, init *Nodes) {
   367  	for i, n := range s {
   368  		s[i] = safeexpr(n, init)
   369  		s[i] = walkexpr(s[i], init)
   370  	}
   371  }
   372  
   373  func walkexprlistcheap(s []*Node, init *Nodes) {
   374  	for i, n := range s {
   375  		s[i] = cheapexpr(n, init)
   376  		s[i] = walkexpr(s[i], init)
   377  	}
   378  }
   379  
   380  // convFuncName builds the runtime function name for interface conversion.
   381  // It also reports whether the function expects the data by address.
   382  // Not all names are possible. For example, we never generate convE2E or convE2I.
   383  func convFuncName(from, to *types.Type) (fnname string, needsaddr bool) {
   384  	tkind := to.Tie()
   385  	switch from.Tie() {
   386  	case 'I':
   387  		switch tkind {
   388  		case 'I':
   389  			return "convI2I", false
   390  		}
   391  	case 'T':
   392  		switch tkind {
   393  		case 'E':
   394  			switch {
   395  			case from.Size() == 2 && from.Align == 2:
   396  				return "convT2E16", false
   397  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   398  				return "convT2E32", false
   399  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   400  				return "convT2E64", false
   401  			case from.IsString():
   402  				return "convT2Estring", false
   403  			case from.IsSlice():
   404  				return "convT2Eslice", false
   405  			case !types.Haspointers(from):
   406  				return "convT2Enoptr", true
   407  			}
   408  			return "convT2E", true
   409  		case 'I':
   410  			switch {
   411  			case from.Size() == 2 && from.Align == 2:
   412  				return "convT2I16", false
   413  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   414  				return "convT2I32", false
   415  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   416  				return "convT2I64", false
   417  			case from.IsString():
   418  				return "convT2Istring", false
   419  			case from.IsSlice():
   420  				return "convT2Islice", false
   421  			case !types.Haspointers(from):
   422  				return "convT2Inoptr", true
   423  			}
   424  			return "convT2I", true
   425  		}
   426  	}
   427  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   428  	panic("unreachable")
   429  }
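
         // For example, converting a uint32 (4 bytes, 4-byte aligned, pointer-free) to an
         // empty interface selects convT2E32 and passes the value directly, while converting
         // a pointer-carrying struct that matches none of the special cases above falls
         // through to convT2E, which expects the address of its argument (needsaddr == true).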
   430  
   431  // The result of walkexpr MUST be assigned back to n, e.g.
   432  // 	n.Left = walkexpr(n.Left, init)
   433  func walkexpr(n *Node, init *Nodes) *Node {
   434  	if n == nil {
   435  		return n
   436  	}
   437  
   438  	// Eagerly checkwidth all expressions for the back end.
   439  	if n.Type != nil && !n.Type.WidthCalculated() {
   440  		switch n.Type.Etype {
   441  		case TBLANK, TNIL, TIDEAL:
   442  		default:
   443  			checkwidth(n.Type)
   444  		}
   445  	}
   446  
   447  	if init == &n.Ninit {
   448  		// not okay to use n->ninit when walking n,
   449  		// because we might replace n with some other node
   450  		// and would lose the init list.
   451  		Fatalf("walkexpr init == &n->ninit")
   452  	}
   453  
   454  	if n.Ninit.Len() != 0 {
   455  		walkstmtlist(n.Ninit.Slice())
   456  		init.AppendNodes(&n.Ninit)
   457  	}
   458  
   459  	lno := setlineno(n)
   460  
   461  	if Debug['w'] > 1 {
   462  		Dump("before walk expr", n)
   463  	}
   464  
   465  	if n.Typecheck() != 1 {
   466  		Fatalf("missed typecheck: %+v", n)
   467  	}
   468  
   469  	if n.Type.IsUntyped() {
   470  		Fatalf("expression has untyped type: %+v", n)
   471  	}
   472  
   473  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   474  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   475  		nn = typecheck(nn, Erv)
   476  		nn = walkexpr(nn, init)
   477  		nn.Left.SetNonNil(true)
   478  		return nn
   479  	}
   480  
   481  opswitch:
   482  	switch n.Op {
   483  	default:
   484  		Dump("walk", n)
   485  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   486  
   487  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   488  
   489  	case OTYPE, ONAME, OLITERAL:
   490  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   491  		// Perhaps refactor to use Node.mayBeShared for these instead.
   492  		// If these return early, make sure to still call
   493  		// stringsym for constant strings.
   494  
   495  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   496  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   497  		n.Left = walkexpr(n.Left, init)
   498  
   499  	case OEFACE, OAND, OSUB, OMUL, OADD, OOR, OXOR:
   500  		n.Left = walkexpr(n.Left, init)
   501  		n.Right = walkexpr(n.Right, init)
   502  
   503  	case ODOT:
   504  		usefield(n)
   505  		n.Left = walkexpr(n.Left, init)
   506  
   507  	case ODOTTYPE, ODOTTYPE2:
   508  		n.Left = walkexpr(n.Left, init)
   509  		// Set up interface type addresses for back end.
   510  		n.Right = typename(n.Type)
   511  		if n.Op == ODOTTYPE {
   512  			n.Right.Right = typename(n.Left.Type)
   513  		}
   514  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   515  			n.List.Set1(itabname(n.Type, n.Left.Type))
   516  		}
   517  
   518  	case ODOTPTR:
   519  		usefield(n)
   520  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   521  			// No actual copy will be generated, so emit an explicit nil check.
   522  			n.Left = cheapexpr(n.Left, init)
   523  
   524  			checknil(n.Left, init)
   525  		}
   526  
   527  		n.Left = walkexpr(n.Left, init)
   528  
   529  	case OLEN, OCAP:
   530  		if isRuneCount(n) {
   531  			// Replace len([]rune(string)) with runtime.countrunes(string).
   532  			n = mkcall("countrunes", n.Type, init, conv(n.Left.Left, types.Types[TSTRING]))
   533  			break
   534  		}
   535  
   536  		n.Left = walkexpr(n.Left, init)
   537  
   538  		// replace len(*[10]int) with 10.
   539  		// delayed until now to preserve side effects.
   540  		t := n.Left.Type
   541  
   542  		if t.IsPtr() {
   543  			t = t.Elem()
   544  		}
   545  		if t.IsArray() {
   546  			safeexpr(n.Left, init)
   547  			setintconst(n, t.NumElem())
   548  			n.SetTypecheck(1)
   549  		}
   550  
   551  	case OLSH, ORSH:
   552  		n.Left = walkexpr(n.Left, init)
   553  		n.Right = walkexpr(n.Right, init)
   554  		t := n.Left.Type
   555  		n.SetBounded(bounded(n.Right, 8*t.Width))
   556  		if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
   557  			Warn("shift bounds check elided")
   558  		}
   559  
   560  	case OCOMPLEX:
   561  		// Use results from call expression as arguments for complex.
   562  		if n.Left == nil && n.Right == nil {
   563  			n.Left = n.List.First()
   564  			n.Right = n.List.Second()
   565  		}
   566  		n.Left = walkexpr(n.Left, init)
   567  		n.Right = walkexpr(n.Right, init)
   568  
   569  	case OEQ, ONE, OLT, OLE, OGT, OGE:
   570  		n = walkcompare(n, init)
   571  
   572  	case OANDAND, OOROR:
   573  		n.Left = walkexpr(n.Left, init)
   574  
   575  		// cannot put side effects from n.Right on init,
   576  		// because they cannot run before n.Left is checked.
   577  		// save elsewhere and store on the eventual n.Right.
   578  		var ll Nodes
   579  
   580  		n.Right = walkexpr(n.Right, &ll)
   581  		n.Right = addinit(n.Right, ll.Slice())
   582  		n = walkinrange(n, init)
   583  
   584  	case OPRINT, OPRINTN:
   585  		walkexprlist(n.List.Slice(), init)
   586  		n = walkprint(n, init)
   587  
   588  	case OPANIC:
   589  		n = mkcall("gopanic", nil, init, n.Left)
   590  
   591  	case ORECOVER:
   592  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   593  
   594  	case OCLOSUREVAR, OCFUNC:
   595  		n.SetAddable(true)
   596  
   597  	case OCALLINTER, OCALLFUNC, OCALLMETH:
   598  		if n.Op == OCALLINTER {
   599  			usemethod(n)
   600  		}
   601  
   602  		if n.Op == OCALLFUNC && n.Left.Op == OCLOSURE {
   603  			// Transform direct call of a closure to call of a normal function.
   604  			// transformclosure already did all preparation work.
   605  
   606  			// Prepend captured variables to argument list.
   607  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   608  
   609  			n.Left.Func.Enter.Set(nil)
   610  
   611  			// Replace OCLOSURE with ONAME/PFUNC.
   612  			n.Left = n.Left.Func.Closure.Func.Nname
   613  
   614  			// Update type of OCALLFUNC node.
    615  			// Output arguments haven't changed, but their offsets could have.
   616  			if n.Left.Type.NumResults() == 1 {
   617  				n.Type = n.Left.Type.Results().Field(0).Type
   618  			} else {
   619  				n.Type = n.Left.Type.Results()
   620  			}
   621  		}
   622  
   623  		walkCall(n, init)
   624  
   625  	case OAS, OASOP:
   626  		init.AppendNodes(&n.Ninit)
   627  
   628  		// Recognize m[k] = append(m[k], ...) so we can reuse
   629  		// the mapassign call.
   630  		mapAppend := n.Left.Op == OINDEXMAP && n.Right.Op == OAPPEND
   631  		if mapAppend && !samesafeexpr(n.Left, n.Right.List.First()) {
   632  			Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First())
   633  		}
   634  
   635  		n.Left = walkexpr(n.Left, init)
   636  		n.Left = safeexpr(n.Left, init)
   637  
   638  		if mapAppend {
   639  			n.Right.List.SetFirst(n.Left)
   640  		}
   641  
   642  		if n.Op == OASOP {
   643  			// Rewrite x op= y into x = x op y.
   644  			n.Right = nod(n.SubOp(), n.Left, n.Right)
   645  			n.Right = typecheck(n.Right, Erv)
   646  
   647  			n.Op = OAS
   648  			n.ResetAux()
   649  		}
   650  
   651  		if oaslit(n, init) {
   652  			break
   653  		}
   654  
   655  		if n.Right == nil {
   656  			// TODO(austin): Check all "implicit zeroing"
   657  			break
   658  		}
   659  
   660  		if !instrumenting && isZero(n.Right) {
   661  			break
   662  		}
   663  
   664  		switch n.Right.Op {
   665  		default:
   666  			n.Right = walkexpr(n.Right, init)
   667  
   668  		case ORECV:
   669  			// x = <-c; n.Left is x, n.Right.Left is c.
   670  			// orderstmt made sure x is addressable.
   671  			n.Right.Left = walkexpr(n.Right.Left, init)
   672  
   673  			n1 := nod(OADDR, n.Left, nil)
   674  			r := n.Right.Left // the channel
   675  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   676  			n = walkexpr(n, init)
   677  			break opswitch
   678  
   679  		case OAPPEND:
   680  			// x = append(...)
   681  			r := n.Right
   682  			if r.Type.Elem().NotInHeap() {
   683  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   684  			}
   685  			switch {
   686  			case isAppendOfMake(r):
   687  				// x = append(y, make([]T, y)...)
   688  				r = extendslice(r, init)
   689  			case r.Isddd():
   690  				r = appendslice(r, init) // also works for append(slice, string).
   691  			default:
   692  				r = walkappend(r, init, n)
   693  			}
   694  			n.Right = r
   695  			if r.Op == OAPPEND {
   696  				// Left in place for back end.
   697  				// Do not add a new write barrier.
   698  				// Set up address of type for back end.
   699  				r.Left = typename(r.Type.Elem())
   700  				break opswitch
   701  			}
   702  			// Otherwise, lowered for race detector.
   703  			// Treat as ordinary assignment.
   704  		}
   705  
   706  		if n.Left != nil && n.Right != nil {
   707  			n = convas(n, init)
   708  		}
   709  
   710  	case OAS2:
   711  		init.AppendNodes(&n.Ninit)
   712  		walkexprlistsafe(n.List.Slice(), init)
   713  		walkexprlistsafe(n.Rlist.Slice(), init)
   714  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   715  		ll = reorder3(ll)
   716  		n = liststmt(ll)
   717  
   718  	// a,b,... = fn()
   719  	case OAS2FUNC:
   720  		init.AppendNodes(&n.Ninit)
   721  
   722  		r := n.Rlist.First()
   723  		walkexprlistsafe(n.List.Slice(), init)
   724  		r = walkexpr(r, init)
   725  
   726  		if isIntrinsicCall(r) {
   727  			n.Rlist.Set1(r)
   728  			break
   729  		}
   730  		init.Append(r)
   731  
   732  		ll := ascompatet(n.List, r.Type)
   733  		n = liststmt(ll)
   734  
   735  	// x, y = <-c
   736  	// orderstmt made sure x is addressable.
   737  	case OAS2RECV:
   738  		init.AppendNodes(&n.Ninit)
   739  
   740  		r := n.Rlist.First()
   741  		walkexprlistsafe(n.List.Slice(), init)
   742  		r.Left = walkexpr(r.Left, init)
   743  		var n1 *Node
   744  		if n.List.First().isBlank() {
   745  			n1 = nodnil()
   746  		} else {
   747  			n1 = nod(OADDR, n.List.First(), nil)
   748  		}
   749  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   750  		ok := n.List.Second()
   751  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   752  		n = nod(OAS, ok, call)
   753  		n = typecheck(n, Etop)
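
         		// For example, v, ok = <-c is lowered here to roughly
         		//	ok = chanrecv2(c, &v)
         		// with a nil receive buffer instead of &v when v is the blank identifier.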
   754  
   755  	// a,b = m[i]
   756  	case OAS2MAPR:
   757  		init.AppendNodes(&n.Ninit)
   758  
   759  		r := n.Rlist.First()
   760  		walkexprlistsafe(n.List.Slice(), init)
   761  		r.Left = walkexpr(r.Left, init)
   762  		r.Right = walkexpr(r.Right, init)
   763  		t := r.Left.Type
   764  
   765  		fast := mapfast(t)
   766  		var key *Node
   767  		if fast != mapslow {
   768  			// fast versions take key by value
   769  			key = r.Right
   770  		} else {
   771  			// standard version takes key by reference
   772  			// orderexpr made sure key is addressable.
   773  			key = nod(OADDR, r.Right, nil)
   774  		}
   775  
   776  		// from:
   777  		//   a,b = m[i]
   778  		// to:
   779  		//   var,b = mapaccess2*(t, m, i)
   780  		//   a = *var
   781  		a := n.List.First()
   782  
   783  		if w := t.Elem().Width; w <= 1024 { // 1024 must match runtime/map.go:maxZero
   784  			fn := mapfn(mapaccess2[fast], t)
   785  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   786  		} else {
   787  			fn := mapfn("mapaccess2_fat", t)
   788  			z := zeroaddr(w)
   789  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   790  		}
   791  
   792  		// mapaccess2* returns a typed bool, but due to spec changes,
   793  		// the boolean result of i.(T) is now untyped so we make it the
   794  		// same type as the variable on the lhs.
   795  		if ok := n.List.Second(); !ok.isBlank() && ok.Type.IsBoolean() {
   796  			r.Type.Field(1).Type = ok.Type
   797  		}
   798  		n.Rlist.Set1(r)
   799  		n.Op = OAS2FUNC
   800  
   801  		// don't generate a = *var if a is _
   802  		if !a.isBlank() {
   803  			var_ := temp(types.NewPtr(t.Elem()))
   804  			var_.SetTypecheck(1)
   805  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   806  			n.List.SetFirst(var_)
   807  			n = walkexpr(n, init)
   808  			init.Append(n)
   809  			n = nod(OAS, a, nod(OIND, var_, nil))
   810  		}
   811  
   812  		n = typecheck(n, Etop)
   813  		n = walkexpr(n, init)
   814  
   815  	case ODELETE:
   816  		init.AppendNodes(&n.Ninit)
   817  		map_ := n.List.First()
   818  		key := n.List.Second()
   819  		map_ = walkexpr(map_, init)
   820  		key = walkexpr(key, init)
   821  
   822  		t := map_.Type
   823  		fast := mapfast(t)
   824  		if fast == mapslow {
   825  			// orderstmt made sure key is addressable.
   826  			key = nod(OADDR, key, nil)
   827  		}
   828  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   829  
   830  	case OAS2DOTTYPE:
   831  		walkexprlistsafe(n.List.Slice(), init)
   832  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   833  
   834  	case OCONVIFACE:
   835  		n.Left = walkexpr(n.Left, init)
   836  
   837  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
   838  		if isdirectiface(n.Left.Type) {
   839  			var t *Node
   840  			if n.Type.IsEmptyInterface() {
   841  				t = typename(n.Left.Type)
   842  			} else {
   843  				t = itabname(n.Left.Type, n.Type)
   844  			}
   845  			l := nod(OEFACE, t, n.Left)
   846  			l.Type = n.Type
   847  			l.SetTypecheck(n.Typecheck())
   848  			n = l
   849  			break
   850  		}
   851  
   852  		if staticbytes == nil {
   853  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   854  			staticbytes.SetClass(PEXTERN)
   855  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   856  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   857  			zerobase.SetClass(PEXTERN)
   858  			zerobase.Type = types.Types[TUINTPTR]
   859  		}
   860  
   861  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   862  		// by using an existing addressable value identical to n.Left
   863  		// or creating one on the stack.
   864  		var value *Node
   865  		switch {
   866  		case n.Left.Type.Size() == 0:
   867  			// n.Left is zero-sized. Use zerobase.
   868  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   869  			value = zerobase
   870  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   871  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   872  			n.Left = cheapexpr(n.Left, init)
   873  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   874  			value.SetBounded(true)
   875  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   876  			// n.Left is a readonly global; use it directly.
   877  			value = n.Left
   878  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   879  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   880  			value = temp(n.Left.Type)
   881  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   882  		}
   883  
   884  		if value != nil {
   885  			// Value is identical to n.Left.
   886  			// Construct the interface directly: {type/itab, &value}.
   887  			var t *Node
   888  			if n.Type.IsEmptyInterface() {
   889  				t = typename(n.Left.Type)
   890  			} else {
   891  				t = itabname(n.Left.Type, n.Type)
   892  			}
   893  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   894  			l.Type = n.Type
   895  			l.SetTypecheck(n.Typecheck())
   896  			n = l
   897  			break
   898  		}
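
         		// For example, for a small struct value s whose conversion does not escape
         		// (n.Esc == EscNone), interface{}(s) is built here directly as an OEFACE pair
         		// {typename(T), &tmp}, where tmp is a stack temporary holding a copy of s,
         		// instead of calling a convT2E* helper.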
   899  
   900  		// Implement interface to empty interface conversion.
   901  		// tmp = i.itab
   902  		// if tmp != nil {
   903  		//    tmp = tmp.type
   904  		// }
   905  		// e = iface{tmp, i.data}
   906  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   907  			// Evaluate the input interface.
   908  			c := temp(n.Left.Type)
   909  			init.Append(nod(OAS, c, n.Left))
   910  
   911  			// Get the itab out of the interface.
   912  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   913  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   914  
   915  			// Get the type out of the itab.
   916  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   917  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   918  			init.Append(nif)
   919  
   920  			// Build the result.
   921  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   922  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   923  			e.SetTypecheck(1)
   924  			n = e
   925  			break
   926  		}
   927  
   928  		var ll []*Node
   929  		if n.Type.IsEmptyInterface() {
   930  			if !n.Left.Type.IsInterface() {
   931  				ll = append(ll, typename(n.Left.Type))
   932  			}
   933  		} else {
   934  			if n.Left.Type.IsInterface() {
   935  				ll = append(ll, typename(n.Type))
   936  			} else {
   937  				ll = append(ll, itabname(n.Left.Type, n.Type))
   938  			}
   939  		}
   940  
   941  		fnname, needsaddr := convFuncName(n.Left.Type, n.Type)
   942  		v := n.Left
   943  		if needsaddr {
   944  			// Types of large or unknown size are passed by reference.
   945  			// Orderexpr arranged for n.Left to be a temporary for all
   946  			// the conversions it could see. Comparison of an interface
   947  			// with a non-interface, especially in a switch on interface value
   948  			// with non-interface cases, is not visible to orderstmt, so we
   949  			// have to fall back on allocating a temp here.
   950  			if !islvalue(v) {
   951  				v = copyexpr(v, v.Type, init)
   952  			}
   953  			v = nod(OADDR, v, nil)
   954  		}
   955  		ll = append(ll, v)
   956  
   957  		dowidth(n.Left.Type)
   958  		fn := syslook(fnname)
   959  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   960  		dowidth(fn.Type)
   961  		n = nod(OCALL, fn, nil)
   962  		n.List.Set(ll)
   963  		n = typecheck(n, Erv)
   964  		n = walkexpr(n, init)
   965  
   966  	case OCONV, OCONVNOP:
   967  		n.Left = walkexpr(n.Left, init)
   968  		param, result := rtconvfn(n.Left.Type, n.Type)
   969  		if param == Txxx {
   970  			break
   971  		}
   972  		fn := basicnames[param] + "to" + basicnames[result]
   973  		n = conv(mkcall(fn, types.Types[result], init, conv(n.Left, types.Types[param])), n.Type)
   974  
   975  	case OANDNOT:
   976  		n.Left = walkexpr(n.Left, init)
   977  		n.Op = OAND
   978  		n.Right = nod(OCOM, n.Right, nil)
   979  		n.Right = typecheck(n.Right, Erv)
   980  		n.Right = walkexpr(n.Right, init)
   981  
   982  	case ODIV, OMOD:
   983  		n.Left = walkexpr(n.Left, init)
   984  		n.Right = walkexpr(n.Right, init)
   985  
   986  		// rewrite complex div into function call.
   987  		et := n.Left.Type.Etype
   988  
   989  		if isComplex[et] && n.Op == ODIV {
   990  			t := n.Type
   991  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
   992  			n = conv(n, t)
   993  			break
   994  		}
   995  
   996  		// Nothing to do for float divisions.
   997  		if isFloat[et] {
   998  			break
   999  		}
  1000  
  1001  		// rewrite 64-bit div and mod on 32-bit architectures.
  1002  		// TODO: Remove this code once we can introduce
  1003  		// runtime calls late in SSA processing.
  1004  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1005  			if n.Right.Op == OLITERAL {
  1006  				// Leave div/mod by constant powers of 2.
  1007  				// The SSA backend will handle those.
  1008  				switch et {
  1009  				case TINT64:
  1010  					c := n.Right.Int64()
  1011  					if c < 0 {
  1012  						c = -c
  1013  					}
  1014  					if c != 0 && c&(c-1) == 0 {
  1015  						break opswitch
  1016  					}
  1017  				case TUINT64:
  1018  					c := uint64(n.Right.Int64())
  1019  					if c != 0 && c&(c-1) == 0 {
  1020  						break opswitch
  1021  					}
  1022  				}
  1023  			}
  1024  			var fn string
  1025  			if et == TINT64 {
  1026  				fn = "int64"
  1027  			} else {
  1028  				fn = "uint64"
  1029  			}
  1030  			if n.Op == ODIV {
  1031  				fn += "div"
  1032  			} else {
  1033  				fn += "mod"
  1034  			}
  1035  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1036  		}
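
         		// For example, on a 32-bit target a division x / y of two int64 operands is
         		// rewritten above into a call to the runtime helper int64div(x, y), while
         		// x / 8 is left alone because the SSA backend handles constant power-of-two
         		// divisors directly.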
  1037  
  1038  	case OINDEX:
  1039  		n.Left = walkexpr(n.Left, init)
  1040  
  1041  		// save the original node for bounds checking elision.
   1042  		// If it was an ODIV/OMOD, walk might rewrite it.
  1043  		r := n.Right
  1044  
  1045  		n.Right = walkexpr(n.Right, init)
  1046  
  1047  		// if range of type cannot exceed static array bound,
  1048  		// disable bounds check.
  1049  		if n.Bounded() {
  1050  			break
  1051  		}
  1052  		t := n.Left.Type
  1053  		if t != nil && t.IsPtr() {
  1054  			t = t.Elem()
  1055  		}
  1056  		if t.IsArray() {
  1057  			n.SetBounded(bounded(r, t.NumElem()))
  1058  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1059  				Warn("index bounds check elided")
  1060  			}
  1061  			if smallintconst(n.Right) && !n.Bounded() {
  1062  				yyerror("index out of bounds")
  1063  			}
  1064  		} else if Isconst(n.Left, CTSTR) {
  1065  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1066  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1067  				Warn("index bounds check elided")
  1068  			}
  1069  			if smallintconst(n.Right) && !n.Bounded() {
  1070  				yyerror("index out of bounds")
  1071  			}
  1072  		}
  1073  
  1074  		if Isconst(n.Right, CTINT) {
  1075  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1076  				yyerror("index out of bounds")
  1077  			}
  1078  		}
  1079  
  1080  	case OINDEXMAP:
  1081  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
  1082  		n.Left = walkexpr(n.Left, init)
  1083  		n.Right = walkexpr(n.Right, init)
  1084  		map_ := n.Left
  1085  		key := n.Right
  1086  		t := map_.Type
  1087  		if n.IndexMapLValue() {
  1088  			// This m[k] expression is on the left-hand side of an assignment.
  1089  			fast := mapfast(t)
  1090  			if fast == mapslow {
  1091  				// standard version takes key by reference.
  1092  				// orderexpr made sure key is addressable.
  1093  				key = nod(OADDR, key, nil)
  1094  			}
  1095  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1096  		} else {
  1097  			// m[k] is not the target of an assignment.
  1098  			fast := mapfast(t)
  1099  			if fast == mapslow {
  1100  				// standard version takes key by reference.
  1101  				// orderexpr made sure key is addressable.
  1102  				key = nod(OADDR, key, nil)
  1103  			}
  1104  
  1105  			if w := t.Elem().Width; w <= 1024 { // 1024 must match runtime/map.go:maxZero
  1106  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Elem()), init, typename(t), map_, key)
  1107  			} else {
  1108  				z := zeroaddr(w)
  1109  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Elem()), init, typename(t), map_, key, z)
  1110  			}
  1111  		}
  1112  		n.Type = types.NewPtr(t.Elem())
  1113  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1114  		n = nod(OIND, n, nil)
  1115  		n.Type = t.Elem()
  1116  		n.SetTypecheck(1)
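
         		// For example, a read v := m[k] becomes v = *mapaccess1*(maptype, m, k) (the
         		// fast variants take the key by value, the generic one by address), and an
         		// assignment target m[k] = v becomes *mapassign*(maptype, m, k) = v; both
         		// helpers return a non-nil pointer to the element slot.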
  1117  
  1118  	case ORECV:
  1119  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1120  
  1121  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1122  		n.Left = walkexpr(n.Left, init)
  1123  		low, high, max := n.SliceBounds()
  1124  		low = walkexpr(low, init)
  1125  		if low != nil && isZero(low) {
  1126  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1127  			low = nil
  1128  		}
  1129  		high = walkexpr(high, init)
  1130  		max = walkexpr(max, init)
  1131  		n.SetSliceBounds(low, high, max)
  1132  		if n.Op.IsSlice3() {
  1133  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1134  				// Reduce x[i:j:cap(x)] to x[i:j].
  1135  				if n.Op == OSLICE3 {
  1136  					n.Op = OSLICE
  1137  				} else {
  1138  					n.Op = OSLICEARR
  1139  				}
  1140  				n = reduceSlice(n)
  1141  			}
  1142  		} else {
  1143  			n = reduceSlice(n)
  1144  		}
  1145  
  1146  	case ONEW:
  1147  		if n.Esc == EscNone {
  1148  			if n.Type.Elem().Width >= maxImplicitStackVarSize {
  1149  				Fatalf("large ONEW with EscNone: %v", n)
  1150  			}
  1151  			r := temp(n.Type.Elem())
  1152  			r = nod(OAS, r, nil) // zero temp
  1153  			r = typecheck(r, Etop)
  1154  			init.Append(r)
  1155  			r = nod(OADDR, r.Left, nil)
  1156  			r = typecheck(r, Erv)
  1157  			n = r
  1158  		} else {
  1159  			n = callnew(n.Type.Elem())
  1160  		}
  1161  
  1162  	case OADDSTR:
  1163  		n = addstr(n, init)
  1164  
  1165  	case OAPPEND:
  1166  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1167  		Fatalf("append outside assignment")
  1168  
  1169  	case OCOPY:
  1170  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1171  
  1172  		// cannot use chanfn - closechan takes any, not chan any
  1173  	case OCLOSE:
  1174  		fn := syslook("closechan")
  1175  
  1176  		fn = substArgTypes(fn, n.Left.Type)
  1177  		n = mkcall1(fn, nil, init, n.Left)
  1178  
  1179  	case OMAKECHAN:
  1180  		// When size fits into int, use makechan instead of
  1181  		// makechan64, which is faster and shorter on 32 bit platforms.
  1182  		size := n.Left
  1183  		fnname := "makechan64"
  1184  		argtype := types.Types[TINT64]
  1185  
  1186  		// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1187  		// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1188  		// will be handled by the negative range checks in makechan during runtime.
  1189  		if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1190  			fnname = "makechan"
  1191  			argtype = types.Types[TINT]
  1192  		}
  1193  
  1194  		n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
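
         		// For example, make(chan T, n) with an int-typed n is lowered to
         		//	makechan(chantype, int(n))
         		// and only sizes whose type can exceed the int range (e.g. an int64 size on a
         		// 32-bit platform) go through makechan64.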
  1195  
  1196  	case OMAKEMAP:
  1197  		t := n.Type
  1198  		hmapType := hmap(t)
  1199  		hint := n.Left
  1200  
  1201  		// var h *hmap
  1202  		var h *Node
  1203  		if n.Esc == EscNone {
  1204  			// Allocate hmap on stack.
  1205  
  1206  			// var hv hmap
  1207  			hv := temp(hmapType)
  1208  			zero := nod(OAS, hv, nil)
  1209  			zero = typecheck(zero, Etop)
  1210  			init.Append(zero)
  1211  			// h = &hv
  1212  			h = nod(OADDR, hv, nil)
  1213  
  1214  			// Allocate one bucket pointed to by hmap.buckets on stack if hint
  1215  			// is not larger than BUCKETSIZE. In case hint is larger than
  1216  			// BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
  1217  			// Maximum key and value size is 128 bytes, larger objects
  1218  			// are stored with an indirection. So max bucket size is 2048+eps.
  1219  			if !Isconst(hint, CTINT) ||
  1220  				hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1221  				// var bv bmap
  1222  				bv := temp(bmap(t))
  1223  
  1224  				zero = nod(OAS, bv, nil)
  1225  				zero = typecheck(zero, Etop)
  1226  				init.Append(zero)
  1227  
  1228  				// b = &bv
  1229  				b := nod(OADDR, bv, nil)
  1230  
  1231  				// h.buckets = b
  1232  				bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
  1233  				na := nod(OAS, nodSym(ODOT, h, bsym), b)
  1234  				na = typecheck(na, Etop)
  1235  				init.Append(na)
  1236  			}
  1237  		}
  1238  
  1239  		if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1240  			// Handling make(map[any]any) and
  1241  			// make(map[any]any, hint) where hint <= BUCKETSIZE
   1242  			// specially allows for faster map initialization and
  1243  			// improves binary size by using calls with fewer arguments.
  1244  			// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
  1245  			// and no buckets will be allocated by makemap. Therefore,
  1246  			// no buckets need to be allocated in this code path.
  1247  			if n.Esc == EscNone {
  1248  				// Only need to initialize h.hash0 since
  1249  				// hmap h has been allocated on the stack already.
  1250  				// h.hash0 = fastrand()
  1251  				rand := mkcall("fastrand", types.Types[TUINT32], init)
  1252  				hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
  1253  				a := nod(OAS, nodSym(ODOT, h, hashsym), rand)
  1254  				a = typecheck(a, Etop)
  1255  				a = walkexpr(a, init)
  1256  				init.Append(a)
  1257  				n = convnop(h, t)
  1258  			} else {
   1259  				// Call runtime.makemap_small to allocate an
  1260  				// hmap on the heap and initialize hmap's hash0 field.
  1261  				fn := syslook("makemap_small")
  1262  				fn = substArgTypes(fn, t.Key(), t.Elem())
  1263  				n = mkcall1(fn, n.Type, init)
  1264  			}
  1265  		} else {
  1266  			if n.Esc != EscNone {
  1267  				h = nodnil()
  1268  			}
  1269  			// Map initialization with a variable or large hint is
  1270  			// more complicated. We therefore generate a call to
   1271  			// runtime.makemap to initialize hmap and allocate the
  1272  			// map buckets.
  1273  
  1274  			// When hint fits into int, use makemap instead of
  1275  			// makemap64, which is faster and shorter on 32 bit platforms.
  1276  			fnname := "makemap64"
  1277  			argtype := types.Types[TINT64]
  1278  
  1279  			// Type checking guarantees that TIDEAL hint is positive and fits in an int.
  1280  			// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
  1281  			// The case of hint overflow when converting TUINT or TUINTPTR to TINT
  1282  			// will be handled by the negative range checks in makemap during runtime.
  1283  			if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1284  				fnname = "makemap"
  1285  				argtype = types.Types[TINT]
  1286  			}
  1287  
  1288  			fn := syslook(fnname)
  1289  			fn = substArgTypes(fn, hmapType, t.Key(), t.Elem())
  1290  			n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h)
  1291  		}
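
         		// For example (roughly), a non-escaping m := make(map[int]int, 4) gets an hmap
         		// and one bmap bucket as stack temporaries, has hash0 seeded via fastrand, and
         		// avoids any runtime allocation call; make(map[int]int, n) with a variable n
         		// instead calls makemap(maptype, n, h).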
  1292  
  1293  	case OMAKESLICE:
  1294  		l := n.Left
  1295  		r := n.Right
  1296  		if r == nil {
  1297  			r = safeexpr(l, init)
  1298  			l = r
  1299  		}
  1300  		t := n.Type
  1301  		if n.Esc == EscNone {
  1302  			if !isSmallMakeSlice(n) {
  1303  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1304  			}
  1305  			// var arr [r]T
  1306  			// n = arr[:l]
  1307  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1308  			var_ := temp(t)
  1309  			a := nod(OAS, var_, nil) // zero temp
  1310  			a = typecheck(a, Etop)
  1311  			init.Append(a)
  1312  			r := nod(OSLICE, var_, nil) // arr[:l]
  1313  			r.SetSliceBounds(nil, l, nil)
  1314  			r = conv(r, n.Type) // in case n.Type is named.
  1315  			r = typecheck(r, Erv)
  1316  			r = walkexpr(r, init)
  1317  			n = r
  1318  		} else {
  1319  			// n escapes; set up a call to makeslice.
  1320  			// When len and cap can fit into int, use makeslice instead of
  1321  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1322  
  1323  			if t.Elem().NotInHeap() {
  1324  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1325  			}
  1326  
  1327  			len, cap := l, r
  1328  
  1329  			fnname := "makeslice64"
  1330  			argtype := types.Types[TINT64]
  1331  
  1332  			// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
  1333  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1334  			// will be handled by the negative range checks in makeslice during runtime.
  1335  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1336  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1337  				fnname = "makeslice"
  1338  				argtype = types.Types[TINT]
  1339  			}
  1340  
  1341  			fn := syslook(fnname)
  1342  			fn = substArgTypes(fn, t.Elem()) // any-1
  1343  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1344  		}
  1345  
  1346  	case ORUNESTR:
  1347  		a := nodnil()
  1348  		if n.Esc == EscNone {
  1349  			t := types.NewArray(types.Types[TUINT8], 4)
  1350  			var_ := temp(t)
  1351  			a = nod(OADDR, var_, nil)
  1352  		}
  1353  
  1354  		// intstring(*[4]byte, rune)
  1355  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1356  
  1357  	case OARRAYBYTESTR:
  1358  		a := nodnil()
  1359  		if n.Esc == EscNone {
  1360  			// Create temporary buffer for string on stack.
  1361  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1362  
  1363  			a = nod(OADDR, temp(t), nil)
  1364  		}
  1365  
  1366  		// slicebytetostring(*[32]byte, []byte) string;
  1367  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1368  
  1369  		// slicebytetostringtmp([]byte) string;
  1370  	case OARRAYBYTESTRTMP:
  1371  		n.Left = walkexpr(n.Left, init)
  1372  
  1373  		if !instrumenting {
  1374  			// Let the backend handle OARRAYBYTESTRTMP directly
  1375  			// to avoid a function call to slicebytetostringtmp.
  1376  			break
  1377  		}
  1378  
  1379  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1380  
  1381  		// slicerunetostring(*[32]byte, []rune) string;
  1382  	case OARRAYRUNESTR:
  1383  		a := nodnil()
  1384  
  1385  		if n.Esc == EscNone {
  1386  			// Create temporary buffer for string on stack.
  1387  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1388  
  1389  			a = nod(OADDR, temp(t), nil)
  1390  		}
  1391  
  1392  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1393  
  1394  	case OSTRARRAYBYTE:
  1395  		s := n.Left
  1396  		if Isconst(s, CTSTR) {
  1397  			sc := s.Val().U.(string)
  1398  
  1399  			// Allocate a [n]byte of the right size.
  1400  			t := types.NewArray(types.Types[TUINT8], int64(len(sc)))
  1401  			var a *Node
  1402  			if n.Esc == EscNone && len(sc) <= maxImplicitStackVarSize {
  1403  				a = nod(OADDR, temp(t), nil)
  1404  			} else {
  1405  				a = callnew(t)
  1406  			}
  1407  			p := temp(t.PtrTo()) // *[n]byte
  1408  			init.Append(typecheck(nod(OAS, p, a), Etop))
  1409  
  1410  			// Copy from the static string data to the [n]byte.
  1411  			if len(sc) > 0 {
  1412  				as := nod(OAS,
  1413  					nod(OIND, p, nil),
  1414  					nod(OIND, convnop(nod(OSPTR, s, nil), t.PtrTo()), nil))
  1415  				as = typecheck(as, Etop)
  1416  				as = walkstmt(as)
  1417  				init.Append(as)
  1418  			}
  1419  
  1420  			// Slice the [n]byte to a []byte.
  1421  			n.Op = OSLICEARR
  1422  			n.Left = p
  1423  			n = walkexpr(n, init)
  1424  			break
  1425  		}
  1426  		a := nodnil()
  1427  
  1428  		if n.Esc == EscNone {
  1429  			// Create temporary buffer for slice on stack.
  1430  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1431  
  1432  			a = nod(OADDR, temp(t), nil)
  1433  		}
  1434  
   1435  		// stringtoslicebyte(*[32]byte, string) []byte;
  1436  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(s, types.Types[TSTRING]))
  1437  
  1438  	case OSTRARRAYBYTETMP:
  1439  		// []byte(string) conversion that creates a slice
  1440  		// referring to the actual string bytes.
  1441  		// This conversion is handled later by the backend and
  1442  		// is only for use by internal compiler optimizations
  1443  		// that know that the slice won't be mutated.
  1444  		// The only such case today is:
  1445  		// for i, c := range []byte(string)
  1446  		n.Left = walkexpr(n.Left, init)
  1447  
  1448  		// stringtoslicerune(*[32]rune, string) []rune
  1449  	case OSTRARRAYRUNE:
  1450  		a := nodnil()
  1451  
  1452  		if n.Esc == EscNone {
  1453  			// Create temporary buffer for slice on stack.
  1454  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1455  
  1456  			a = nod(OADDR, temp(t), nil)
  1457  		}
  1458  
  1459  		n = mkcall("stringtoslicerune", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1460  
  1461  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1462  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1463  			// n can be directly represented in the read-only data section.
  1464  			// Make direct reference to the static data. See issue 12841.
  1465  			vstat := staticname(n.Type)
  1466  			vstat.Name.SetReadonly(true)
  1467  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1468  			n = vstat
  1469  			n = typecheck(n, Erv)
  1470  			break
  1471  		}
  1472  		var_ := temp(n.Type)
  1473  		anylit(n, var_, init)
  1474  		n = var_
  1475  
  1476  	case OSEND:
  1477  		n1 := n.Right
  1478  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1479  		n1 = walkexpr(n1, init)
  1480  		n1 = nod(OADDR, n1, nil)
  1481  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1482  
  1483  	case OCLOSURE:
  1484  		n = walkclosure(n, init)
  1485  
  1486  	case OCALLPART:
  1487  		n = walkpartialcall(n, init)
  1488  	}
  1489  
  1490  	// Expressions that are constant at run time but not
  1491  	// considered const by the language spec are not turned into
  1492  	// constants until walk. For example, if n is y%1 == 0, the
  1493  	// walk of y%1 may have replaced it by 0.
  1494  	// Check whether n with its updated args is itself now a constant.
  1495  	t := n.Type
  1496  	evconst(n)
  1497  	if n.Type != t {
  1498  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1499  	}
  1500  	if n.Op == OLITERAL {
  1501  		n = typecheck(n, Erv)
   1502  		// Emit the string symbol now to avoid emitting
   1503  		// it concurrently while the backend runs.
  1504  		if s, ok := n.Val().U.(string); ok {
  1505  			_ = stringsym(n.Pos, s)
  1506  		}
  1507  	}
  1508  
  1509  	updateHasCall(n)
  1510  
  1511  	if Debug['w'] != 0 && n != nil {
  1512  		Dump("after walk expr", n)
  1513  	}
  1514  
  1515  	lineno = lno
  1516  	return n
  1517  }
  1518  
  1519  // rtconvfn returns the parameter and result types that will be used by a
  1520  // runtime function to convert from type src to type dst. The runtime function
  1521  // name can be derived from the names of the returned types.
  1522  //
  1523  // If no such function is necessary, it returns (Txxx, Txxx).
  1524  func rtconvfn(src, dst *types.Type) (param, result types.EType) {
  1525  	if thearch.SoftFloat {
  1526  		return Txxx, Txxx
  1527  	}
  1528  
  1529  	switch thearch.LinkArch.Family {
  1530  	case sys.ARM, sys.MIPS:
  1531  		if src.IsFloat() {
  1532  			switch dst.Etype {
  1533  			case TINT64, TUINT64:
  1534  				return TFLOAT64, dst.Etype
  1535  			}
  1536  		}
  1537  		if dst.IsFloat() {
  1538  			switch src.Etype {
  1539  			case TINT64, TUINT64:
  1540  				return src.Etype, TFLOAT64
  1541  			}
  1542  		}
  1543  
  1544  	case sys.I386:
  1545  		if src.IsFloat() {
  1546  			switch dst.Etype {
  1547  			case TINT64, TUINT64:
  1548  				return TFLOAT64, dst.Etype
  1549  			case TUINT32, TUINT, TUINTPTR:
  1550  				return TFLOAT64, TUINT32
  1551  			}
  1552  		}
  1553  		if dst.IsFloat() {
  1554  			switch src.Etype {
  1555  			case TINT64, TUINT64:
  1556  				return src.Etype, TFLOAT64
  1557  			case TUINT32, TUINT, TUINTPTR:
  1558  				return TUINT32, TFLOAT64
  1559  			}
  1560  		}
  1561  	}
  1562  	return Txxx, Txxx
  1563  }
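
         // For example, on 386 a conversion from float64 to uint32 reports (TFLOAT64, TUINT32),
         // and the OCONV case in walkexpr composes the runtime helper name from these names,
         // yielding a call to float64touint32; when (Txxx, Txxx) is returned, the original
         // conversion is left in place for the backend.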
  1564  
  1565  // TODO(josharian): combine this with its caller and simplify
  1566  func reduceSlice(n *Node) *Node {
  1567  	low, high, max := n.SliceBounds()
  1568  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1569  		// Reduce x[i:len(x)] to x[i:].
  1570  		high = nil
  1571  	}
  1572  	n.SetSliceBounds(low, high, max)
  1573  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1574  		// Reduce x[:] to x.
  1575  		if Debug_slice > 0 {
  1576  			Warn("slice: omit slice operation")
  1577  		}
  1578  		return n.Left
  1579  	}
  1580  	return n
  1581  }
  1582  
  1583  func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1584  	// convas will turn map assigns into function calls,
  1585  	// making it impossible for reorder3 to work.
  1586  	n := nod(OAS, l, r)
  1587  
  1588  	if l.Op == OINDEXMAP {
  1589  		return n
  1590  	}
  1591  
  1592  	return convas(n, init)
  1593  }
  1594  
  1595  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1596  	// check assign expression list to
  1597  	// an expression list. called in
  1598  	//	expr-list = expr-list
  1599  
  1600  	// ensure order of evaluation for function calls
  1601  	for i := range nl {
  1602  		nl[i] = safeexpr(nl[i], init)
  1603  	}
  1604  	for i1 := range nr {
  1605  		nr[i1] = safeexpr(nr[i1], init)
  1606  	}
  1607  
  1608  	var nn []*Node
  1609  	i := 0
  1610  	for ; i < len(nl); i++ {
  1611  		if i >= len(nr) {
  1612  			break
  1613  		}
  1614  		// Do not generate 'x = x' during return. See issue 4014.
  1615  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1616  			continue
  1617  		}
  1618  		nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1619  	}
  1620  
  1621  	// cannot happen: caller checked that lists had same length
  1622  	if i < len(nl) || i < len(nr) {
  1623  		var nln, nrn Nodes
  1624  		nln.Set(nl)
  1625  		nrn.Set(nr)
  1626  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1627  	}
  1628  	return nn
  1629  }
  1630  
  1631  // fncall reports whether assigning an rvalue of type rt to an lvalue l might involve a function call.
  1632  func fncall(l *Node, rt *types.Type) bool {
  1633  	if l.HasCall() || l.Op == OINDEXMAP {
  1634  		return true
  1635  	}
  1636  	if types.Identical(l.Type, rt) {
  1637  		return false
  1638  	}
  1639  	// There might be a conversion required, which might involve a runtime call.
  1640  	return true
  1641  }
  1642  
  1643  // check assign type list to
  1644  // an expression list. called in
  1645  //	expr-list = func()
  1646  func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1647  	if nl.Len() != nr.NumFields() {
  1648  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1649  	}
  1650  
  1651  	var nn, mm Nodes
  1652  	for i, l := range nl.Slice() {
  1653  		if l.isBlank() {
  1654  			continue
  1655  		}
  1656  		r := nr.Field(i)
  1657  
  1658  		// Any assignment to an lvalue that might cause a function call must be
  1659  		// deferred until all the returned values have been read.
  1660  		if fncall(l, r.Type) {
  1661  			tmp := temp(r.Type)
  1662  			tmp = typecheck(tmp, Erv)
  1663  			a := nod(OAS, l, tmp)
  1664  			a = convas(a, &mm)
  1665  			mm.Append(a)
  1666  			l = tmp
  1667  		}
  1668  
  1669  		a := nod(OAS, l, nodarg(r))
  1670  		a = convas(a, &nn)
  1671  		updateHasCall(a)
  1672  		if a.HasCall() {
  1673  			Dump("ascompatet ucount", a)
  1674  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1675  		}
  1676  
  1677  		nn.Append(a)
  1678  	}
  1679  	return append(nn.Slice(), mm.Slice()...)
  1680  }
  1681  
  1682  // nodarg returns a Node for the function argument f.
  1683  // f is a *types.Field within a struct *types.Type.
  1684  //
  1685  // The node is for use by a caller invoking the given
  1686  // function, preparing the arguments before the call
  1687  // or retrieving the results after the call.
  1688  // In this case, the node will correspond to an outgoing argument
  1689  // slot like 8(SP).
  1690  func nodarg(f *types.Field) *Node {
  1691  	// Build fake name for individual variable.
  1692  	n := newname(lookup("__"))
  1693  	n.Type = f.Type
  1694  	if f.Offset == BADWIDTH {
  1695  		Fatalf("nodarg: offset not computed for %v", f)
  1696  	}
  1697  	n.Xoffset = f.Offset
  1698  	n.Orig = asNode(f.Nname)
  1699  
  1700  	// preparing arguments for call
  1701  	n.Op = OINDREGSP
  1702  	n.Xoffset += Ctxt.FixedFrameSize()
  1703  	n.SetTypecheck(1)
  1704  	n.SetAddrtaken(true) // keep optimizers at bay
  1705  	return n
  1706  }
  1707  
  1708  // package all the arguments that match a ... T parameter into a []T.
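        // For example (illustrative), given a function f(x int, ys ...int),
        //	f(1, 2, 3)  is lowered to roughly  f(1, []int{2, 3})
        //	f(1)        passes a nil []int
        // while an explicit f(1, s...) never reaches here (walkCall skips ... calls).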
  1709  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1710  	esc := uint16(EscUnknown)
  1711  	if ddd != nil {
  1712  		esc = ddd.Esc
  1713  	}
  1714  
  1715  	if len(args) == 0 {
  1716  		n := nodnil()
  1717  		n.Type = typ
  1718  		return n
  1719  	}
  1720  
  1721  	n := nod(OCOMPLIT, nil, typenod(typ))
  1722  	if ddd != nil && prealloc[ddd] != nil {
  1723  		prealloc[n] = prealloc[ddd] // temporary to use
  1724  	}
  1725  	n.List.Set(args)
  1726  	n.Esc = esc
  1727  	n = typecheck(n, Erv)
  1728  	if n.Type == nil {
  1729  		Fatalf("mkdotargslice: typecheck failed")
  1730  	}
  1731  	n = walkexpr(n, init)
  1732  	return n
  1733  }
  1734  
  1735  func walkCall(n *Node, init *Nodes) {
  1736  	if n.Rlist.Len() != 0 {
  1737  		return // already walked
  1738  	}
  1739  	n.Left = walkexpr(n.Left, init)
  1740  	walkexprlist(n.List.Slice(), init)
  1741  
  1742  	params := n.Left.Type.Params()
  1743  	args := n.List.Slice()
  1744  	// If there's a ... parameter (which is only valid as the final
  1745  	// parameter) and this is not a ... call expression,
  1746  	// then assign the remaining arguments as a slice.
  1747  	if nf := params.NumFields(); nf > 0 {
  1748  		if last := params.Field(nf - 1); last.Isddd() && !n.Isddd() {
  1749  			tail := args[nf-1:]
  1750  			slice := mkdotargslice(last.Type, tail, init, n.Right)
  1751  			// Allow immediate GC.
  1752  			for i := range tail {
  1753  				tail[i] = nil
  1754  			}
  1755  			args = append(args[:nf-1], slice)
  1756  		}
  1757  	}
  1758  
  1759  	// If this is a method call, add the receiver at the beginning of the args.
  1760  	if n.Op == OCALLMETH {
  1761  		withRecv := make([]*Node, len(args)+1)
  1762  		withRecv[0] = n.Left.Left
  1763  		n.Left.Left = nil
  1764  		copy(withRecv[1:], args)
  1765  		args = withRecv
  1766  	}
  1767  
  1768  	// For any argument whose evaluation might require a function call,
  1769  	// store that argument into a temporary variable,
  1770  	// to prevent such a call from clobbering arguments already on the stack.
  1771  	// When instrumenting, all arguments might require function calls.
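        	// For example (illustrative), f(g(), h()) is rewritten so that
        	//	tmp1 := g(); tmp2 := h()
        	// are evaluated first (kept in n.List) and the call proper uses
        	//	f(tmp1, tmp2)
        	// (kept in n.Rlist), so evaluating h() cannot clobber g()'s result.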
  1772  	var tempAssigns []*Node
  1773  	for i, arg := range args {
  1774  		updateHasCall(arg)
  1775  		if instrumenting || arg.HasCall() {
  1776  			// make assignment of fncall to tempname
  1777  			tmp := temp(arg.Type)
  1778  			a := nod(OAS, tmp, arg)
  1779  			tempAssigns = append(tempAssigns, a)
  1780  			// replace arg with temp
  1781  			args[i] = tmp
  1782  		}
  1783  	}
  1784  
  1785  	n.List.Set(tempAssigns)
  1786  	n.Rlist.Set(args)
  1787  }
  1788  
  1789  // generate code for print
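        // For example (roughly), println("x:", x) with x an int becomes
        //	printlock()
        //	printstring("x: ")	// "x:" and the inserted separator " " collapsed
        //	printint(x)
        //	printnl()
        //	printunlock()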
  1790  func walkprint(nn *Node, init *Nodes) *Node {
  1791  	// Hoist all the argument evaluation up before the lock.
  1792  	walkexprlistcheap(nn.List.Slice(), init)
  1793  
  1794  	// For println, add " " between elements and "\n" at the end.
  1795  	if nn.Op == OPRINTN {
  1796  		s := nn.List.Slice()
  1797  		t := make([]*Node, 0, len(s)*2)
  1798  		for i, n := range s {
  1799  			if i != 0 {
  1800  				t = append(t, nodstr(" "))
  1801  			}
  1802  			t = append(t, n)
  1803  		}
  1804  		t = append(t, nodstr("\n"))
  1805  		nn.List.Set(t)
  1806  	}
  1807  
  1808  	// Collapse runs of constant strings.
  1809  	s := nn.List.Slice()
  1810  	t := make([]*Node, 0, len(s))
  1811  	for i := 0; i < len(s); {
  1812  		var strs []string
  1813  		for i < len(s) && Isconst(s[i], CTSTR) {
  1814  			strs = append(strs, s[i].Val().U.(string))
  1815  			i++
  1816  		}
  1817  		if len(strs) > 0 {
  1818  			t = append(t, nodstr(strings.Join(strs, "")))
  1819  		}
  1820  		if i < len(s) {
  1821  			t = append(t, s[i])
  1822  			i++
  1823  		}
  1824  	}
  1825  	nn.List.Set(t)
  1826  
  1827  	calls := []*Node{mkcall("printlock", nil, init)}
  1828  	for i, n := range nn.List.Slice() {
  1829  		if n.Op == OLITERAL {
  1830  			switch n.Val().Ctype() {
  1831  			case CTRUNE:
  1832  				n = defaultlit(n, types.Runetype)
  1833  
  1834  			case CTINT:
  1835  				n = defaultlit(n, types.Types[TINT64])
  1836  
  1837  			case CTFLT:
  1838  				n = defaultlit(n, types.Types[TFLOAT64])
  1839  			}
  1840  		}
  1841  
  1842  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  1843  			n = defaultlit(n, types.Types[TINT64])
  1844  		}
  1845  		n = defaultlit(n, nil)
  1846  		nn.List.SetIndex(i, n)
  1847  		if n.Type == nil || n.Type.Etype == TFORW {
  1848  			continue
  1849  		}
  1850  
  1851  		var on *Node
  1852  		switch n.Type.Etype {
  1853  		case TINTER:
  1854  			if n.Type.IsEmptyInterface() {
  1855  				on = syslook("printeface")
  1856  			} else {
  1857  				on = syslook("printiface")
  1858  			}
  1859  			on = substArgTypes(on, n.Type) // any-1
  1860  		case TPTR, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  1861  			on = syslook("printpointer")
  1862  			on = substArgTypes(on, n.Type) // any-1
  1863  		case TSLICE:
  1864  			on = syslook("printslice")
  1865  			on = substArgTypes(on, n.Type) // any-1
  1866  		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
  1867  			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  1868  				on = syslook("printhex")
  1869  			} else {
  1870  				on = syslook("printuint")
  1871  			}
  1872  		case TINT, TINT8, TINT16, TINT32, TINT64:
  1873  			on = syslook("printint")
  1874  		case TFLOAT32, TFLOAT64:
  1875  			on = syslook("printfloat")
  1876  		case TCOMPLEX64, TCOMPLEX128:
  1877  			on = syslook("printcomplex")
  1878  		case TBOOL:
  1879  			on = syslook("printbool")
  1880  		case TSTRING:
  1881  			cs := ""
  1882  			if Isconst(n, CTSTR) {
  1883  				cs = n.Val().U.(string)
  1884  			}
  1885  			switch cs {
  1886  			case " ":
  1887  				on = syslook("printsp")
  1888  			case "\n":
  1889  				on = syslook("printnl")
  1890  			default:
  1891  				on = syslook("printstring")
  1892  			}
  1893  		default:
  1894  			badtype(OPRINT, n.Type, nil)
  1895  			continue
  1896  		}
  1897  
  1898  		r := nod(OCALL, on, nil)
  1899  		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  1900  			t := params[0].Type
  1901  			if !types.Identical(t, n.Type) {
  1902  				n = nod(OCONV, n, nil)
  1903  				n.Type = t
  1904  			}
  1905  			r.List.Append(n)
  1906  		}
  1907  		calls = append(calls, r)
  1908  	}
  1909  
  1910  	calls = append(calls, mkcall("printunlock", nil, init))
  1911  
  1912  	typecheckslice(calls, Etop)
  1913  	walkexprlist(calls, init)
  1914  
  1915  	r := nod(OEMPTY, nil, nil)
  1916  	r = typecheck(r, Etop)
  1917  	r = walkexpr(r, init)
  1918  	r.Ninit.Set(calls)
  1919  	return r
  1920  }
  1921  
  1922  func callnew(t *types.Type) *Node {
  1923  	if t.NotInHeap() {
  1924  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  1925  	}
  1926  	dowidth(t)
  1927  	fn := syslook("newobject")
  1928  	fn = substArgTypes(fn, t)
  1929  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  1930  	v.SetNonNil(true)
  1931  	return v
  1932  }
  1933  
  1934  // isReflectHeaderDataField reports whether l is an expression p.Data
  1935  // where p has type reflect.SliceHeader or reflect.StringHeader.
  1936  func isReflectHeaderDataField(l *Node) bool {
  1937  	if l.Type != types.Types[TUINTPTR] {
  1938  		return false
  1939  	}
  1940  
  1941  	var tsym *types.Sym
  1942  	switch l.Op {
  1943  	case ODOT:
  1944  		tsym = l.Left.Type.Sym
  1945  	case ODOTPTR:
  1946  		tsym = l.Left.Type.Elem().Sym
  1947  	default:
  1948  		return false
  1949  	}
  1950  
  1951  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  1952  		return false
  1953  	}
  1954  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  1955  }
  1956  
  1957  func convas(n *Node, init *Nodes) *Node {
  1958  	if n.Op != OAS {
  1959  		Fatalf("convas: not OAS %v", n.Op)
  1960  	}
  1961  	defer updateHasCall(n)
  1962  
  1963  	n.SetTypecheck(1)
  1964  
  1965  	if n.Left == nil || n.Right == nil {
  1966  		return n
  1967  	}
  1968  
  1969  	lt := n.Left.Type
  1970  	rt := n.Right.Type
  1971  	if lt == nil || rt == nil {
  1972  		return n
  1973  	}
  1974  
  1975  	if n.Left.isBlank() {
  1976  		n.Right = defaultlit(n.Right, nil)
  1977  		return n
  1978  	}
  1979  
  1980  	if !types.Identical(lt, rt) {
  1981  		n.Right = assignconv(n.Right, lt, "assignment")
  1982  		n.Right = walkexpr(n.Right, init)
  1983  	}
  1984  	dowidth(n.Right.Type)
  1985  
  1986  	return n
  1987  }
  1988  
  1989  // from ascompat[ee]
  1990  //	a,b = c,d
  1991  // simultaneous assignment. there cannot
  1992  // be later use of an earlier lvalue.
  1993  //
  1994  // function calls have been removed.
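        // For example (illustrative), in
        //	x, y = y, x
        // the second assignment's right-hand x would see the value written by the
        // first, so reorder3 emits roughly
        //	tmp := x
        //	x = y
        //	y = tmp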
  1995  func reorder3(all []*Node) []*Node {
  1996  	// If a needed expression may be affected by an
  1997  	// earlier assignment, make an early copy of that
  1998  	// expression and use the copy instead.
  1999  	var early []*Node
  2000  
  2001  	var mapinit Nodes
  2002  	for i, n := range all {
  2003  		l := n.Left
  2004  
  2005  		// Save subexpressions needed on left side.
  2006  		// Drill through non-dereferences.
  2007  		for {
  2008  			if l.Op == ODOT || l.Op == OPAREN {
  2009  				l = l.Left
  2010  				continue
  2011  			}
  2012  
  2013  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2014  				l.Right = reorder3save(l.Right, all, i, &early)
  2015  				l = l.Left
  2016  				continue
  2017  			}
  2018  
  2019  			break
  2020  		}
  2021  
  2022  		switch l.Op {
  2023  		default:
  2024  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2025  
  2026  		case ONAME:
  2027  			break
  2028  
  2029  		case OINDEX, OINDEXMAP:
  2030  			l.Left = reorder3save(l.Left, all, i, &early)
  2031  			l.Right = reorder3save(l.Right, all, i, &early)
  2032  			if l.Op == OINDEXMAP {
  2033  				all[i] = convas(all[i], &mapinit)
  2034  			}
  2035  
  2036  		case OIND, ODOTPTR:
  2037  			l.Left = reorder3save(l.Left, all, i, &early)
  2038  		}
  2039  
  2040  		// Save expression on right side.
  2041  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2042  	}
  2043  
  2044  	early = append(mapinit.Slice(), early...)
  2045  	return append(early, all...)
  2046  }
  2047  
  2048  // If the evaluation of n would be affected by the
  2049  // assignments in all up to but not including the ith assignment,
  2050  // copy n into a temporary, append the copying assignment to *early,
  2051  // and use that temporary in place of n.
  2052  // The result of reorder3save MUST be assigned back to n, e.g.
  2053  // 	n.Left = reorder3save(n.Left, all, i, early)
  2054  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2055  	if !aliased(n, all, i) {
  2056  		return n
  2057  	}
  2058  
  2059  	q := temp(n.Type)
  2060  	q = nod(OAS, q, n)
  2061  	q = typecheck(q, Etop)
  2062  	*early = append(*early, q)
  2063  	return q.Left
  2064  }
  2065  
  2066  // what's the outer value that a write to n affects?
  2067  // outer value means containing struct or array.
  2068  func outervalue(n *Node) *Node {
  2069  	for {
  2070  		switch n.Op {
  2071  		case OXDOT:
  2072  			Fatalf("OXDOT in walk")
  2073  		case ODOT, OPAREN, OCONVNOP:
  2074  			n = n.Left
  2075  			continue
  2076  		case OINDEX:
  2077  			if n.Left.Type != nil && n.Left.Type.IsArray() {
  2078  				n = n.Left
  2079  				continue
  2080  			}
  2081  		}
  2082  
  2083  		return n
  2084  	}
  2085  }
  2086  
  2087  // Is it possible that the computation of n might be
  2088  // affected by writes in all up to but not including the ith element?
  2089  func aliased(n *Node, all []*Node, i int) bool {
  2090  	if n == nil {
  2091  		return false
  2092  	}
  2093  
  2094  	// Treat all fields of a struct as referring to the whole struct.
  2095  	// We could do better but we would have to keep track of the fields.
  2096  	for n.Op == ODOT {
  2097  		n = n.Left
  2098  	}
  2099  
  2100  	// Look for obvious aliasing: a variable being assigned
  2101  	// during the all list and appearing in n.
  2102  	// Also record whether there are any writes to main memory.
  2103  	// Also record whether there are any writes to variables
  2104  	// whose addresses have been taken.
  2105  	memwrite := false
  2106  	varwrite := false
  2107  	for _, an := range all[:i] {
  2108  		a := outervalue(an.Left)
  2109  
  2110  		for a.Op == ODOT {
  2111  			a = a.Left
  2112  		}
  2113  
  2114  		if a.Op != ONAME {
  2115  			memwrite = true
  2116  			continue
  2117  		}
  2118  
  2119  		switch n.Class() {
  2120  		default:
  2121  			varwrite = true
  2122  			continue
  2123  
  2124  		case PAUTO, PPARAM, PPARAMOUT:
  2125  			if n.Addrtaken() {
  2126  				varwrite = true
  2127  				continue
  2128  			}
  2129  
  2130  			if vmatch2(a, n) {
  2131  				// Direct hit.
  2132  				return true
  2133  			}
  2134  		}
  2135  	}
  2136  
  2137  	// The variables being written do not appear in n.
  2138  	// However, n might refer to computed addresses
  2139  	// that are being written.
  2140  
  2141  	// If no computed addresses are affected by the writes, no aliasing.
  2142  	if !memwrite && !varwrite {
  2143  		return false
  2144  	}
  2145  
  2146  	// If n does not refer to computed addresses
  2147  	// (that is, if n only refers to variables whose addresses
  2148  	// have not been taken), no aliasing.
  2149  	if varexpr(n) {
  2150  		return false
  2151  	}
  2152  
  2153  	// Otherwise, both the writes and n refer to computed memory addresses.
  2154  	// Assume that they might conflict.
  2155  	return true
  2156  }
  2157  
  2158  // does the evaluation of n only refer to variables
  2159  // whose addresses have not been taken?
  2160  // (and no other memory)
  2161  func varexpr(n *Node) bool {
  2162  	if n == nil {
  2163  		return true
  2164  	}
  2165  
  2166  	switch n.Op {
  2167  	case OLITERAL:
  2168  		return true
  2169  
  2170  	case ONAME:
  2171  		switch n.Class() {
  2172  		case PAUTO, PPARAM, PPARAMOUT:
  2173  			if !n.Addrtaken() {
  2174  				return true
  2175  			}
  2176  		}
  2177  
  2178  		return false
  2179  
  2180  	case OADD,
  2181  		OSUB,
  2182  		OOR,
  2183  		OXOR,
  2184  		OMUL,
  2185  		ODIV,
  2186  		OMOD,
  2187  		OLSH,
  2188  		ORSH,
  2189  		OAND,
  2190  		OANDNOT,
  2191  		OPLUS,
  2192  		OMINUS,
  2193  		OCOM,
  2194  		OPAREN,
  2195  		OANDAND,
  2196  		OOROR,
  2197  		OCONV,
  2198  		OCONVNOP,
  2199  		OCONVIFACE,
  2200  		ODOTTYPE:
  2201  		return varexpr(n.Left) && varexpr(n.Right)
  2202  
  2203  	case ODOT: // but not ODOTPTR
  2204  		// Should have been handled in aliased.
  2205  		Fatalf("varexpr unexpected ODOT")
  2206  	}
  2207  
  2208  	// Be conservative.
  2209  	return false
  2210  }
  2211  
  2212  // is the name l mentioned in r?
  2213  func vmatch2(l *Node, r *Node) bool {
  2214  	if r == nil {
  2215  		return false
  2216  	}
  2217  	switch r.Op {
  2218  	// match each right given left
  2219  	case ONAME:
  2220  		return l == r
  2221  
  2222  	case OLITERAL:
  2223  		return false
  2224  	}
  2225  
  2226  	if vmatch2(l, r.Left) {
  2227  		return true
  2228  	}
  2229  	if vmatch2(l, r.Right) {
  2230  		return true
  2231  	}
  2232  	for _, n := range r.List.Slice() {
  2233  		if vmatch2(l, n) {
  2234  			return true
  2235  		}
  2236  	}
  2237  	return false
  2238  }
  2239  
  2240  // is any name mentioned in l also mentioned in r?
  2241  // called by sinit.go
  2242  func vmatch1(l *Node, r *Node) bool {
  2243  	// isolate all left sides
  2244  	if l == nil || r == nil {
  2245  		return false
  2246  	}
  2247  	switch l.Op {
  2248  	case ONAME:
  2249  		switch l.Class() {
  2250  		case PPARAM, PAUTO:
  2251  			break
  2252  
  2253  		default:
  2254  			// assignment to non-stack variable must be
  2255  			// delayed if right has function calls.
  2256  			if r.HasCall() {
  2257  				return true
  2258  			}
  2259  		}
  2260  
  2261  		return vmatch2(l, r)
  2262  
  2263  	case OLITERAL:
  2264  		return false
  2265  	}
  2266  
  2267  	if vmatch1(l.Left, r) {
  2268  		return true
  2269  	}
  2270  	if vmatch1(l.Right, r) {
  2271  		return true
  2272  	}
  2273  	for _, n := range l.List.Slice() {
  2274  		if vmatch1(n, r) {
  2275  			return true
  2276  		}
  2277  	}
  2278  	return false
  2279  }
  2280  
  2281  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2282  // and to copy non-result parameters' values from the stack.
  2283  func paramstoheap(params *types.Type) []*Node {
  2284  	var nn []*Node
  2285  	for _, t := range params.Fields().Slice() {
  2286  		v := asNode(t.Nname)
  2287  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2288  			v = nil
  2289  		}
  2290  		if v == nil {
  2291  			continue
  2292  		}
  2293  
  2294  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2295  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2296  			if stackcopy.Class() == PPARAM {
  2297  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2298  			}
  2299  		}
  2300  	}
  2301  
  2302  	return nn
  2303  }
  2304  
  2305  // zeroResults zeros the return values at the start of the function.
  2306  // We need to do this very early in the function.  Defer might stop a
  2307  // panic and show the return values as they exist at the time of
  2308  // panic.  For precise stacks, the garbage collector assumes results
  2309  // are always live, so we need to zero them before any allocations,
  2310  // even allocations to move params/results to the heap.
  2311  // The generated code is added to Curfn's Enter list.
  2312  func zeroResults() {
  2313  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2314  		v := asNode(f.Nname)
  2315  		if v != nil && v.Name.Param.Heapaddr != nil {
  2316  			// The local which points to the return value is the
  2317  			// thing that needs zeroing. This is already handled
  2318  			// by a Needzero annotation in plive.go:livenessepilogue.
  2319  			continue
  2320  		}
  2321  		if v.isParamHeapCopy() {
  2322  			// TODO(josharian/khr): Investigate whether we can switch to "continue" here,
  2323  			// and document more in either case.
  2324  			// In the review of CL 114797, Keith wrote (roughly):
  2325  			// I don't think the zeroing below matters.
  2326  			// The stack return value will never be marked as live anywhere in the function.
  2327  			// It is not written to until deferreturn returns.
  2328  			v = v.Name.Param.Stackcopy
  2329  		}
  2330  		// Zero the stack location containing f.
  2331  		Curfn.Func.Enter.Append(nodl(Curfn.Pos, OAS, v, nil))
  2332  	}
  2333  }
  2334  
  2335  // returnsfromheap returns code to copy values for heap-escaped parameters
  2336  // back to the stack.
  2337  func returnsfromheap(params *types.Type) []*Node {
  2338  	var nn []*Node
  2339  	for _, t := range params.Fields().Slice() {
  2340  		v := asNode(t.Nname)
  2341  		if v == nil {
  2342  			continue
  2343  		}
  2344  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2345  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2346  		}
  2347  	}
  2348  
  2349  	return nn
  2350  }
  2351  
  2352  // heapmoves generates code to handle migrating heap-escaped parameters
  2353  // between the stack and the heap. The generated code is added to Curfn's
  2354  // Enter and Exit lists.
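        // For example (illustrative), in
        //	func f(p int) { go func() { println(p) }() }
        // p escapes to the heap, so Enter gains code that allocates the heap copy
        // of p and initializes it from the stack parameter; for escaping results,
        // Exit copies the heap value back into the stack result slot.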
  2355  func heapmoves() {
  2356  	lno := lineno
  2357  	lineno = Curfn.Pos
  2358  	nn := paramstoheap(Curfn.Type.Recvs())
  2359  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2360  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2361  	Curfn.Func.Enter.Append(nn...)
  2362  	lineno = Curfn.Func.Endlineno
  2363  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2364  	lineno = lno
  2365  }
  2366  
  2367  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2368  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2369  		Fatalf("mkcall %v %v", fn, fn.Type)
  2370  	}
  2371  
  2372  	n := fn.Type.NumParams()
  2373  	if n != len(va) {
  2374  		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
  2375  	}
  2376  
  2377  	r := nod(OCALL, fn, nil)
  2378  	r.List.Set(va)
  2379  	if fn.Type.NumResults() > 0 {
  2380  		r = typecheck(r, Erv|Efnstruct)
  2381  	} else {
  2382  		r = typecheck(r, Etop)
  2383  	}
  2384  	r = walkexpr(r, init)
  2385  	r.Type = t
  2386  	return r
  2387  }
  2388  
  2389  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2390  	return vmkcall(syslook(name), t, init, args)
  2391  }
  2392  
  2393  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2394  	return vmkcall(fn, t, init, args)
  2395  }
  2396  
  2397  func conv(n *Node, t *types.Type) *Node {
  2398  	if types.Identical(n.Type, t) {
  2399  		return n
  2400  	}
  2401  	n = nod(OCONV, n, nil)
  2402  	n.Type = t
  2403  	n = typecheck(n, Erv)
  2404  	return n
  2405  }
  2406  
  2407  // convnop converts node n to type t using the OCONVNOP op
  2408  // and typechecks the result with Erv.
  2409  func convnop(n *Node, t *types.Type) *Node {
  2410  	n = nod(OCONVNOP, n, nil)
  2411  	n.Type = t
  2412  	n = typecheck(n, Erv)
  2413  	return n
  2414  }
  2415  
  2416  // byteindex converts n, which is byte-sized, to a uint8.
  2417  // We cannot use conv, because we allow converting bool to uint8 here,
  2418  // which is forbidden in user code.
  2419  func byteindex(n *Node) *Node {
  2420  	if types.Identical(n.Type, types.Types[TUINT8]) {
  2421  		return n
  2422  	}
  2423  	n = nod(OCONV, n, nil)
  2424  	n.Type = types.Types[TUINT8]
  2425  	n.SetTypecheck(1)
  2426  	return n
  2427  }
  2428  
  2429  func chanfn(name string, n int, t *types.Type) *Node {
  2430  	if !t.IsChan() {
  2431  		Fatalf("chanfn %v", t)
  2432  	}
  2433  	fn := syslook(name)
  2434  	switch n {
  2435  	default:
  2436  		Fatalf("chanfn %d", n)
  2437  	case 1:
  2438  		fn = substArgTypes(fn, t.Elem())
  2439  	case 2:
  2440  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2441  	}
  2442  	return fn
  2443  }
  2444  
  2445  func mapfn(name string, t *types.Type) *Node {
  2446  	if !t.IsMap() {
  2447  		Fatalf("mapfn %v", t)
  2448  	}
  2449  	fn := syslook(name)
  2450  	fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
  2451  	return fn
  2452  }
  2453  
  2454  func mapfndel(name string, t *types.Type) *Node {
  2455  	if !t.IsMap() {
  2456  		Fatalf("mapfn %v", t)
  2457  	}
  2458  	fn := syslook(name)
  2459  	fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key())
  2460  	return fn
  2461  }
  2462  
  2463  const (
  2464  	mapslow = iota
  2465  	mapfast32
  2466  	mapfast32ptr
  2467  	mapfast64
  2468  	mapfast64ptr
  2469  	mapfaststr
  2470  	nmapfast
  2471  )
  2472  
  2473  type mapnames [nmapfast]string
  2474  
  2475  func mkmapnames(base string, ptr string) mapnames {
  2476  	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2477  }
  2478  
  2479  var mapaccess1 = mkmapnames("mapaccess1", "")
  2480  var mapaccess2 = mkmapnames("mapaccess2", "")
  2481  var mapassign = mkmapnames("mapassign", "ptr")
  2482  var mapdelete = mkmapnames("mapdelete", "")
  2483  
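        // mapfast selects which specialized runtime map routines, if any, apply to
        // map type t. For example (illustrative), map[int64]T with a small value
        // type T uses the *_fast64 helpers (mapassign_fast64, mapaccess1_fast64, ...),
        // map[string]T uses the *_faststr helpers, and everything else falls back
        // to the generic (mapslow) versions.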
  2484  func mapfast(t *types.Type) int {
  2485  	// Check runtime/map.go:maxValueSize before changing.
  2486  	if t.Elem().Width > 128 {
  2487  		return mapslow
  2488  	}
  2489  	switch algtype(t.Key()) {
  2490  	case AMEM32:
  2491  		if !t.Key().HasHeapPointer() {
  2492  			return mapfast32
  2493  		}
  2494  		if Widthptr == 4 {
  2495  			return mapfast32ptr
  2496  		}
  2497  		Fatalf("small pointer %v", t.Key())
  2498  	case AMEM64:
  2499  		if !t.Key().HasHeapPointer() {
  2500  			return mapfast64
  2501  		}
  2502  		if Widthptr == 8 {
  2503  			return mapfast64ptr
  2504  		}
  2505  		// Two-word object, at least one of which is a pointer.
  2506  		// Use the slow path.
  2507  	case ASTRING:
  2508  		return mapfaststr
  2509  	}
  2510  	return mapslow
  2511  }
  2512  
  2513  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2514  	fn := syslook(name)
  2515  	fn = substArgTypes(fn, l, r)
  2516  	return fn
  2517  }
  2518  
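        // addstr lowers string concatenation. For example (roughly),
        //	s := a + "/" + b
        // becomes a call to the runtime helper
        //	concatstring3(buf, a, "/", b)
        // where buf points to a small stack scratch buffer when the result does not
        // escape (and is nil otherwise); concatenations of more than five operands
        // are passed to concatstrings as a []string instead.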
  2519  func addstr(n *Node, init *Nodes) *Node {
  2520  	// orderexpr rewrote OADDSTR to have a list of strings.
  2521  	c := n.List.Len()
  2522  
  2523  	if c < 2 {
  2524  		Fatalf("addstr count %d too small", c)
  2525  	}
  2526  
  2527  	buf := nodnil()
  2528  	if n.Esc == EscNone {
  2529  		sz := int64(0)
  2530  		for _, n1 := range n.List.Slice() {
  2531  			if n1.Op == OLITERAL {
  2532  				sz += int64(len(n1.Val().U.(string)))
  2533  			}
  2534  		}
  2535  
  2536  		// Don't allocate the buffer if the result won't fit.
  2537  		if sz < tmpstringbufsize {
  2538  			// Create temporary buffer for result string on stack.
  2539  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2540  
  2541  			buf = nod(OADDR, temp(t), nil)
  2542  		}
  2543  	}
  2544  
  2545  	// build list of string arguments
  2546  	args := []*Node{buf}
  2547  	for _, n2 := range n.List.Slice() {
  2548  		args = append(args, conv(n2, types.Types[TSTRING]))
  2549  	}
  2550  
  2551  	var fn string
  2552  	if c <= 5 {
  2553  		// small numbers of strings use direct runtime helpers.
  2554  		// note: orderexpr knows this cutoff too.
  2555  		fn = fmt.Sprintf("concatstring%d", c)
  2556  	} else {
  2557  		// large numbers of strings are passed to the runtime as a slice.
  2558  		fn = "concatstrings"
  2559  
  2560  		t := types.NewSlice(types.Types[TSTRING])
  2561  		slice := nod(OCOMPLIT, nil, typenod(t))
  2562  		if prealloc[n] != nil {
  2563  			prealloc[slice] = prealloc[n]
  2564  		}
  2565  		slice.List.Set(args[1:]) // skip buf arg
  2566  		args = []*Node{buf, slice}
  2567  		slice.Esc = EscNone
  2568  	}
  2569  
  2570  	cat := syslook(fn)
  2571  	r := nod(OCALL, cat, nil)
  2572  	r.List.Set(args)
  2573  	r = typecheck(r, Erv)
  2574  	r = walkexpr(r, init)
  2575  	r.Type = n.Type
  2576  
  2577  	return r
  2578  }
  2579  
  2580  func walkAppendArgs(n *Node, init *Nodes) {
  2581  	walkexprlistsafe(n.List.Slice(), init)
  2582  
  2583  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2584  	// and n are name or literal, but those may index the slice we're
  2585  	// modifying here. Fix explicitly.
  2586  	ls := n.List.Slice()
  2587  	for i1, n1 := range ls {
  2588  		ls[i1] = cheapexpr(n1, init)
  2589  	}
  2590  }
  2591  
  2592  // expand append(l1, l2...) to
  2593  //   init {
  2594  //     s := l1
  2595  //     n := len(s) + len(l2)
  2596  //     // Compare as uint so growslice can panic on overflow.
  2597  //     if uint(n) > uint(cap(s)) {
  2598  //       s = growslice(T, s, n)
  2599  //     }
  2600  //     s = s[:n]
  2601  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2602  //   }
  2603  //   s
  2604  //
  2605  // l2 is allowed to be a string.
  2606  func appendslice(n *Node, init *Nodes) *Node {
  2607  	walkAppendArgs(n, init)
  2608  
  2609  	l1 := n.List.First()
  2610  	l2 := n.List.Second()
  2611  
  2612  	var nodes Nodes
  2613  
  2614  	// var s []T
  2615  	s := temp(l1.Type)
  2616  	nodes.Append(nod(OAS, s, l1)) // s = l1
  2617  
  2618  	elemtype := s.Type.Elem()
  2619  
  2620  	// n := len(s) + len(l2)
  2621  	nn := temp(types.Types[TINT])
  2622  	nodes.Append(nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2623  
  2624  	// if uint(n) > uint(cap(s))
  2625  	nif := nod(OIF, nil, nil)
  2626  	nuint := conv(nn, types.Types[TUINT])
  2627  	scapuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2628  	nif.Left = nod(OGT, nuint, scapuint)
  2629  
  2630  	// instantiate growslice(typ *type, []any, int) []any
  2631  	fn := syslook("growslice")
  2632  	fn = substArgTypes(fn, elemtype, elemtype)
  2633  
  2634  	// s = growslice(T, s, n)
  2635  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2636  	nodes.Append(nif)
  2637  
  2638  	// s = s[:n]
  2639  	nt := nod(OSLICE, s, nil)
  2640  	nt.SetSliceBounds(nil, nn, nil)
  2641  	nt.SetBounded(true)
  2642  	nodes.Append(nod(OAS, s, nt))
  2643  
  2644  	var ncopy *Node
  2645  	if elemtype.HasHeapPointer() {
  2646  		// copy(s[len(l1):], l2)
  2647  		nptr1 := nod(OSLICE, s, nil)
  2648  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2649  
  2650  		nptr2 := l2
  2651  
  2652  		Curfn.Func.setWBPos(n.Pos)
  2653  
  2654  		// instantiate typedslicecopy(typ *type, dst any, src any) int
  2655  		fn := syslook("typedslicecopy")
  2656  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2657  		ncopy = mkcall1(fn, types.Types[TINT], &nodes, typename(elemtype), nptr1, nptr2)
  2658  
  2659  	} else if instrumenting && !compiling_runtime {
  2660  		// rely on runtime to instrument copy.
  2661  		// copy(s[len(l1):], l2)
  2662  		nptr1 := nod(OSLICE, s, nil)
  2663  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2664  
  2665  		nptr2 := l2
  2666  
  2667  		if l2.Type.IsString() {
  2668  			// instantiate func slicestringcopy(to any, fr any) int
  2669  			fn := syslook("slicestringcopy")
  2670  			fn = substArgTypes(fn, l1.Type, l2.Type)
  2671  			ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2)
  2672  		} else {
  2673  			// instantiate func slicecopy(to any, fr any, wid uintptr) int
  2674  			fn := syslook("slicecopy")
  2675  			fn = substArgTypes(fn, l1.Type, l2.Type)
  2676  			ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2, nodintconst(elemtype.Width))
  2677  		}
  2678  
  2679  	} else {
  2680  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2681  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  2682  		nptr1.SetBounded(true)
  2683  		nptr1 = nod(OADDR, nptr1, nil)
  2684  
  2685  		nptr2 := nod(OSPTR, l2, nil)
  2686  
  2687  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &nodes)
  2688  		nwid = nod(OMUL, nwid, nodintconst(elemtype.Width))
  2689  
  2690  		// instantiate func memmove(to *any, frm *any, length uintptr)
  2691  		fn := syslook("memmove")
  2692  		fn = substArgTypes(fn, elemtype, elemtype)
  2693  		ncopy = mkcall1(fn, nil, &nodes, nptr1, nptr2, nwid)
  2694  	}
  2695  	ln := append(nodes.Slice(), ncopy)
  2696  
  2697  	typecheckslice(ln, Etop)
  2698  	walkstmtlist(ln)
  2699  	init.Append(ln...)
  2700  	return s
  2701  }
  2702  
  2703  // isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
  2704  // isAppendOfMake assumes n has already been typechecked.
  2705  func isAppendOfMake(n *Node) bool {
  2706  	if Debug['N'] != 0 || instrumenting {
  2707  		return false
  2708  	}
  2709  
  2710  	if n.Typecheck() == 0 {
  2711  		Fatalf("missing typecheck: %+v", n)
  2712  	}
  2713  
  2714  	if n.Op != OAPPEND || !n.Isddd() || n.List.Len() != 2 {
  2715  		return false
  2716  	}
  2717  
  2718  	second := n.List.Second()
  2719  	if second.Op != OMAKESLICE || second.Right != nil {
  2720  		return false
  2721  	}
  2722  
  2723  	// y must be either an integer constant or a variable of type int.
  2724  	// typecheck checks that constant arguments to make are not negative and
  2725  	// fit into an int.
  2726  	// runtime.growslice uses int as type for the newcap argument.
  2727  	// Constraining variables to be type int avoids the need for runtime checks
  2728  	// that e.g. check if an int64 value fits into an int.
  2729  	// TODO(moehrmann): support other integer types that always fit in an int
  2730  	y := second.Left
  2731  	if !Isconst(y, CTINT) && y.Type.Etype != TINT {
  2732  		return false
  2733  	}
  2734  
  2735  	return true
  2736  }
  2737  
  2738  // extendslice rewrites append(l1, make([]T, l2)...) to
  2739  //   init {
  2740  //     if l2 < 0 {
  2741  //       panicmakeslicelen()
  2742  //     }
  2743  //     s := l1
  2744  //     n := len(s) + l2
  2745  //     // Compare n and cap(s) as uint so growslice can panic on overflow of len(s) + l2.
  2746  //     // cap is a positive int and n can become negative when len(s) + l2
  2747  //     // overflows int. Interpreting n when negative as uint makes it larger
  2748  //     // than cap(s). growslice will check the int n arg and panic if n is
  2749  //     // negative. This prevents the overflow from being undetected.
  2750  //     if uint(n) > uint(cap(s)) {
  2751  //       s = growslice(T, s, n)
  2752  //     }
  2753  //     s = s[:n]
  2754  //     lptr := &l1[0]
  2755  //     sptr := &s[0]
  2756  //     if lptr == sptr || !hasPointers(T) {
  2757  //       // growslice did not clear the whole underlying array (or did not get called)
  2758  //       hp := &s[len(l1)]
  2759  //       hn := l2 * sizeof(T)
  2760  //       memclr(hp, hn)
  2761  //     }
  2762  //   }
  2763  //   s
  2764  func extendslice(n *Node, init *Nodes) *Node {
  2765  	// isAppendOfMake made sure l2 fits in an int.
  2766  	l2 := conv(n.List.Second().Left, types.Types[TINT])
  2767  	l2 = typecheck(l2, Erv)
  2768  	n.List.SetSecond(l2) // walkAppendArgs expects l2 in n.List.Second().
  2769  
  2770  	walkAppendArgs(n, init)
  2771  
  2772  	l1 := n.List.First()
  2773  	l2 = n.List.Second() // re-read l2, as it may have been updated by walkAppendArgs
  2774  
  2775  	var nodes []*Node
  2776  
  2777  	// if l2 < 0
  2778  	nifneg := nod(OIF, nod(OLT, l2, nodintconst(0)), nil)
  2779  	nifneg.SetLikely(false)
  2780  
  2781  	// panicmakeslicelen()
  2782  	nifneg.Nbody.Set1(mkcall("panicmakeslicelen", nil, init))
  2783  	nodes = append(nodes, nifneg)
  2784  
  2785  	// s := l1
  2786  	s := temp(l1.Type)
  2787  	nodes = append(nodes, nod(OAS, s, l1))
  2788  
  2789  	elemtype := s.Type.Elem()
  2790  
  2791  	// n := len(s) + l2
  2792  	nn := temp(types.Types[TINT])
  2793  	nodes = append(nodes, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), l2)))
  2794  
  2795  	// if uint(n) > uint(cap(s))
  2796  	nuint := conv(nn, types.Types[TUINT])
  2797  	capuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2798  	nif := nod(OIF, nod(OGT, nuint, capuint), nil)
  2799  
  2800  	// instantiate growslice(typ *type, old []any, newcap int) []any
  2801  	fn := syslook("growslice")
  2802  	fn = substArgTypes(fn, elemtype, elemtype)
  2803  
  2804  	// s = growslice(T, s, n)
  2805  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2806  	nodes = append(nodes, nif)
  2807  
  2808  	// s = s[:n]
  2809  	nt := nod(OSLICE, s, nil)
  2810  	nt.SetSliceBounds(nil, nn, nil)
  2811  	nt.SetBounded(true)
  2812  	nodes = append(nodes, nod(OAS, s, nt))
  2813  
  2814  	// lptr := &l1[0]
  2815  	l1ptr := temp(l1.Type.Elem().PtrTo())
  2816  	tmp := nod(OSPTR, l1, nil)
  2817  	nodes = append(nodes, nod(OAS, l1ptr, tmp))
  2818  
  2819  	// sptr := &s[0]
  2820  	sptr := temp(elemtype.PtrTo())
  2821  	tmp = nod(OSPTR, s, nil)
  2822  	nodes = append(nodes, nod(OAS, sptr, tmp))
  2823  
  2824  	// hp := &s[len(l1)]
  2825  	hp := nod(OINDEX, s, nod(OLEN, l1, nil))
  2826  	hp.SetBounded(true)
  2827  	hp = nod(OADDR, hp, nil)
  2828  	hp = convnop(hp, types.Types[TUNSAFEPTR])
  2829  
  2830  	// hn := l2 * sizeof(elem(s))
  2831  	hn := nod(OMUL, l2, nodintconst(elemtype.Width))
  2832  	hn = conv(hn, types.Types[TUINTPTR])
  2833  
  2834  	clrname := "memclrNoHeapPointers"
  2835  	hasPointers := types.Haspointers(elemtype)
  2836  	if hasPointers {
  2837  		clrname = "memclrHasPointers"
  2838  	}
  2839  
  2840  	var clr Nodes
  2841  	clrfn := mkcall(clrname, nil, &clr, hp, hn)
  2842  	clr.Append(clrfn)
  2843  
  2844  	if hasPointers {
  2845  		// if l1ptr == sptr
  2846  		nifclr := nod(OIF, nod(OEQ, l1ptr, sptr), nil)
  2847  		nifclr.Nbody = clr
  2848  		nodes = append(nodes, nifclr)
  2849  	} else {
  2850  		nodes = append(nodes, clr.Slice()...)
  2851  	}
  2852  
  2853  	typecheckslice(nodes, Etop)
  2854  	walkstmtlist(nodes)
  2855  	init.Append(nodes...)
  2856  	return s
  2857  }
  2858  
  2859  // Rewrite append(src, x, y, z) so that any side effects in
  2860  // x, y, z (including runtime panics) are evaluated in
  2861  // initialization statements before the append.
  2862  // For normal code generation, stop there and leave the
  2863  // rest to cgen_append.
  2864  //
  2865  // For race detector, expand append(src, a [, b]* ) to
  2866  //
  2867  //   init {
  2868  //     s := src
  2869  //     const argc = len(args) - 1
  2870  //     if cap(s) - len(s) < argc {
  2871  //       s = growslice(T, s, len(s)+argc)
  2872  //     }
  2873  //     n := len(s)
  2874  //     s = s[:n+argc]
  2875  //     s[n] = a
  2876  //     s[n+1] = b
  2877  //     ...
  2878  //   }
  2879  //   s
  2880  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  2881  	if !samesafeexpr(dst, n.List.First()) {
  2882  		n.List.SetFirst(safeexpr(n.List.First(), init))
  2883  		n.List.SetFirst(walkexpr(n.List.First(), init))
  2884  	}
  2885  	walkexprlistsafe(n.List.Slice()[1:], init)
  2886  
  2887  	nsrc := n.List.First()
  2888  
  2889  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2890  	// and n are name or literal, but those may index the slice we're
  2891  	// modifying here. Fix explicitly.
  2892  	// Using cheapexpr also makes sure that the evaluation
  2893  	// of all arguments (and especially any panics) happen
  2894  	// before we begin to modify the slice in a visible way.
  2895  	ls := n.List.Slice()[1:]
  2896  	for i, n := range ls {
  2897  		n = cheapexpr(n, init)
  2898  		if !types.Identical(n.Type, nsrc.Type.Elem()) {
  2899  			n = assignconv(n, nsrc.Type.Elem(), "append")
  2900  			n = walkexpr(n, init)
  2901  		}
  2902  		ls[i] = n
  2903  	}
  2904  
  2905  	argc := n.List.Len() - 1
  2906  	if argc < 1 {
  2907  		return nsrc
  2908  	}
  2909  
  2910  	// General case, with no function calls left as arguments.
  2911  	// Leave for gen, except that instrumentation requires old form.
  2912  	if !instrumenting || compiling_runtime {
  2913  		return n
  2914  	}
  2915  
  2916  	var l []*Node
  2917  
  2918  	ns := temp(nsrc.Type)
  2919  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  2920  
  2921  	na := nodintconst(int64(argc)) // const argc
  2922  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  2923  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  2924  
  2925  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  2926  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  2927  
  2928  	nx.Nbody.Set1(nod(OAS, ns,
  2929  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  2930  			nod(OADD, nod(OLEN, ns, nil), na))))
  2931  
  2932  	l = append(l, nx)
  2933  
  2934  	nn := temp(types.Types[TINT])
  2935  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  2936  
  2937  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  2938  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  2939  	nx.SetBounded(true)
  2940  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  2941  
  2942  	ls = n.List.Slice()[1:]
  2943  	for i, n := range ls {
  2944  		nx = nod(OINDEX, ns, nn) // s[n] ...
  2945  		nx.SetBounded(true)
  2946  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  2947  		if i+1 < len(ls) {
  2948  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  2949  		}
  2950  	}
  2951  
  2952  	typecheckslice(l, Etop)
  2953  	walkstmtlist(l)
  2954  	init.Append(l...)
  2955  	return ns
  2956  }
  2957  
  2958  // Lower copy(a, b) to a memmove call or a runtime call.
  2959  //
  2960  // init {
  2961  //   n := len(a)
  2962  //   if n > len(b) { n = len(b) }
  2963  //   if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
  2964  // }
  2965  // n;
  2966  //
  2967  // Also works if b is a string.
  2968  //
  2969  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  2970  	if n.Left.Type.Elem().HasHeapPointer() {
  2971  		Curfn.Func.setWBPos(n.Pos)
  2972  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  2973  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  2974  	}
  2975  
  2976  	if runtimecall {
  2977  		if n.Right.Type.IsString() {
  2978  			fn := syslook("slicestringcopy")
  2979  			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2980  			return mkcall1(fn, n.Type, init, n.Left, n.Right)
  2981  		}
  2982  
  2983  		fn := syslook("slicecopy")
  2984  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2985  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  2986  	}
  2987  
  2988  	n.Left = walkexpr(n.Left, init)
  2989  	n.Right = walkexpr(n.Right, init)
  2990  	nl := temp(n.Left.Type)
  2991  	nr := temp(n.Right.Type)
  2992  	var l []*Node
  2993  	l = append(l, nod(OAS, nl, n.Left))
  2994  	l = append(l, nod(OAS, nr, n.Right))
  2995  
  2996  	nfrm := nod(OSPTR, nr, nil)
  2997  	nto := nod(OSPTR, nl, nil)
  2998  
  2999  	nlen := temp(types.Types[TINT])
  3000  
  3001  	// n = len(to)
  3002  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3003  
  3004  	// if n > len(frm) { n = len(frm) }
  3005  	nif := nod(OIF, nil, nil)
  3006  
  3007  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3008  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3009  	l = append(l, nif)
  3010  
  3011  	// if to.ptr != frm.ptr { memmove( ... ) }
  3012  	ne := nod(OIF, nod(ONE, nto, nfrm), nil)
  3013  	ne.SetLikely(true)
  3014  	l = append(l, ne)
  3015  
  3016  	fn := syslook("memmove")
  3017  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3018  	nwid := temp(types.Types[TUINTPTR])
  3019  	setwid := nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR]))
  3020  	ne.Nbody.Append(setwid)
  3021  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3022  	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
  3023  	ne.Nbody.Append(call)
  3024  
  3025  	typecheckslice(l, Etop)
  3026  	walkstmtlist(l)
  3027  	init.Append(l...)
  3028  	return nlen
  3029  }
  3030  
  3031  func eqfor(t *types.Type) (n *Node, needsize bool) {
  3032  	// Should only arrive here with large memory or
  3033  	// a struct/array containing a non-memory field/element.
  3034  	// Small memory is handled inline, and single non-memory
  3035  	// is handled by walkcompare.
  3036  	switch a, _ := algtype1(t); a {
  3037  	case AMEM:
  3038  		n := syslook("memequal")
  3039  		n = substArgTypes(n, t, t)
  3040  		return n, true
  3041  	case ASPECIAL:
  3042  		sym := typesymprefix(".eq", t)
  3043  		n := newname(sym)
  3044  		n.SetClass(PFUNC)
  3045  		n.Type = functype(nil, []*Node{
  3046  			anonfield(types.NewPtr(t)),
  3047  			anonfield(types.NewPtr(t)),
  3048  		}, []*Node{
  3049  			anonfield(types.Types[TBOOL]),
  3050  		})
  3051  		return n, false
  3052  	}
  3053  	Fatalf("eqfor %v", t)
  3054  	return nil, false
  3055  }
  3056  
  3057  // The result of walkcompare MUST be assigned back to n, e.g.
  3058  // 	n.Left = walkcompare(n.Left, init)
  3059  func walkcompare(n *Node, init *Nodes) *Node {
  3060  	if n.Left.Type.IsInterface() && n.Right.Type.IsInterface() && n.Left.Op != OLITERAL && n.Right.Op != OLITERAL {
  3061  		return walkcompareInterface(n, init)
  3062  	}
  3063  
  3064  	if n.Left.Type.IsString() && n.Right.Type.IsString() {
  3065  		return walkcompareString(n, init)
  3066  	}
  3067  
  3068  	n.Left = walkexpr(n.Left, init)
  3069  	n.Right = walkexpr(n.Right, init)
  3070  
  3071  	// Given interface value l and concrete value r, rewrite
  3072  	//   l == r
  3073  	// into types-equal && data-equal.
  3074  	// This is efficient, avoids allocations, and avoids runtime calls.
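        	// For example (illustrative), with i of type interface{} and x of type int,
        	//	i == x
        	// becomes roughly
        	//	i.type == (type descriptor for int) && *(*int)(i.data) == x
        	// so x never has to be boxed into an interface value.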
  3075  	var l, r *Node
  3076  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3077  		l = n.Left
  3078  		r = n.Right
  3079  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3080  		l = n.Right
  3081  		r = n.Left
  3082  	}
  3083  
  3084  	if l != nil {
  3085  		// Handle both == and !=.
  3086  		eq := n.Op
  3087  		var andor Op
  3088  		if eq == OEQ {
  3089  			andor = OANDAND
  3090  		} else {
  3091  			andor = OOROR
  3092  		}
  3093  		// Check for types equal.
  3094  		// For empty interface, this is:
  3095  		//   l.tab == type(r)
  3096  		// For non-empty interface, this is:
  3097  		//   l.tab != nil && l.tab._type == type(r)
  3098  		var eqtype *Node
  3099  		tab := nod(OITAB, l, nil)
  3100  		rtyp := typename(r.Type)
  3101  		if l.Type.IsEmptyInterface() {
  3102  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3103  			tab.SetTypecheck(1)
  3104  			eqtype = nod(eq, tab, rtyp)
  3105  		} else {
  3106  			nonnil := nod(brcom(eq), nodnil(), tab)
  3107  			match := nod(eq, itabType(tab), rtyp)
  3108  			eqtype = nod(andor, nonnil, match)
  3109  		}
  3110  		// Check for data equal.
  3111  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3112  		// Put it all together.
  3113  		expr := nod(andor, eqtype, eqdata)
  3114  		n = finishcompare(n, expr, init)
  3115  		return n
  3116  	}
  3117  
  3118  	// Must be comparison of array or struct.
  3119  	// Otherwise back end handles it.
  3120  	// While we're here, decide whether to
  3121  	// inline or call an eq alg.
  3122  	t := n.Left.Type
  3123  	var inline bool
  3124  
  3125  	maxcmpsize := int64(4)
  3126  	unalignedLoad := canMergeLoads()
  3127  	if unalignedLoad {
  3128  		// Keep this low enough to generate less code than a function call.
  3129  		maxcmpsize = 2 * int64(thearch.LinkArch.RegSize)
  3130  	}
  3131  
  3132  	switch t.Etype {
  3133  	default:
  3134  		return n
  3135  	case TARRAY:
  3136  		// We can compare several elements at once with 2/4/8 byte integer compares
  3137  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3138  	case TSTRUCT:
  3139  		inline = t.NumComponents(types.IgnoreBlankFields) <= 4
  3140  	}
  3141  
  3142  	cmpl := n.Left
  3143  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3144  		cmpl = cmpl.Left
  3145  	}
  3146  	cmpr := n.Right
  3147  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3148  		cmpr = cmpr.Left
  3149  	}
  3150  
  3151  	// Chose not to inline. Call equality function directly.
  3152  	if !inline {
  3153  		if isvaluelit(cmpl) {
  3154  			var_ := temp(cmpl.Type)
  3155  			anylit(cmpl, var_, init)
  3156  			cmpl = var_
  3157  		}
  3158  		if isvaluelit(cmpr) {
  3159  			var_ := temp(cmpr.Type)
  3160  			anylit(cmpr, var_, init)
  3161  			cmpr = var_
  3162  		}
  3163  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3164  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3165  		}
  3166  
  3167  		// eq algs take pointers
  3168  		pl := temp(types.NewPtr(t))
  3169  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3170  		al = typecheck(al, Etop)
  3171  		init.Append(al)
  3172  
  3173  		pr := temp(types.NewPtr(t))
  3174  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3175  		ar = typecheck(ar, Etop)
  3176  		init.Append(ar)
  3177  
  3178  		fn, needsize := eqfor(t)
  3179  		call := nod(OCALL, fn, nil)
  3180  		call.List.Append(pl)
  3181  		call.List.Append(pr)
  3182  		if needsize {
  3183  			call.List.Append(nodintconst(t.Width))
  3184  		}
  3185  		res := call
  3186  		if n.Op != OEQ {
  3187  			res = nod(ONOT, res, nil)
  3188  		}
  3189  		n = finishcompare(n, res, init)
  3190  		return n
  3191  	}
  3192  
  3193  	// inline: build boolean expression comparing element by element
  3194  	andor := OANDAND
  3195  	if n.Op == ONE {
  3196  		andor = OOROR
  3197  	}
  3198  	var expr *Node
  3199  	compare := func(el, er *Node) {
  3200  		a := nod(n.Op, el, er)
  3201  		if expr == nil {
  3202  			expr = a
  3203  		} else {
  3204  			expr = nod(andor, expr, a)
  3205  		}
  3206  	}
  3207  	cmpl = safeexpr(cmpl, init)
  3208  	cmpr = safeexpr(cmpr, init)
  3209  	if t.IsStruct() {
  3210  		for _, f := range t.Fields().Slice() {
  3211  			sym := f.Sym
  3212  			if sym.IsBlank() {
  3213  				continue
  3214  			}
  3215  			compare(
  3216  				nodSym(OXDOT, cmpl, sym),
  3217  				nodSym(OXDOT, cmpr, sym),
  3218  			)
  3219  		}
  3220  	} else {
  3221  		step := int64(1)
  3222  		remains := t.NumElem() * t.Elem().Width
  3223  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3224  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3225  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3226  		for i := int64(0); remains > 0; {
  3227  			var convType *types.Type
  3228  			switch {
  3229  			case remains >= 8 && combine64bit:
  3230  				convType = types.Types[TINT64]
  3231  				step = 8 / t.Elem().Width
  3232  			case remains >= 4 && combine32bit:
  3233  				convType = types.Types[TUINT32]
  3234  				step = 4 / t.Elem().Width
  3235  			case remains >= 2 && combine16bit:
  3236  				convType = types.Types[TUINT16]
  3237  				step = 2 / t.Elem().Width
  3238  			default:
  3239  				step = 1
  3240  			}
  3241  			if step == 1 {
  3242  				compare(
  3243  					nod(OINDEX, cmpl, nodintconst(i)),
  3244  					nod(OINDEX, cmpr, nodintconst(i)),
  3245  				)
  3246  				i++
  3247  				remains -= t.Elem().Width
  3248  			} else {
  3249  				elemType := t.Elem().ToUnsigned()
  3250  				cmplw := nod(OINDEX, cmpl, nodintconst(i))
  3251  				cmplw = conv(cmplw, elemType) // convert to unsigned
  3252  				cmplw = conv(cmplw, convType) // widen
  3253  				cmprw := nod(OINDEX, cmpr, nodintconst(i))
  3254  				cmprw = conv(cmprw, elemType)
  3255  				cmprw = conv(cmprw, convType)
  3256  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3257  				// ssa will generate a single large load.
  3258  				for offset := int64(1); offset < step; offset++ {
  3259  					lb := nod(OINDEX, cmpl, nodintconst(i+offset))
  3260  					lb = conv(lb, elemType)
  3261  					lb = conv(lb, convType)
  3262  					lb = nod(OLSH, lb, nodintconst(8*t.Elem().Width*offset))
  3263  					cmplw = nod(OOR, cmplw, lb)
  3264  					rb := nod(OINDEX, cmpr, nodintconst(i+offset))
  3265  					rb = conv(rb, elemType)
  3266  					rb = conv(rb, convType)
  3267  					rb = nod(OLSH, rb, nodintconst(8*t.Elem().Width*offset))
  3268  					cmprw = nod(OOR, cmprw, rb)
  3269  				}
  3270  				compare(cmplw, cmprw)
  3271  				i += step
  3272  				remains -= step * t.Elem().Width
  3273  			}
  3274  		}
  3275  	}
  3276  	if expr == nil {
  3277  		expr = nodbool(n.Op == OEQ)
  3278  	}
  3279  	n = finishcompare(n, expr, init)
  3280  	return n
  3281  }
  3282  
  3283  func walkcompareInterface(n *Node, init *Nodes) *Node {
  3284  	// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  3285  	if !types.Identical(n.Left.Type, n.Right.Type) {
  3286  		Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  3287  	}
  3288  	var fn *Node
  3289  	if n.Left.Type.IsEmptyInterface() {
  3290  		fn = syslook("efaceeq")
  3291  	} else {
  3292  		fn = syslook("ifaceeq")
  3293  	}
  3294  
  3295  	n.Right = cheapexpr(n.Right, init)
  3296  	n.Left = cheapexpr(n.Left, init)
  3297  	lt := nod(OITAB, n.Left, nil)
  3298  	rt := nod(OITAB, n.Right, nil)
  3299  	ld := nod(OIDATA, n.Left, nil)
  3300  	rd := nod(OIDATA, n.Right, nil)
  3301  	ld.Type = types.Types[TUNSAFEPTR]
  3302  	rd.Type = types.Types[TUNSAFEPTR]
  3303  	ld.SetTypecheck(1)
  3304  	rd.SetTypecheck(1)
  3305  	call := mkcall1(fn, n.Type, init, lt, ld, rd)
  3306  
  3307  	// Check itable/type before full compare.
  3308  	// Note: short-circuited because order matters.
  3309  	var cmp *Node
  3310  	if n.Op == OEQ {
  3311  		cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  3312  	} else {
  3313  		cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  3314  	}
  3315  	return finishcompare(n, cmp, init)
  3316  }
  3317  
  3318  func walkcompareString(n *Node, init *Nodes) *Node {
  3319  	// s + "badgerbadgerbadger" == "badgerbadgerbadger"
  3320  	if (n.Op == OEQ || n.Op == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  3321  		r := nod(n.Op, nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  3322  		return finishcompare(n, r, init)
  3323  	}
  3324  
  3325  	// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
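        	// For example (roughly), for a short constant like "ab",
        	//	s == "ab"
        	// becomes
        	//	len(s) == 2 && s[0] == 'a' && s[1] == 'b'
        	// with adjacent byte compares merged into wider loads where the
        	// architecture allows it.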
  3326  	var cs, ncs *Node // const string, non-const string
  3327  	switch {
  3328  	case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  3329  		// ignore; will be constant evaluated
  3330  	case Isconst(n.Left, CTSTR):
  3331  		cs = n.Left
  3332  		ncs = n.Right
  3333  	case Isconst(n.Right, CTSTR):
  3334  		cs = n.Right
  3335  		ncs = n.Left
  3336  	}
  3337  	if cs != nil {
  3338  		cmp := n.Op
  3339  		// Our comparison below assumes that the non-constant string
  3340  		// is on the left hand side, so rewrite "" cmp x to x cmp "".
  3341  		// See issue 24817.
  3342  		if Isconst(n.Left, CTSTR) {
  3343  			cmp = brrev(cmp)
  3344  		}
  3345  
  3346  		// maxRewriteLen was chosen empirically.
  3347  		// It is the value that minimizes cmd/go file size
  3348  		// across most architectures.
  3349  		// See the commit description for CL 26758 for details.
  3350  		maxRewriteLen := 6
  3351  		// Some architectures can load unaligned byte sequence as 1 word.
  3352  		// So we can cover longer strings with the same amount of code.
  3353  		canCombineLoads := canMergeLoads()
  3354  		combine64bit := false
  3355  		if canCombineLoads {
  3356  			// Keep this low enough to generate less code than a function call.
  3357  			maxRewriteLen = 2 * thearch.LinkArch.RegSize
  3358  			combine64bit = thearch.LinkArch.RegSize >= 8
  3359  		}
  3360  
  3361  		var and Op
  3362  		switch cmp {
  3363  		case OEQ:
  3364  			and = OANDAND
  3365  		case ONE:
  3366  			and = OOROR
  3367  		default:
  3368  			// Don't do byte-wise comparisons for <, <=, etc.
  3369  			// They're fairly complicated.
  3370  			// Length-only checks are ok, though.
  3371  			maxRewriteLen = 0
  3372  		}
  3373  		if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  3374  			if len(s) > 0 {
  3375  				ncs = safeexpr(ncs, init)
  3376  			}
  3377  			r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  3378  			remains := len(s)
  3379  			for i := 0; remains > 0; {
  3380  				if remains == 1 || !canCombineLoads {
  3381  					cb := nodintconst(int64(s[i]))
  3382  					ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  3383  					r = nod(and, r, nod(cmp, ncb, cb))
  3384  					remains--
  3385  					i++
  3386  					continue
  3387  				}
  3388  				var step int
  3389  				var convType *types.Type
  3390  				switch {
  3391  				case remains >= 8 && combine64bit:
  3392  					convType = types.Types[TINT64]
  3393  					step = 8
  3394  				case remains >= 4:
  3395  					convType = types.Types[TUINT32]
  3396  					step = 4
  3397  				case remains >= 2:
  3398  					convType = types.Types[TUINT16]
  3399  					step = 2
  3400  				}
  3401  				ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  3402  				ncsubstr = conv(ncsubstr, convType)
  3403  				csubstr := int64(s[i])
  3404  				// Calculate the large constant from bytes as a sequence of shifts and ors.
  3405  				// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3406  				// ssa will combine this into a single large load.
  3407  				for offset := 1; offset < step; offset++ {
  3408  					b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  3409  					b = conv(b, convType)
  3410  					b = nod(OLSH, b, nodintconst(int64(8*offset)))
  3411  					ncsubstr = nod(OOR, ncsubstr, b)
  3412  					csubstr |= int64(s[i+offset]) << uint8(8*offset)
  3413  				}
  3414  				csubstrPart := nodintconst(csubstr)
  3415  				// Compare "step" bytes at once.
  3416  				r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  3417  				remains -= step
  3418  				i += step
  3419  			}
  3420  			return finishcompare(n, r, init)
  3421  		}
  3422  	}
  3423  
  3424  	var r *Node
  3425  	if n.Op == OEQ || n.Op == ONE {
  3426  		// prepare for rewrite below
  3427  		n.Left = cheapexpr(n.Left, init)
  3428  		n.Right = cheapexpr(n.Right, init)
  3429  
  3430  		lstr := conv(n.Left, types.Types[TSTRING])
  3431  		rstr := conv(n.Right, types.Types[TSTRING])
  3432  		lptr := nod(OSPTR, lstr, nil)
  3433  		rptr := nod(OSPTR, rstr, nil)
  3434  		llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR])
  3435  		rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR])
  3436  
  3437  		fn := syslook("memequal")
  3438  		fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
  3439  		r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen)
  3440  
  3441  		// Do a quick length check before the full comparison for == or !=.
  3442  		// memequal then tests equality up to length len.
  3443  		if n.Op == OEQ {
  3444  			// len(left) == len(right) && memequal(left, right, len)
  3445  			r = nod(OANDAND, nod(OEQ, llen, rlen), r)
  3446  		} else {
  3447  			// len(left) != len(right) || !memequal(left, right, len)
  3448  			r = nod(ONOT, r, nil)
  3449  			r = nod(OOROR, nod(ONE, llen, rlen), r)
  3450  		}
  3451  	} else {
  3452  		// For ordered comparisons, compare the result of cmpstring(s1, s2) against 0.
  3453  		r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  3454  		r = nod(n.Op, r, nodintconst(0))
  3455  	}
  3456  
  3457  	return finishcompare(n, r, init)
  3458  }
  3459  
  3460  // The result of finishcompare MUST be assigned back to n, e.g.
  3461  // 	n.Left = finishcompare(n.Left, x, init)
  3462  func finishcompare(n, r *Node, init *Nodes) *Node {
  3463  	r = typecheck(r, Erv)
  3464  	r = conv(r, n.Type)
  3465  	r = walkexpr(r, init)
  3466  	return r
  3467  }
  3468  
  3469  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3470  func (n *Node) isIntOrdering() bool {
  3471  	switch n.Op {
  3472  	case OLE, OLT, OGE, OGT:
  3473  	default:
  3474  		return false
  3475  	}
  3476  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3477  }
  3478  
  3479  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3480  // n must be an OANDAND or OOROR node.
  3481  // The result of walkinrange MUST be assigned back to n, e.g.
  3482  // 	n.Left = walkinrange(n.Left)
  3483  func walkinrange(n *Node, init *Nodes) *Node {
  3484  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3485  	// * a, b, and c have integer type
  3486  	// * b is side-effect-free
  3487  	// * opl and opr are each < or ≤
  3488  	// * OP is &&
  3489  	l := n.Left
  3490  	r := n.Right
  3491  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3492  		return n
  3493  	}
  3494  
  3495  	// Find b, if it exists, and rename appropriately.
  3496  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3497  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3498  	a, opl, b := l.Left, l.Op, l.Right
  3499  	x, opr, c := r.Left, r.Op, r.Right
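        	// Try the four orientations of the two comparisons (flipping each
        	// side in turn with brrev) until the shared operand b == x is found.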
  3500  	for i := 0; ; i++ {
  3501  		if samesafeexpr(b, x) {
  3502  			break
  3503  		}
  3504  		if i == 3 {
  3505  			// Tried all permutations and couldn't find an appropriate b == x.
  3506  			return n
  3507  		}
  3508  		if i&1 == 0 {
  3509  			a, opl, b = b, brrev(opl), a
  3510  		} else {
  3511  			x, opr, c = c, brrev(opr), x
  3512  		}
  3513  	}
  3514  
  3515  	// If n.Op is ||, apply De Morgan's law.
  3516  	// Negate the internal ops now; we'll negate the top level op at the end.
  3517  	// Henceforth assume &&.
  3518  	negateResult := n.Op == OOROR
  3519  	if negateResult {
  3520  		opl = brcom(opl)
  3521  		opr = brcom(opr)
  3522  	}
  3523  
  3524  	cmpdir := func(o Op) int {
  3525  		switch o {
  3526  		case OLE, OLT:
  3527  			return -1
  3528  		case OGE, OGT:
  3529  			return +1
  3530  		}
  3531  		Fatalf("walkinrange cmpdir %v", o)
  3532  		return 0
  3533  	}
  3534  	if cmpdir(opl) != cmpdir(opr) {
  3535  		// Not a range check; something like b < a && b < c.
  3536  		return n
  3537  	}
  3538  
  3539  	switch opl {
  3540  	case OGE, OGT:
  3541  		// We have something like a > b && b ≥ c.
  3542  		// Switch and reverse ops and rename constants,
  3543  		// to make it look like a ≤ b && b < c.
  3544  		a, c = c, a
  3545  		opl, opr = brrev(opr), brrev(opl)
  3546  	}
  3547  
  3548  	// We must ensure that c-a is non-negative.
  3549  	// For now, require a and c to be constants.
  3550  	// In the future, we could also support a == 0 and c == len/cap(...).
  3551  	// Unfortunately, by this point, most len/cap expressions have been
  3552  	// stored into temporary variables.
  3553  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3554  		return n
  3555  	}
  3556  
  3557  	// Ensure that Int64() does not overflow on a and c (it overflows
  3558  	// for any constant of 2**63 or larger; see issue #27143).
  3559  	if !a.CanInt64() || !c.CanInt64() {
  3560  		return n
  3561  	}
  3562  
  3563  	if opl == OLT {
  3564  		// We have a < b && ...
  3565  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3566  		// If a is not the maximum constant for b's type,
  3567  		// we can increment a and switch to ≤.
  3568  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3569  			return n
  3570  		}
  3571  		a = nodintconst(a.Int64() + 1)
  3572  		opl = OLE
  3573  	}
  3574  
  3575  	bound := c.Int64() - a.Int64()
  3576  	if bound < 0 {
  3577  		// Bad news. Something like 5 <= x && x < 3.
  3578  		// Rare in practice, and we still need to generate side-effects,
  3579  		// so just leave it alone.
  3580  		return n
  3581  	}
  3582  
  3583  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3584  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3585  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3586  	// which is equivalent to uint(b-a) < uint(c-a).
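        	// For example (illustrative): 4 <= x && x < 10 becomes uint(x-4) < 6.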
  3587  	ut := b.Type.ToUnsigned()
  3588  	lhs := conv(nod(OSUB, b, a), ut)
  3589  	rhs := nodintconst(bound)
  3590  	if negateResult {
  3591  		// Negate top level.
  3592  		opr = brcom(opr)
  3593  	}
  3594  	cmp := nod(opr, lhs, rhs)
  3595  	cmp.Pos = n.Pos
  3596  	cmp = addinit(cmp, l.Ninit.Slice())
  3597  	cmp = addinit(cmp, r.Ninit.Slice())
  3598  	// Typecheck the AST rooted at cmp...
  3599  	cmp = typecheck(cmp, Erv)
  3600  	// ...but then reset cmp's type to match n's type.
  3601  	cmp.Type = n.Type
  3602  	cmp = walkexpr(cmp, init)
  3603  	return cmp
  3604  }
  3605  
  3606  // bounded reports whether the integer n must be in the range [0, max).
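        // For example (illustrative), bounded(n, 16) is true when n is x&7,
        // or when n is an unsigned 8-bit value shifted right by 4, since both
        // results must lie in [0, 16).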
  3607  func bounded(n *Node, max int64) bool {
  3608  	if n.Type == nil || !n.Type.IsInteger() {
  3609  		return false
  3610  	}
  3611  
  3612  	sign := n.Type.IsSigned()
  3613  	bits := int32(8 * n.Type.Width)
  3614  
  3615  	if smallintconst(n) {
  3616  		v := n.Int64()
  3617  		return 0 <= v && v < max
  3618  	}
  3619  
  3620  	switch n.Op {
  3621  	case OAND:
  3622  		v := int64(-1)
  3623  		if smallintconst(n.Left) {
  3624  			v = n.Left.Int64()
  3625  		} else if smallintconst(n.Right) {
  3626  			v = n.Right.Int64()
  3627  		}
  3628  
  3629  		if 0 <= v && v < max {
  3630  			return true
  3631  		}
  3632  
  3633  	case OMOD:
  3634  		if !sign && smallintconst(n.Right) {
  3635  			v := n.Right.Int64()
  3636  			if 0 <= v && v <= max {
  3637  				return true
  3638  			}
  3639  		}
  3640  
  3641  	case ODIV:
  3642  		if !sign && smallintconst(n.Right) {
  3643  			v := n.Right.Int64()
  3644  			for bits > 0 && v >= 2 {
  3645  				bits--
  3646  				v >>= 1
  3647  			}
  3648  		}
  3649  
  3650  	case ORSH:
  3651  		if !sign && smallintconst(n.Right) {
  3652  			v := n.Right.Int64()
  3653  			if v > int64(bits) {
  3654  				return true
  3655  			}
  3656  			bits -= int32(v)
  3657  		}
  3658  	}
  3659  
  3660  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3661  		return true
  3662  	}
  3663  
  3664  	return false
  3665  }
  3666  
  3667  // usemethod checks interface method calls for uses of reflect.Type.Method.
  3668  func usemethod(n *Node) {
  3669  	t := n.Left.Type
  3670  
  3671  	// Looking for either of:
  3672  	//	Method(int) reflect.Method
  3673  	//	MethodByName(string) (reflect.Method, bool)
  3674  	//
  3675  	// TODO(crawshaw): improve precision of match by working out
  3676  	//                 how to check the method name.
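        	// Illustrative calls (not from the original source) that this check
        	// is meant to catch; they mark the caller so the linker keeps method
        	// metadata alive:
        	//	reflect.TypeOf(v).Method(i)
        	//	reflect.TypeOf(v).MethodByName("Close")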
  3677  	if n := t.NumParams(); n != 1 {
  3678  		return
  3679  	}
  3680  	if n := t.NumResults(); n != 1 && n != 2 {
  3681  		return
  3682  	}
  3683  	p0 := t.Params().Field(0)
  3684  	res0 := t.Results().Field(0)
  3685  	var res1 *types.Field
  3686  	if t.NumResults() == 2 {
  3687  		res1 = t.Results().Field(1)
  3688  	}
  3689  
  3690  	if res1 == nil {
  3691  		if p0.Type.Etype != TINT {
  3692  			return
  3693  		}
  3694  	} else {
  3695  		if !p0.Type.IsString() {
  3696  			return
  3697  		}
  3698  		if !res1.Type.IsBoolean() {
  3699  			return
  3700  		}
  3701  	}
  3702  
  3703  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3704  	//       (including global variables such as numImports - was issue #19028).
  3705  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3706  		Curfn.Func.SetReflectMethod(true)
  3707  	}
  3708  }
  3709  
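        // usefield records the use of a tracked struct field in
        // Curfn.Func.FieldTrack; it only applies when field tracking is
        // enabled (objabi.Fieldtrack_enabled). A tracked field is one whose
        // struct tag contains go:"track", for example (illustrative):
        //	type T struct {
        //		F int `go:"track"`
        //	}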
  3710  func usefield(n *Node) {
  3711  	if objabi.Fieldtrack_enabled == 0 {
  3712  		return
  3713  	}
  3714  
  3715  	switch n.Op {
  3716  	default:
  3717  		Fatalf("usefield %v", n.Op)
  3718  
  3719  	case ODOT, ODOTPTR:
  3720  		break
  3721  	}
  3722  	if n.Sym == nil {
  3723  		// No field name.  This DOTPTR was built by the compiler for access
  3724  		// to runtime data structures.  Ignore.
  3725  		return
  3726  	}
  3727  
  3728  	t := n.Left.Type
  3729  	if t.IsPtr() {
  3730  		t = t.Elem()
  3731  	}
  3732  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3733  	if field == nil {
  3734  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3735  	}
  3736  	if !strings.Contains(field.Note, "go:\"track\"") {
  3737  		return
  3738  	}
  3739  
  3740  	outer := n.Left.Type
  3741  	if outer.IsPtr() {
  3742  		outer = outer.Elem()
  3743  	}
  3744  	if outer.Sym == nil {
  3745  		yyerror("tracked field must be in named struct type")
  3746  	}
  3747  	if !types.IsExported(field.Sym.Name) {
  3748  		yyerror("tracked field must be exported (upper case)")
  3749  	}
  3750  
  3751  	sym := tracksym(outer, field)
  3752  	if Curfn.Func.FieldTrack == nil {
  3753  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3754  	}
  3755  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3756  }
  3757  
  3758  func candiscardlist(l Nodes) bool {
  3759  	for _, n := range l.Slice() {
  3760  		if !candiscard(n) {
  3761  			return false
  3762  		}
  3763  	}
  3764  	return true
  3765  }
  3766  
  3767  func candiscard(n *Node) bool {
  3768  	if n == nil {
  3769  		return true
  3770  	}
  3771  
  3772  	switch n.Op {
  3773  	default:
  3774  		return false
  3775  
  3776  		// Discardable as long as the subpieces are.
  3777  	case ONAME,
  3778  		ONONAME,
  3779  		OTYPE,
  3780  		OPACK,
  3781  		OLITERAL,
  3782  		OADD,
  3783  		OSUB,
  3784  		OOR,
  3785  		OXOR,
  3786  		OADDSTR,
  3787  		OADDR,
  3788  		OANDAND,
  3789  		OARRAYBYTESTR,
  3790  		OARRAYRUNESTR,
  3791  		OSTRARRAYBYTE,
  3792  		OSTRARRAYRUNE,
  3793  		OCAP,
  3794  		OCOMPLIT,
  3795  		OMAPLIT,
  3796  		OSTRUCTLIT,
  3797  		OARRAYLIT,
  3798  		OSLICELIT,
  3799  		OPTRLIT,
  3800  		OCONV,
  3801  		OCONVIFACE,
  3802  		OCONVNOP,
  3803  		ODOT,
  3804  		OEQ,
  3805  		ONE,
  3806  		OLT,
  3807  		OLE,
  3808  		OGT,
  3809  		OGE,
  3810  		OKEY,
  3811  		OSTRUCTKEY,
  3812  		OLEN,
  3813  		OMUL,
  3814  		OLSH,
  3815  		ORSH,
  3816  		OAND,
  3817  		OANDNOT,
  3818  		ONEW,
  3819  		ONOT,
  3820  		OCOM,
  3821  		OPLUS,
  3822  		OMINUS,
  3823  		OOROR,
  3824  		OPAREN,
  3825  		ORUNESTR,
  3826  		OREAL,
  3827  		OIMAG,
  3828  		OCOMPLEX:
  3829  		break
  3830  
  3831  		// Discardable as long as we know it's not division by zero.
  3832  	case ODIV, OMOD:
  3833  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3834  			break
  3835  		}
  3836  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3837  			break
  3838  		}
  3839  		return false
  3840  
  3841  		// Discardable as long as we know it won't fail because of a bad size.
  3842  	case OMAKECHAN, OMAKEMAP:
  3843  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3844  			break
  3845  		}
  3846  		return false
  3847  
  3848  		// Difficult to tell what sizes are okay.
  3849  	case OMAKESLICE:
  3850  		return false
  3851  	}
  3852  
  3853  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3854  		return false
  3855  	}
  3856  
  3857  	return true
  3858  }
  3859  
  3860  // Rewrite
  3861  //	go builtin(x, y, z)
  3862  // into
  3863  //	go func(a1, a2, a3) {
  3864  //		builtin(a1, a2, a3)
  3865  //	}(x, y, z)
  3866  // for print, println, and delete.
  3867  
  3868  var wrapCall_prgen int
  3869  
  3870  // The result of wrapCall MUST be assigned back to n, e.g.
  3871  // 	n.Left = wrapCall(n.Left, init)
  3872  func wrapCall(n *Node, init *Nodes) *Node {
  3873  	if n.Ninit.Len() != 0 {
  3874  		walkstmtlist(n.Ninit.Slice())
  3875  		init.AppendNodes(&n.Ninit)
  3876  	}
  3877  
  3878  	t := nod(OTFUNC, nil, nil)
  3879  	for i, arg := range n.List.Slice() {
  3880  		s := lookupN("a", i)
  3881  		t.List.Append(symfield(s, arg.Type))
  3882  	}
  3883  
  3884  	wrapCall_prgen++
  3885  	sym := lookupN("wrap·", wrapCall_prgen)
  3886  	fn := dclfunc(sym, t)
  3887  
  3888  	a := nod(n.Op, nil, nil)
  3889  	a.List.Set(paramNnames(t.Type))
  3890  	a = typecheck(a, Etop)
  3891  	fn.Nbody.Set1(a)
  3892  
  3893  	funcbody()
  3894  
  3895  	fn = typecheck(fn, Etop)
  3896  	typecheckslice(fn.Nbody.Slice(), Etop)
  3897  	xtop = append(xtop, fn)
  3898  
  3899  	a = nod(OCALL, nil, nil)
  3900  	a.Left = fn.Func.Nname
  3901  	a.List.Set(n.List.Slice())
  3902  	a = typecheck(a, Etop)
  3903  	a = walkexpr(a, init)
  3904  	return a
  3905  }
  3906  
  3907  // substArgTypes substitutes the given list of types for
  3908  // successive occurrences of the "any" placeholder in the
  3909  // type syntax expression n.Type.
  3910  // The result of substArgTypes MUST be assigned back to old, e.g.
  3911  // 	n.Left = substArgTypes(n.Left, t1, t2)
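        // For example, the memequal lookup in walkcompareString above
        // substitutes types.Types[TUINT8] for both "any" placeholders in
        // memequal's signature before the call is constructed.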
  3912  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3913  	n := old.copy()
  3914  
  3915  	for _, t := range types_ {
  3916  		dowidth(t)
  3917  	}
  3918  	n.Type = types.SubstAny(n.Type, &types_)
  3919  	if len(types_) > 0 {
  3920  		Fatalf("substArgTypes: too many argument types")
  3921  	}
  3922  	return n
  3923  }
  3924  
  3925  // canMergeLoads reports whether the backend optimization passes for
  3926  // the current architecture can combine adjacent loads into a single
  3927  // larger, possibly unaligned, load. Note that currently the
  3928  // optimizations must be able to handle little endian byte order.
  3929  func canMergeLoads() bool {
  3930  	switch thearch.LinkArch.Family {
  3931  	case sys.ARM64, sys.AMD64, sys.I386, sys.S390X:
  3932  		return true
  3933  	case sys.PPC64:
  3934  		// Load combining only supported on ppc64le.
  3935  		return thearch.LinkArch.ByteOrder == binary.LittleEndian
  3936  	}
  3937  	return false
  3938  }
  3939  
  3940  // isRuneCount reports whether n is of the form len([]rune(string)).
  3941  // These are optimized into a call to runtime.countrunes.
  3942  func isRuneCount(n *Node) bool {
  3943  	return Debug['N'] == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTRARRAYRUNE
  3944  }