github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"fmt"
    12  	"strings"
    13  )
    14  
    15  // The constant is known to the runtime.
    16  const tmpstringbufsize = 32
    17  
    18  func walk(fn *Node) {
    19  	Curfn = fn
    20  
    21  	if Debug['W'] != 0 {
    22  		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
    23  		dumplist(s, Curfn.Nbody)
    24  	}
    25  
    26  	lno := lineno
    27  
    28  	// Final typecheck for any unused variables.
    29  	for i, ln := range fn.Func.Dcl {
    30  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    31  			ln = typecheck(ln, Erv|Easgn)
    32  			fn.Func.Dcl[i] = ln
    33  		}
    34  	}
    35  
    36  	// Propagate the used flag for typeswitch variables up to the ONONAME in its definition.
    37  	for _, ln := range fn.Func.Dcl {
    38  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    39  			ln.Name.Defn.Left.Name.SetUsed(true)
    40  		}
    41  	}
    42  
    43  	for _, ln := range fn.Func.Dcl {
    44  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    45  			continue
    46  		}
    47  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    48  			if defn.Left.Name.Used() {
    49  				continue
    50  			}
    51  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    52  			defn.Left.Name.SetUsed(true) // suppress repeats
    53  		} else {
    54  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    55  		}
    56  	}
    57  
    58  	lineno = lno
    59  	if nerrors != 0 {
    60  		return
    61  	}
    62  	walkstmtlist(Curfn.Nbody.Slice())
    63  	if Debug['W'] != 0 {
    64  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    65  		dumplist(s, Curfn.Nbody)
    66  	}
    67  
    68  	zeroResults()
    69  	heapmoves()
    70  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    71  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    72  		dumplist(s, Curfn.Func.Enter)
    73  	}
    74  }
    75  
    76  func walkstmtlist(s []*Node) {
    77  	for i := range s {
    78  		s[i] = walkstmt(s[i])
    79  	}
    80  }
    81  
    82  func samelist(a, b []*Node) bool {
    83  	if len(a) != len(b) {
    84  		return false
    85  	}
    86  	for i, n := range a {
    87  		if n != b[i] {
    88  			return false
    89  		}
    90  	}
    91  	return true
    92  }
    93  
    94  func paramoutheap(fn *Node) bool {
    95  	for _, ln := range fn.Func.Dcl {
    96  		switch ln.Class() {
    97  		case PPARAMOUT:
    98  			if ln.isParamStackCopy() || ln.Addrtaken() {
    99  				return true
   100  			}
   101  
   102  		case PAUTO:
   103  			// stop early - parameters are over
   104  			return false
   105  		}
   106  	}
   107  
   108  	return false
   109  }
   110  
   111  // adjustargs adds "adjust" to all the argument locations for the call n.
   112  // n must be a defer or go node that has already been walked.
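        // For example, a deferred or spawned call is compiled as a call to
        // runtime.deferproc/newproc(siz, fn, args...), so each argument store
        // (an OINDREGSP offset) must be shifted past the two leading words;
        // the ODEFER and OPROC cases below pass exactly 2*Widthptr for that.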
   113  func adjustargs(n *Node, adjust int) {
   114  	callfunc := n.Left
   115  	for _, arg := range callfunc.List.Slice() {
   116  		if arg.Op != OAS {
   117  			Fatalf("call arg not assignment")
   118  		}
   119  		lhs := arg.Left
   120  		if lhs.Op == ONAME {
   121  			// This is a temporary introduced by reorder1.
   122  			// The real store to the stack appears later in the arg list.
   123  			continue
   124  		}
   125  
   126  		if lhs.Op != OINDREGSP {
   127  			Fatalf("call argument store does not use OINDREGSP")
   128  		}
   129  
   130  		// can't really check this in machine-indep code.
   131  		//if(lhs->val.u.reg != D_SP)
   132  		//      Fatalf("call arg assign not indreg(SP)")
   133  		lhs.Xoffset += int64(adjust)
   134  	}
   135  }
   136  
   137  // The result of walkstmt MUST be assigned back to n, e.g.
   138  // 	n.Left = walkstmt(n.Left)
   139  func walkstmt(n *Node) *Node {
   140  	if n == nil {
   141  		return n
   142  	}
   143  
   144  	setlineno(n)
   145  
   146  	walkstmtlist(n.Ninit.Slice())
   147  
   148  	switch n.Op {
   149  	default:
   150  		if n.Op == ONAME {
   151  			yyerror("%v is not a top level statement", n.Sym)
   152  		} else {
   153  			yyerror("%v is not a top level statement", n.Op)
   154  		}
   155  		Dump("nottop", n)
   156  
   157  	case OAS,
   158  		OASOP,
   159  		OAS2,
   160  		OAS2DOTTYPE,
   161  		OAS2RECV,
   162  		OAS2FUNC,
   163  		OAS2MAPR,
   164  		OCLOSE,
   165  		OCOPY,
   166  		OCALLMETH,
   167  		OCALLINTER,
   168  		OCALL,
   169  		OCALLFUNC,
   170  		ODELETE,
   171  		OSEND,
   172  		OPRINT,
   173  		OPRINTN,
   174  		OPANIC,
   175  		OEMPTY,
   176  		ORECOVER,
   177  		OGETG:
   178  		if n.Typecheck() == 0 {
   179  			Fatalf("missing typecheck: %+v", n)
   180  		}
   181  		wascopy := n.Op == OCOPY
   182  		init := n.Ninit
   183  		n.Ninit.Set(nil)
   184  		n = walkexpr(n, &init)
   185  		n = addinit(n, init.Slice())
   186  		if wascopy && n.Op == OCONVNOP {
   187  			n.Op = OEMPTY // don't leave plain values as statements.
   188  		}
   189  
   190  	// special case for a receive where we throw away
   191  	// the value received.
   192  	case ORECV:
   193  		if n.Typecheck() == 0 {
   194  			Fatalf("missing typecheck: %+v", n)
   195  		}
   196  		init := n.Ninit
   197  		n.Ninit.Set(nil)
   198  
   199  		n.Left = walkexpr(n.Left, &init)
   200  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   201  		n = walkexpr(n, &init)
   202  
   203  		n = addinit(n, init.Slice())
   204  
   205  	case OBREAK,
   206  		OCONTINUE,
   207  		OFALL,
   208  		OGOTO,
   209  		OLABEL,
   210  		ODCLCONST,
   211  		ODCLTYPE,
   212  		OCHECKNIL,
   213  		OVARKILL,
   214  		OVARLIVE:
   215  		break
   216  
   217  	case ODCL:
   218  		v := n.Left
   219  		if v.Class() == PAUTOHEAP {
   220  			if compiling_runtime {
   221  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   222  			}
   223  			if prealloc[v] == nil {
   224  				prealloc[v] = callnew(v.Type)
   225  			}
   226  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   227  			nn.SetColas(true)
   228  			nn = typecheck(nn, Etop)
   229  			return walkstmt(nn)
   230  		}
   231  
   232  	case OBLOCK:
   233  		walkstmtlist(n.List.Slice())
   234  
   235  	case OXCASE:
   236  		yyerror("case statement out of place")
   237  		n.Op = OCASE
   238  		fallthrough
   239  
   240  	case OCASE:
   241  		n.Right = walkstmt(n.Right)
   242  
   243  	case ODEFER:
   244  		Curfn.Func.SetHasDefer(true)
   245  		switch n.Left.Op {
   246  		case OPRINT, OPRINTN:
   247  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   248  
   249  		case OCOPY:
   250  			n.Left = copyany(n.Left, &n.Ninit, true)
   251  
   252  		default:
   253  			n.Left = walkexpr(n.Left, &n.Ninit)
   254  		}
   255  
   256  		// make room for size & fn arguments.
   257  		adjustargs(n, 2*Widthptr)
   258  
   259  	case OFOR, OFORUNTIL:
   260  		if n.Left != nil {
   261  			walkstmtlist(n.Left.Ninit.Slice())
   262  			init := n.Left.Ninit
   263  			n.Left.Ninit.Set(nil)
   264  			n.Left = walkexpr(n.Left, &init)
   265  			n.Left = addinit(n.Left, init.Slice())
   266  		}
   267  
   268  		n.Right = walkstmt(n.Right)
   269  		walkstmtlist(n.Nbody.Slice())
   270  
   271  	case OIF:
   272  		n.Left = walkexpr(n.Left, &n.Ninit)
   273  		walkstmtlist(n.Nbody.Slice())
   274  		walkstmtlist(n.Rlist.Slice())
   275  
   276  	case OPROC:
   277  		switch n.Left.Op {
   278  		case OPRINT, OPRINTN:
   279  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   280  
   281  		case OCOPY:
   282  			n.Left = copyany(n.Left, &n.Ninit, true)
   283  
   284  		default:
   285  			n.Left = walkexpr(n.Left, &n.Ninit)
   286  		}
   287  
   288  		// make room for size & fn arguments.
   289  		adjustargs(n, 2*Widthptr)
   290  
   291  	case ORETURN:
   292  		walkexprlist(n.List.Slice(), &n.Ninit)
   293  		if n.List.Len() == 0 {
   294  			break
   295  		}
   296  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   297  			// assign to the function out parameters,
   298  			// so that reorder3 can fix up conflicts
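        			// For example, with named results (x, y int), "return y, x" becomes the
        			// parallel assignment x, y = y, x; reorder3 introduces temporaries so that
        			// neither result is clobbered before it is read.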
   299  			var rl []*Node
   300  
   301  			for _, ln := range Curfn.Func.Dcl {
   302  				cl := ln.Class()
   303  				if cl == PAUTO || cl == PAUTOHEAP {
   304  					break
   305  				}
   306  				if cl == PPARAMOUT {
   307  					if ln.isParamStackCopy() {
   308  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   309  					}
   310  					rl = append(rl, ln)
   311  				}
   312  			}
   313  
   314  			if got, want := n.List.Len(), len(rl); got != want {
   315  				// order should have rewritten multi-value function calls
   316  				// with explicit OAS2FUNC nodes.
   317  				Fatalf("expected %v return arguments, have %v", want, got)
   318  			}
   319  
   320  			if samelist(rl, n.List.Slice()) {
   321  				// special return in disguise
   322  				n.List.Set(nil)
   323  
   324  				break
   325  			}
   326  
   327  			// move function calls out, to make reorder3's job easier.
   328  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   329  
   330  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   331  			n.List.Set(reorder3(ll))
   332  			break
   333  		}
   334  
   335  		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
   336  		n.List.Set(ll)
   337  
   338  	case ORETJMP:
   339  		break
   340  
   341  	case OSELECT:
   342  		walkselect(n)
   343  
   344  	case OSWITCH:
   345  		walkswitch(n)
   346  
   347  	case ORANGE:
   348  		n = walkrange(n)
   349  	}
   350  
   351  	if n.Op == ONAME {
   352  		Fatalf("walkstmt ended up with name: %+v", n)
   353  	}
   354  	return n
   355  }
   356  
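        // isSmallMakeSlice reports whether n is a make([]T, len, cap) whose length and
        // capacity are small constants and whose backing array is under 1<<16 bytes;
        // e.g. make([]int64, 100) qualifies, make([]int64, 10000) does not. Callers use
        // this when deciding whether a non-escaping slice may be allocated on the stack.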
   357  func isSmallMakeSlice(n *Node) bool {
   358  	if n.Op != OMAKESLICE {
   359  		return false
   360  	}
   361  	l := n.Left
   362  	r := n.Right
   363  	if r == nil {
   364  		r = l
   365  	}
   366  	t := n.Type
   367  
   368  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
   369  }
   370  
   371  // walk the whole tree of the body of an
   372  // expression or simple statement.
   373  // The types of expressions are calculated,
   374  // compile-time constants are evaluated,
   375  // and complex side effects like statements are appended to init.
   376  func walkexprlist(s []*Node, init *Nodes) {
   377  	for i := range s {
   378  		s[i] = walkexpr(s[i], init)
   379  	}
   380  }
   381  
   382  func walkexprlistsafe(s []*Node, init *Nodes) {
   383  	for i, n := range s {
   384  		s[i] = safeexpr(n, init)
   385  		s[i] = walkexpr(s[i], init)
   386  	}
   387  }
   388  
   389  func walkexprlistcheap(s []*Node, init *Nodes) {
   390  	for i, n := range s {
   391  		s[i] = cheapexpr(n, init)
   392  		s[i] = walkexpr(s[i], init)
   393  	}
   394  }
   395  
   396  // Build name of function for interface conversion.
   397  // Not all names are possible
   398  // (e.g., we'll never generate convE2E or convE2I or convI2E).
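        // For example, converting an int64 to interface{} yields "convT2E64", and
        // converting a string to a non-empty interface yields "convT2Istring".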
   399  func convFuncName(from, to *types.Type) string {
   400  	tkind := to.Tie()
   401  	switch from.Tie() {
   402  	case 'I':
   403  		switch tkind {
   404  		case 'I':
   405  			return "convI2I"
   406  		}
   407  	case 'T':
   408  		switch tkind {
   409  		case 'E':
   410  			switch {
   411  			case from.Size() == 2 && from.Align == 2:
   412  				return "convT2E16"
   413  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   414  				return "convT2E32"
   415  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   416  				return "convT2E64"
   417  			case from.IsString():
   418  				return "convT2Estring"
   419  			case from.IsSlice():
   420  				return "convT2Eslice"
   421  			case !types.Haspointers(from):
   422  				return "convT2Enoptr"
   423  			}
   424  			return "convT2E"
   425  		case 'I':
   426  			switch {
   427  			case from.Size() == 2 && from.Align == 2:
   428  				return "convT2I16"
   429  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   430  				return "convT2I32"
   431  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   432  				return "convT2I64"
   433  			case from.IsString():
   434  				return "convT2Istring"
   435  			case from.IsSlice():
   436  				return "convT2Islice"
   437  			case !types.Haspointers(from):
   438  				return "convT2Inoptr"
   439  			}
   440  			return "convT2I"
   441  		}
   442  	}
   443  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   444  	panic("unreachable")
   445  }
   446  
   447  // The result of walkexpr MUST be assigned back to n, e.g.
   448  // 	n.Left = walkexpr(n.Left, init)
   449  func walkexpr(n *Node, init *Nodes) *Node {
   450  	if n == nil {
   451  		return n
   452  	}
   453  
   454  	// Eagerly checkwidth all expressions for the back end.
   455  	if n.Type != nil && !n.Type.WidthCalculated() {
   456  		switch n.Type.Etype {
   457  		case TBLANK, TNIL, TIDEAL:
   458  		default:
   459  			checkwidth(n.Type)
   460  		}
   461  	}
   462  
   463  	if init == &n.Ninit {
   464  		// not okay to use n->ninit when walking n,
   465  		// because we might replace n with some other node
   466  		// and would lose the init list.
   467  		Fatalf("walkexpr init == &n->ninit")
   468  	}
   469  
   470  	if n.Ninit.Len() != 0 {
   471  		walkstmtlist(n.Ninit.Slice())
   472  		init.AppendNodes(&n.Ninit)
   473  	}
   474  
   475  	lno := setlineno(n)
   476  
   477  	if Debug['w'] > 1 {
   478  		Dump("walk-before", n)
   479  	}
   480  
   481  	if n.Typecheck() != 1 {
   482  		Fatalf("missed typecheck: %+v", n)
   483  	}
   484  
   485  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
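        		// n was moved to the heap by escape analysis; rewrite this use as a load
        		// through its heap pointer, i.e. *(n.Name.Param.Heapaddr).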
   486  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   487  		nn = typecheck(nn, Erv)
   488  		nn = walkexpr(nn, init)
   489  		nn.Left.SetNonNil(true)
   490  		return nn
   491  	}
   492  
   493  opswitch:
   494  	switch n.Op {
   495  	default:
   496  		Dump("walk", n)
   497  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   498  
   499  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   500  
   501  	case OTYPE, ONAME, OLITERAL:
   502  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   503  		// Perhaps refactor to use Node.mayBeShared for these instead.
   504  		// If these return early, make sure to still call
   505  		// stringsym for constant strings.
   506  
   507  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   508  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   509  		n.Left = walkexpr(n.Left, init)
   510  
   511  	case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR:
   512  		n.Left = walkexpr(n.Left, init)
   513  		n.Right = walkexpr(n.Right, init)
   514  
   515  	case ODOT:
   516  		usefield(n)
   517  		n.Left = walkexpr(n.Left, init)
   518  
   519  	case ODOTTYPE, ODOTTYPE2:
   520  		n.Left = walkexpr(n.Left, init)
   521  		// Set up interface type addresses for back end.
   522  		n.Right = typename(n.Type)
   523  		if n.Op == ODOTTYPE {
   524  			n.Right.Right = typename(n.Left.Type)
   525  		}
   526  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   527  			n.List.Set1(itabname(n.Type, n.Left.Type))
   528  		}
   529  
   530  	case ODOTPTR:
   531  		usefield(n)
   532  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   533  			// No actual copy will be generated, so emit an explicit nil check.
   534  			n.Left = cheapexpr(n.Left, init)
   535  
   536  			checknil(n.Left, init)
   537  		}
   538  
   539  		n.Left = walkexpr(n.Left, init)
   540  
   541  	case OLEN, OCAP:
   542  		n.Left = walkexpr(n.Left, init)
   543  
   544  		// replace len(*[10]int) with 10.
   545  		// delayed until now to preserve side effects.
   546  		t := n.Left.Type
   547  
   548  		if t.IsPtr() {
   549  			t = t.Elem()
   550  		}
   551  		if t.IsArray() {
   552  			safeexpr(n.Left, init)
   553  			nodconst(n, n.Type, t.NumElem())
   554  			n.SetTypecheck(1)
   555  		}
   556  
   557  	case OLSH, ORSH:
   558  		n.Left = walkexpr(n.Left, init)
   559  		n.Right = walkexpr(n.Right, init)
   560  		t := n.Left.Type
   561  		n.SetBounded(bounded(n.Right, 8*t.Width))
   562  		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
   563  			Warn("shift bounds check elided")
   564  		}
   565  
   566  	case OCOMPLEX:
   567  		// Use results from call expression as arguments for complex.
   568  		if n.Left == nil && n.Right == nil {
   569  			n.Left = n.List.First()
   570  			n.Right = n.List.Second()
   571  		}
   572  		n.Left = walkexpr(n.Left, init)
   573  		n.Right = walkexpr(n.Right, init)
   574  
   575  	case OEQ, ONE:
   576  		n.Left = walkexpr(n.Left, init)
   577  		n.Right = walkexpr(n.Right, init)
   578  
   579  		// Disable safemode while compiling this code: the code we
   580  		// generate internally can refer to unsafe.Pointer.
   581  		// In this case it can happen if we need to generate an ==
   582  		// for a struct containing a reflect.Value, which itself has
   583  		// an unexported field of type unsafe.Pointer.
   584  		old_safemode := safemode
   585  		safemode = false
   586  		n = walkcompare(n, init)
   587  		safemode = old_safemode
   588  
   589  	case OANDAND, OOROR:
   590  		n.Left = walkexpr(n.Left, init)
   591  
   592  		// cannot put side effects from n.Right on init,
   593  		// because they cannot run before n.Left is checked.
   594  		// save elsewhere and store on the eventual n.Right.
   595  		var ll Nodes
   596  
   597  		n.Right = walkexpr(n.Right, &ll)
   598  		n.Right = addinit(n.Right, ll.Slice())
   599  		n = walkinrange(n, init)
   600  
   601  	case OPRINT, OPRINTN:
   602  		walkexprlist(n.List.Slice(), init)
   603  		n = walkprint(n, init)
   604  
   605  	case OPANIC:
   606  		n = mkcall("gopanic", nil, init, n.Left)
   607  
   608  	case ORECOVER:
   609  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   610  
   611  	case OCLOSUREVAR, OCFUNC:
   612  		n.SetAddable(true)
   613  
   614  	case OCALLINTER:
   615  		usemethod(n)
   616  		t := n.Left.Type
   617  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   618  			break
   619  		}
   620  		n.Left = walkexpr(n.Left, init)
   621  		walkexprlist(n.List.Slice(), init)
   622  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   623  		n.List.Set(reorder1(ll))
   624  
   625  	case OCALLFUNC:
   626  		if n.Left.Op == OCLOSURE {
   627  			// Transform direct call of a closure to call of a normal function.
   628  			// transformclosure already did all preparation work.
   629  
   630  			// Prepend captured variables to argument list.
   631  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   632  
   633  			n.Left.Func.Enter.Set(nil)
   634  
   635  			// Replace OCLOSURE with ONAME/PFUNC.
   636  			n.Left = n.Left.Func.Closure.Func.Nname
   637  
   638  			// Update type of OCALLFUNC node.
   639  			// Output arguments haven't changed, but their offsets could have.
   640  			if n.Left.Type.NumResults() == 1 {
   641  				n.Type = n.Left.Type.Results().Field(0).Type
   642  			} else {
   643  				n.Type = n.Left.Type.Results()
   644  			}
   645  		}
   646  
   647  		t := n.Left.Type
   648  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   649  			break
   650  		}
   651  
   652  		n.Left = walkexpr(n.Left, init)
   653  		walkexprlist(n.List.Slice(), init)
   654  
   655  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   656  		n.List.Set(reorder1(ll))
   657  
   658  	case OCALLMETH:
   659  		t := n.Left.Type
   660  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   661  			break
   662  		}
   663  		n.Left = walkexpr(n.Left, init)
   664  		walkexprlist(n.List.Slice(), init)
   665  		ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
   666  		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   667  		ll = append(ll, lr...)
   668  		n.Left.Left = nil
   669  		updateHasCall(n.Left)
   670  		n.List.Set(reorder1(ll))
   671  
   672  	case OAS:
   673  		init.AppendNodes(&n.Ninit)
   674  
   675  		n.Left = walkexpr(n.Left, init)
   676  		n.Left = safeexpr(n.Left, init)
   677  
   678  		if oaslit(n, init) {
   679  			break
   680  		}
   681  
   682  		if n.Right == nil {
   683  			// TODO(austin): Check all "implicit zeroing"
   684  			break
   685  		}
   686  
   687  		if !instrumenting && iszero(n.Right) {
   688  			break
   689  		}
   690  
   691  		switch n.Right.Op {
   692  		default:
   693  			n.Right = walkexpr(n.Right, init)
   694  
   695  		case ORECV:
   696  			// x = <-c; n.Left is x, n.Right.Left is c.
   697  			// orderstmt made sure x is addressable.
   698  			n.Right.Left = walkexpr(n.Right.Left, init)
   699  
   700  			n1 := nod(OADDR, n.Left, nil)
   701  			r := n.Right.Left // the channel
   702  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   703  			n = walkexpr(n, init)
   704  			break opswitch
   705  
   706  		case OAPPEND:
   707  			// x = append(...)
   708  			r := n.Right
   709  			if r.Type.Elem().NotInHeap() {
   710  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   711  			}
   712  			if r.Isddd() {
   713  				r = appendslice(r, init) // also works for append(slice, string).
   714  			} else {
   715  				r = walkappend(r, init, n)
   716  			}
   717  			n.Right = r
   718  			if r.Op == OAPPEND {
   719  				// Left in place for back end.
   720  				// Do not add a new write barrier.
   721  				// Set up address of type for back end.
   722  				r.Left = typename(r.Type.Elem())
   723  				break opswitch
   724  			}
   725  			// Otherwise, lowered for race detector.
   726  			// Treat as ordinary assignment.
   727  		}
   728  
   729  		if n.Left != nil && n.Right != nil {
   730  			n = convas(n, init)
   731  		}
   732  
   733  	case OAS2:
   734  		init.AppendNodes(&n.Ninit)
   735  		walkexprlistsafe(n.List.Slice(), init)
   736  		walkexprlistsafe(n.Rlist.Slice(), init)
   737  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   738  		ll = reorder3(ll)
   739  		n = liststmt(ll)
   740  
   741  	// a,b,... = fn()
   742  	case OAS2FUNC:
   743  		init.AppendNodes(&n.Ninit)
   744  
   745  		r := n.Rlist.First()
   746  		walkexprlistsafe(n.List.Slice(), init)
   747  		r = walkexpr(r, init)
   748  
   749  		if isIntrinsicCall(r) {
   750  			n.Rlist.Set1(r)
   751  			break
   752  		}
   753  		init.Append(r)
   754  
   755  		ll := ascompatet(n.List, r.Type)
   756  		n = liststmt(ll)
   757  
   758  	// x, y = <-c
   759  	// orderstmt made sure x is addressable.
   760  	case OAS2RECV:
   761  		init.AppendNodes(&n.Ninit)
   762  
   763  		r := n.Rlist.First()
   764  		walkexprlistsafe(n.List.Slice(), init)
   765  		r.Left = walkexpr(r.Left, init)
   766  		var n1 *Node
   767  		if isblank(n.List.First()) {
   768  			n1 = nodnil()
   769  		} else {
   770  			n1 = nod(OADDR, n.List.First(), nil)
   771  		}
   772  		n1.Etype = 1 // addr does not escape
   773  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   774  		ok := n.List.Second()
   775  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   776  		n = nod(OAS, ok, call)
   777  		n = typecheck(n, Etop)
   778  
   779  	// a,b = m[i]
   780  	case OAS2MAPR:
   781  		init.AppendNodes(&n.Ninit)
   782  
   783  		r := n.Rlist.First()
   784  		walkexprlistsafe(n.List.Slice(), init)
   785  		r.Left = walkexpr(r.Left, init)
   786  		r.Right = walkexpr(r.Right, init)
   787  		t := r.Left.Type
   788  
   789  		fast := mapfast(t)
   790  		var key *Node
   791  		if fast != mapslow {
   792  			// fast versions take key by value
   793  			key = r.Right
   794  		} else {
   795  			// standard version takes key by reference
   796  			// orderexpr made sure key is addressable.
   797  			key = nod(OADDR, r.Right, nil)
   798  		}
   799  
   800  		// from:
   801  		//   a,b = m[i]
   802  		// to:
   803  		//   var,b = mapaccess2*(t, m, i)
   804  		//   a = *var
   805  		a := n.List.First()
   806  
   807  		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/map.go:maxZero
   808  			fn := mapfn(mapaccess2[fast], t)
   809  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   810  		} else {
   811  			fn := mapfn("mapaccess2_fat", t)
   812  			z := zeroaddr(w)
   813  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   814  		}
   815  
   816  		// mapaccess2* returns a typed bool, but due to spec changes,
   817  		// the boolean result of i.(T) is now untyped so we make it the
   818  		// same type as the variable on the lhs.
   819  		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
   820  			r.Type.Field(1).Type = ok.Type
   821  		}
   822  		n.Rlist.Set1(r)
   823  		n.Op = OAS2FUNC
   824  
   825  		// don't generate a = *var if a is _
   826  		if !isblank(a) {
   827  			var_ := temp(types.NewPtr(t.Val()))
   828  			var_.SetTypecheck(1)
   829  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   830  			n.List.SetFirst(var_)
   831  			n = walkexpr(n, init)
   832  			init.Append(n)
   833  			n = nod(OAS, a, nod(OIND, var_, nil))
   834  		}
   835  
   836  		n = typecheck(n, Etop)
   837  		n = walkexpr(n, init)
   838  
   839  	case ODELETE:
   840  		init.AppendNodes(&n.Ninit)
   841  		map_ := n.List.First()
   842  		key := n.List.Second()
   843  		map_ = walkexpr(map_, init)
   844  		key = walkexpr(key, init)
   845  
   846  		t := map_.Type
   847  		fast := mapfast(t)
   848  		if fast == mapslow {
   849  			// orderstmt made sure key is addressable.
   850  			key = nod(OADDR, key, nil)
   851  		}
   852  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   853  
   854  	case OAS2DOTTYPE:
   855  		walkexprlistsafe(n.List.Slice(), init)
   856  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   857  
   858  	case OCONVIFACE:
   859  		n.Left = walkexpr(n.Left, init)
   860  
   861  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
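        		// For example, converting a pointer value to an interface needs no allocation:
        		// the pointer itself becomes the interface's data word.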
   862  		if isdirectiface(n.Left.Type) {
   863  			var t *Node
   864  			if n.Type.IsEmptyInterface() {
   865  				t = typename(n.Left.Type)
   866  			} else {
   867  				t = itabname(n.Left.Type, n.Type)
   868  			}
   869  			l := nod(OEFACE, t, n.Left)
   870  			l.Type = n.Type
   871  			l.SetTypecheck(n.Typecheck())
   872  			n = l
   873  			break
   874  		}
   875  
   876  		if staticbytes == nil {
   877  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   878  			staticbytes.SetClass(PEXTERN)
   879  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   880  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   881  			zerobase.SetClass(PEXTERN)
   882  			zerobase.Type = types.Types[TUINTPTR]
   883  		}
   884  
   885  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   886  		// by using an existing addressable value identical to n.Left
   887  		// or creating one on the stack.
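        		// For example, in "var e interface{} = true" the data word can point into
        		// staticbytes, and a small non-escaping value can be copied to a stack
        		// temporary whose address is taken, avoiding a convT2* call entirely.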
   888  		var value *Node
   889  		switch {
   890  		case n.Left.Type.Size() == 0:
   891  			// n.Left is zero-sized. Use zerobase.
   892  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   893  			value = zerobase
   894  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   895  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   896  			n.Left = cheapexpr(n.Left, init)
   897  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   898  			value.SetBounded(true)
   899  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   900  			// n.Left is a readonly global; use it directly.
   901  			value = n.Left
   902  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   903  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   904  			value = temp(n.Left.Type)
   905  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   906  		}
   907  
   908  		if value != nil {
   909  			// Value is identical to n.Left.
   910  			// Construct the interface directly: {type/itab, &value}.
   911  			var t *Node
   912  			if n.Type.IsEmptyInterface() {
   913  				t = typename(n.Left.Type)
   914  			} else {
   915  				t = itabname(n.Left.Type, n.Type)
   916  			}
   917  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   918  			l.Type = n.Type
   919  			l.SetTypecheck(n.Typecheck())
   920  			n = l
   921  			break
   922  		}
   923  
   924  		// Implement interface to empty interface conversion.
   925  		// tmp = i.itab
   926  		// if tmp != nil {
   927  		//    tmp = tmp.type
   928  		// }
   929  		// e = iface{tmp, i.data}
   930  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   931  			// Evaluate the input interface.
   932  			c := temp(n.Left.Type)
   933  			init.Append(nod(OAS, c, n.Left))
   934  
   935  			// Get the itab out of the interface.
   936  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   937  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   938  
   939  			// Get the type out of the itab.
   940  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   941  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   942  			init.Append(nif)
   943  
   944  			// Build the result.
   945  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   946  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   947  			e.SetTypecheck(1)
   948  			n = e
   949  			break
   950  		}
   951  
   952  		var ll []*Node
   953  		if n.Type.IsEmptyInterface() {
   954  			if !n.Left.Type.IsInterface() {
   955  				ll = append(ll, typename(n.Left.Type))
   956  			}
   957  		} else {
   958  			if n.Left.Type.IsInterface() {
   959  				ll = append(ll, typename(n.Type))
   960  			} else {
   961  				ll = append(ll, itabname(n.Left.Type, n.Type))
   962  			}
   963  		}
   964  
   965  		if n.Left.Type.IsInterface() {
   966  			ll = append(ll, n.Left)
   967  		} else {
   968  			// regular types are passed by reference to avoid C vararg calls
   969  			// orderexpr arranged for n.Left to be a temporary for all
   970  			// the conversions it could see. comparison of an interface
   971  			// with a non-interface, especially in a switch on interface value
   972  			// with non-interface cases, is not visible to orderstmt, so we
   973  			// have to fall back on allocating a temp here.
   974  			if islvalue(n.Left) {
   975  				ll = append(ll, nod(OADDR, n.Left, nil))
   976  			} else {
   977  				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
   978  			}
   979  			dowidth(n.Left.Type)
   980  		}
   981  
   982  		fn := syslook(convFuncName(n.Left.Type, n.Type))
   983  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   984  		dowidth(fn.Type)
   985  		n = nod(OCALL, fn, nil)
   986  		n.List.Set(ll)
   987  		n = typecheck(n, Erv)
   988  		n = walkexpr(n, init)
   989  
   990  	case OCONV, OCONVNOP:
   991  		if thearch.SoftFloat {
   992  			// For the soft-float case, ssa.go handles these conversions.
   993  			goto oconv_walkexpr
   994  		}
   995  		switch thearch.LinkArch.Family {
   996  		case sys.ARM, sys.MIPS:
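        			// These 32-bit targets cannot convert between floats and 64-bit integers
        			// directly, so such conversions are lowered to runtime calls
        			// (float64toint64, int64tofloat64, and so on) below.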
   997  			if n.Left.Type.IsFloat() {
   998  				switch n.Type.Etype {
   999  				case TINT64:
  1000  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1001  					break opswitch
  1002  				case TUINT64:
  1003  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1004  					break opswitch
  1005  				}
  1006  			}
  1007  
  1008  			if n.Type.IsFloat() {
  1009  				switch n.Left.Type.Etype {
  1010  				case TINT64:
  1011  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1012  					break opswitch
  1013  				case TUINT64:
  1014  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1015  					break opswitch
  1016  				}
  1017  			}
  1018  
  1019  		case sys.I386:
  1020  			if n.Left.Type.IsFloat() {
  1021  				switch n.Type.Etype {
  1022  				case TINT64:
  1023  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1024  					break opswitch
  1025  				case TUINT64:
  1026  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1027  					break opswitch
  1028  				case TUINT32, TUINT, TUINTPTR:
  1029  					n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1030  					break opswitch
  1031  				}
  1032  			}
  1033  			if n.Type.IsFloat() {
  1034  				switch n.Left.Type.Etype {
  1035  				case TINT64:
  1036  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1037  					break opswitch
  1038  				case TUINT64:
  1039  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1040  					break opswitch
  1041  				case TUINT32, TUINT, TUINTPTR:
  1042  					n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
  1043  					break opswitch
  1044  				}
  1045  			}
  1046  		}
  1047  
  1048  	oconv_walkexpr:
  1049  		n.Left = walkexpr(n.Left, init)
  1050  
  1051  	case OANDNOT:
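        		// Rewrite x &^ y as x & ^y.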
  1052  		n.Left = walkexpr(n.Left, init)
  1053  		n.Op = OAND
  1054  		n.Right = nod(OCOM, n.Right, nil)
  1055  		n.Right = typecheck(n.Right, Erv)
  1056  		n.Right = walkexpr(n.Right, init)
  1057  
  1058  	case ODIV, OMOD:
  1059  		n.Left = walkexpr(n.Left, init)
  1060  		n.Right = walkexpr(n.Right, init)
  1061  
  1062  		// rewrite complex div into function call.
  1063  		et := n.Left.Type.Etype
  1064  
  1065  		if isComplex[et] && n.Op == ODIV {
  1066  			t := n.Type
  1067  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1068  			n = conv(n, t)
  1069  			break
  1070  		}
  1071  
  1072  		// Nothing to do for float divisions.
  1073  		if isFloat[et] {
  1074  			break
  1075  		}
  1076  
  1077  		// rewrite 64-bit div and mod on 32-bit architectures.
  1078  		// TODO: Remove this code once we can introduce
  1079  		// runtime calls late in SSA processing.
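        		// For example, on a 32-bit target an int64 quotient x / y becomes a call to
        		// int64div(x, y); division or modulus by a constant power of two is left for
        		// the SSA backend (see the OLITERAL check below).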
  1080  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1081  			if n.Right.Op == OLITERAL {
  1082  				// Leave div/mod by constant powers of 2.
  1083  				// The SSA backend will handle those.
  1084  				switch et {
  1085  				case TINT64:
  1086  					c := n.Right.Int64()
  1087  					if c < 0 {
  1088  						c = -c
  1089  					}
  1090  					if c != 0 && c&(c-1) == 0 {
  1091  						break opswitch
  1092  					}
  1093  				case TUINT64:
  1094  					c := uint64(n.Right.Int64())
  1095  					if c != 0 && c&(c-1) == 0 {
  1096  						break opswitch
  1097  					}
  1098  				}
  1099  			}
  1100  			var fn string
  1101  			if et == TINT64 {
  1102  				fn = "int64"
  1103  			} else {
  1104  				fn = "uint64"
  1105  			}
  1106  			if n.Op == ODIV {
  1107  				fn += "div"
  1108  			} else {
  1109  				fn += "mod"
  1110  			}
  1111  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1112  		}
  1113  
  1114  	case OINDEX:
  1115  		n.Left = walkexpr(n.Left, init)
  1116  
  1117  		// save the original node for bounds checking elision.
  1118  		// If it was an ODIV/OMOD, walk might rewrite it.
  1119  		r := n.Right
  1120  
  1121  		n.Right = walkexpr(n.Right, init)
  1122  
  1123  		// if range of type cannot exceed static array bound,
  1124  		// disable bounds check.
  1125  		if n.Bounded() {
  1126  			break
  1127  		}
  1128  		t := n.Left.Type
  1129  		if t != nil && t.IsPtr() {
  1130  			t = t.Elem()
  1131  		}
  1132  		if t.IsArray() {
  1133  			n.SetBounded(bounded(r, t.NumElem()))
  1134  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1135  				Warn("index bounds check elided")
  1136  			}
  1137  			if smallintconst(n.Right) && !n.Bounded() {
  1138  				yyerror("index out of bounds")
  1139  			}
  1140  		} else if Isconst(n.Left, CTSTR) {
  1141  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1142  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1143  				Warn("index bounds check elided")
  1144  			}
  1145  			if smallintconst(n.Right) && !n.Bounded() {
  1146  				yyerror("index out of bounds")
  1147  			}
  1148  		}
  1149  
  1150  		if Isconst(n.Right, CTINT) {
  1151  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1152  				yyerror("index out of bounds")
  1153  			}
  1154  		}
  1155  
  1156  	case OINDEXMAP:
  1157  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
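        		// That is, a read of m[k] loads through the pointer returned by mapaccess1,
        		// and an assignment to m[k] stores through the pointer returned by mapassign;
        		// the mapfast variants take small keys by value instead of by reference.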
  1158  		n.Left = walkexpr(n.Left, init)
  1159  		n.Right = walkexpr(n.Right, init)
  1160  		map_ := n.Left
  1161  		key := n.Right
  1162  		t := map_.Type
  1163  		if n.Etype == 1 {
  1164  			// This m[k] expression is on the left-hand side of an assignment.
  1165  			fast := mapfast(t)
  1166  			if fast == mapslow {
  1167  				// standard version takes key by reference.
  1168  				// orderexpr made sure key is addressable.
  1169  				key = nod(OADDR, key, nil)
  1170  			}
  1171  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1172  		} else {
  1173  			// m[k] is not the target of an assignment.
  1174  			fast := mapfast(t)
  1175  			if fast == mapslow {
  1176  				// standard version takes key by reference.
  1177  				// orderexpr made sure key is addressable.
  1178  				key = nod(OADDR, key, nil)
  1179  			}
  1180  
  1181  			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/map.go:maxZero
  1182  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
  1183  			} else {
  1184  				z := zeroaddr(w)
  1185  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
  1186  			}
  1187  		}
  1188  		n.Type = types.NewPtr(t.Val())
  1189  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1190  		n = nod(OIND, n, nil)
  1191  		n.Type = t.Val()
  1192  		n.SetTypecheck(1)
  1193  
  1194  	case ORECV:
  1195  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1196  
  1197  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1198  		n.Left = walkexpr(n.Left, init)
  1199  		low, high, max := n.SliceBounds()
  1200  		low = walkexpr(low, init)
  1201  		if low != nil && iszero(low) {
  1202  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1203  			low = nil
  1204  		}
  1205  		high = walkexpr(high, init)
  1206  		max = walkexpr(max, init)
  1207  		n.SetSliceBounds(low, high, max)
  1208  		if n.Op.IsSlice3() {
  1209  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1210  				// Reduce x[i:j:cap(x)] to x[i:j].
  1211  				if n.Op == OSLICE3 {
  1212  					n.Op = OSLICE
  1213  				} else {
  1214  					n.Op = OSLICEARR
  1215  				}
  1216  				n = reduceSlice(n)
  1217  			}
  1218  		} else {
  1219  			n = reduceSlice(n)
  1220  		}
  1221  
  1222  	case ONEW:
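        		// A non-escaping new(T) becomes the address of a zeroed stack temporary;
        		// otherwise it is a call to runtime.newobject (via callnew).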
  1223  		if n.Esc == EscNone {
  1224  			if n.Type.Elem().Width >= 1<<16 {
  1225  				Fatalf("large ONEW with EscNone: %v", n)
  1226  			}
  1227  			r := temp(n.Type.Elem())
  1228  			r = nod(OAS, r, nil) // zero temp
  1229  			r = typecheck(r, Etop)
  1230  			init.Append(r)
  1231  			r = nod(OADDR, r.Left, nil)
  1232  			r = typecheck(r, Erv)
  1233  			n = r
  1234  		} else {
  1235  			n = callnew(n.Type.Elem())
  1236  		}
  1237  
  1238  	case OCMPSTR:
  1239  		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
  1240  		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  1241  			// TODO(marvin): Fix Node.EType type union.
  1242  			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  1243  			r = typecheck(r, Erv)
  1244  			r = walkexpr(r, init)
  1245  			r.Type = n.Type
  1246  			n = r
  1247  			break
  1248  		}
  1249  
  1250  		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
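        		// For example, s == "ab" becomes len(s) == 2 && s[0] == 'a' && s[1] == 'b',
        		// with adjacent byte comparisons merged into wider loads on targets that
        		// support unaligned loads (see canCombineLoads below).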
  1251  		var cs, ncs *Node // const string, non-const string
  1252  		switch {
  1253  		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  1254  			// ignore; will be constant evaluated
  1255  		case Isconst(n.Left, CTSTR):
  1256  			cs = n.Left
  1257  			ncs = n.Right
  1258  		case Isconst(n.Right, CTSTR):
  1259  			cs = n.Right
  1260  			ncs = n.Left
  1261  		}
  1262  		if cs != nil {
  1263  			cmp := Op(n.Etype)
  1264  			// maxRewriteLen was chosen empirically.
  1265  			// It is the value that minimizes cmd/go file size
  1266  			// across most architectures.
  1267  			// See the commit description for CL 26758 for details.
  1268  			maxRewriteLen := 6
  1269  			// Some architectures can load an unaligned byte sequence as one word,
  1270  			// so we can cover longer strings with the same amount of code.
  1271  			canCombineLoads := false
  1272  			combine64bit := false
  1273  			// TODO: does this improve performance on any other architectures?
  1274  			switch thearch.LinkArch.Family {
  1275  			case sys.AMD64:
  1276  				// Larger compares require longer instructions, so keep this reasonably low.
  1277  				// Data from CL 26758 shows that longer strings are rare.
  1278  				// If we really want we can do 16 byte SSE comparisons in the future.
  1279  				maxRewriteLen = 16
  1280  				canCombineLoads = true
  1281  				combine64bit = true
  1282  			case sys.I386:
  1283  				maxRewriteLen = 8
  1284  				canCombineLoads = true
  1285  			}
  1286  			var and Op
  1287  			switch cmp {
  1288  			case OEQ:
  1289  				and = OANDAND
  1290  			case ONE:
  1291  				and = OOROR
  1292  			default:
  1293  				// Don't do byte-wise comparisons for <, <=, etc.
  1294  				// They're fairly complicated.
  1295  				// Length-only checks are ok, though.
  1296  				maxRewriteLen = 0
  1297  			}
  1298  			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  1299  				if len(s) > 0 {
  1300  					ncs = safeexpr(ncs, init)
  1301  				}
  1302  				// TODO(marvin): Fix Node.EType type union.
  1303  				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  1304  				remains := len(s)
  1305  				for i := 0; remains > 0; {
  1306  					if remains == 1 || !canCombineLoads {
  1307  						cb := nodintconst(int64(s[i]))
  1308  						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  1309  						r = nod(and, r, nod(cmp, ncb, cb))
  1310  						remains--
  1311  						i++
  1312  						continue
  1313  					}
  1314  					var step int
  1315  					var convType *types.Type
  1316  					switch {
  1317  					case remains >= 8 && combine64bit:
  1318  						convType = types.Types[TINT64]
  1319  						step = 8
  1320  					case remains >= 4:
  1321  						convType = types.Types[TUINT32]
  1322  						step = 4
  1323  					case remains >= 2:
  1324  						convType = types.Types[TUINT16]
  1325  						step = 2
  1326  					}
  1327  					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  1328  					ncsubstr = conv(ncsubstr, convType)
  1329  					csubstr := int64(s[i])
  1330  					// Calculate large constant from bytes as sequence of shifts and ors.
  1331  					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  1332  					// ssa will combine this into a single large load.
  1333  					for offset := 1; offset < step; offset++ {
  1334  						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  1335  						b = conv(b, convType)
  1336  						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  1337  						ncsubstr = nod(OOR, ncsubstr, b)
  1338  						csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset)
  1339  					}
  1340  					csubstrPart := nodintconst(csubstr)
  1341  					// Compare "step" bytes at once.
  1342  					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  1343  					remains -= step
  1344  					i += step
  1345  				}
  1346  				r = typecheck(r, Erv)
  1347  				r = walkexpr(r, init)
  1348  				r.Type = n.Type
  1349  				n = r
  1350  				break
  1351  			}
  1352  		}
  1353  
  1354  		var r *Node
  1355  		// TODO(marvin): Fix Node.EType type union.
  1356  		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
  1357  			// prepare for rewrite below
  1358  			n.Left = cheapexpr(n.Left, init)
  1359  			n.Right = cheapexpr(n.Right, init)
  1360  
  1361  			lstr := conv(n.Left, types.Types[TSTRING])
  1362  			rstr := conv(n.Right, types.Types[TSTRING])
  1363  			lptr := nod(OSPTR, lstr, nil)
  1364  			rptr := nod(OSPTR, rstr, nil)
  1365  			llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR])
  1366  			rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR])
  1367  
  1368  			fn := syslook("memequal")
  1369  			fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
  1370  			r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen)
  1371  
  1372  			// quick check of len before full compare for == or !=.
  1373  			// memequal then tests equality up to length len.
  1374  			// TODO(marvin): Fix Node.EType type union.
  1375  			if Op(n.Etype) == OEQ {
  1376  				// len(left) == len(right) && memequal(left, right, len)
  1377  				r = nod(OANDAND, nod(OEQ, llen, rlen), r)
  1378  			} else {
  1379  				// len(left) != len(right) || !memequal(left, right, len)
  1380  				r = nod(ONOT, r, nil)
  1381  				r = nod(OOROR, nod(ONE, llen, rlen), r)
  1382  			}
  1383  
  1384  			r = typecheck(r, Erv)
  1385  			r = walkexpr(r, nil)
  1386  		} else {
  1387  			// sys_cmpstring(s1, s2) :: 0
  1388  			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1389  			// TODO(marvin): Fix Node.EType type union.
  1390  			r = nod(Op(n.Etype), r, nodintconst(0))
  1391  		}
  1392  
  1393  		r = typecheck(r, Erv)
  1394  		if !n.Type.IsBoolean() {
  1395  			Fatalf("cmp %v", n.Type)
  1396  		}
  1397  		r.Type = n.Type
  1398  		n = r
  1399  
  1400  	case OADDSTR:
  1401  		n = addstr(n, init)
  1402  
  1403  	case OAPPEND:
  1404  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1405  		Fatalf("append outside assignment")
  1406  
  1407  	case OCOPY:
  1408  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1409  
  1410  		// cannot use chanfn - closechan takes any, not chan any
  1411  	case OCLOSE:
  1412  		fn := syslook("closechan")
  1413  
  1414  		fn = substArgTypes(fn, n.Left.Type)
  1415  		n = mkcall1(fn, nil, init, n.Left)
  1416  
  1417  	case OMAKECHAN:
  1418  		// When size fits into int, use makechan instead of
  1419  		// makechan64, which is faster and shorter on 32 bit platforms.
  1420  		size := n.Left
  1421  		fnname := "makechan64"
  1422  		argtype := types.Types[TINT64]
  1423  
  1424  		// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1425  		// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1426  		// will be handled by the negative range checks in makechan during runtime.
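        		// For example, make(chan T, n) with n of type int always uses makechan;
        		// makechan64 is needed only when n's type can represent values beyond the
        		// target's uint range, such as an int64 size on a 32-bit platform.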
  1427  		if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1428  			fnname = "makechan"
  1429  			argtype = types.Types[TINT]
  1430  		}
  1431  
  1432  		n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
  1433  
  1434  	case OMAKEMAP:
  1435  		t := n.Type
  1436  		hmapType := hmap(t)
  1437  		hint := n.Left
  1438  
  1439  		// var h *hmap
  1440  		var h *Node
  1441  		if n.Esc == EscNone {
  1442  			// Allocate hmap on stack.
  1443  
  1444  			// var hv hmap
  1445  			hv := temp(hmapType)
  1446  			zero := nod(OAS, hv, nil)
  1447  			zero = typecheck(zero, Etop)
  1448  			init.Append(zero)
  1449  			// h = &hv
  1450  			h = nod(OADDR, hv, nil)
  1451  
  1452  			// Allocate one bucket pointed to by hmap.buckets on stack if hint
  1453  			// is not larger than BUCKETSIZE. In case hint is larger than
  1454  			// BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
  1455  			// Maximum key and value size is 128 bytes, larger objects
  1456  			// are stored with an indirection. So max bucket size is 2048+eps.
  1457  			if !Isconst(hint, CTINT) ||
  1458  				!(hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) > 0) {
  1459  				// var bv bmap
  1460  				bv := temp(bmap(t))
  1461  
  1462  				zero = nod(OAS, bv, nil)
  1463  				zero = typecheck(zero, Etop)
  1464  				init.Append(zero)
  1465  
  1466  				// b = &bv
  1467  				b := nod(OADDR, bv, nil)
  1468  
  1469  				// h.buckets = b
  1470  				bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
  1471  				na := nod(OAS, nodSym(ODOT, h, bsym), b)
  1472  				na = typecheck(na, Etop)
  1473  				init.Append(na)
  1474  			}
  1475  		}
  1476  
  1477  		if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1478  			// Handling make(map[any]any) and
  1479  			// make(map[any]any, hint) where hint <= BUCKETSIZE
  1480  			// specially allows for faster map initialization and
  1481  			// improves binary size by using calls with fewer arguments.
  1482  			// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
  1483  			// and no buckets will be allocated by makemap. Therefore,
  1484  			// no buckets need to be allocated in this code path.
  1485  			if n.Esc == EscNone {
  1486  				// Only need to initialize h.hash0 since
  1487  				// hmap h has been allocated on the stack already.
  1488  				// h.hash0 = fastrand()
  1489  				rand := mkcall("fastrand", types.Types[TUINT32], init)
  1490  				hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
  1491  				a := nod(OAS, nodSym(ODOT, h, hashsym), rand)
  1492  				a = typecheck(a, Etop)
  1493  				a = walkexpr(a, init)
  1494  				init.Append(a)
  1495  				n = nod(OCONVNOP, h, nil)
  1496  				n.Type = t
  1497  				n = typecheck(n, Erv)
  1498  			} else {
  1499  				// Call runtime.makemap_small to allocate an
  1500  				// hmap on the heap and initialize hmap's hash0 field.
  1501  				fn := syslook("makemap_small")
  1502  				fn = substArgTypes(fn, t.Key(), t.Val())
  1503  				n = mkcall1(fn, n.Type, init)
  1504  			}
  1505  		} else {
  1506  			if n.Esc != EscNone {
  1507  				h = nodnil()
  1508  			}
  1509  			// Map initialization with a variable or large hint is
  1510  			// more complicated. We therefore generate a call to
  1511  			// runtime.makemap to initialize hmap and allocate the
  1512  			// map buckets.
  1513  
  1514  			// When hint fits into int, use makemap instead of
  1515  			// makemap64, which is faster and shorter on 32 bit platforms.
  1516  			fnname := "makemap64"
  1517  			argtype := types.Types[TINT64]
  1518  
  1519  			// Type checking guarantees that TIDEAL hint is positive and fits in an int.
  1520  			// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
  1521  			// The case of hint overflow when converting TUINT or TUINTPTR to TINT
  1522  			// will be handled by the negative range checks in makemap during runtime.
  1523  			if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1524  				fnname = "makemap"
  1525  				argtype = types.Types[TINT]
  1526  			}
  1527  
  1528  			fn := syslook(fnname)
  1529  			fn = substArgTypes(fn, hmapType, t.Key(), t.Val())
  1530  			n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h)
  1531  		}
  1532  
  1533  	case OMAKESLICE:
  1534  		l := n.Left
  1535  		r := n.Right
  1536  		if r == nil {
  1537  			r = safeexpr(l, init)
  1538  			l = r
  1539  		}
  1540  		t := n.Type
  1541  		if n.Esc == EscNone {
  1542  			if !isSmallMakeSlice(n) {
  1543  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1544  			}
  1545  			// var arr [r]T
  1546  			// n = arr[:l]
  1547  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1548  			var_ := temp(t)
  1549  			a := nod(OAS, var_, nil) // zero temp
  1550  			a = typecheck(a, Etop)
  1551  			init.Append(a)
  1552  			r := nod(OSLICE, var_, nil) // arr[:l]
  1553  			r.SetSliceBounds(nil, l, nil)
  1554  			r = conv(r, n.Type) // in case n.Type is named.
  1555  			r = typecheck(r, Erv)
  1556  			r = walkexpr(r, init)
  1557  			n = r
  1558  		} else {
  1559  			// n escapes; set up a call to makeslice.
  1560  			// When len and cap can fit into int, use makeslice instead of
  1561  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1562  
  1563  			if t.Elem().NotInHeap() {
  1564  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1565  			}
  1566  
  1567  			len, cap := l, r
  1568  
  1569  			fnname := "makeslice64"
  1570  			argtype := types.Types[TINT64]
  1571  
  1572  			// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
  1573  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1574  			// will be handled by the negative range checks in makeslice during runtime.
  1575  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1576  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1577  				fnname = "makeslice"
  1578  				argtype = types.Types[TINT]
  1579  			}
  1580  
  1581  			fn := syslook(fnname)
  1582  			fn = substArgTypes(fn, t.Elem()) // any-1
  1583  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1584  		}
  1585  
  1586  	case ORUNESTR:
  1587  		a := nodnil()
  1588  		if n.Esc == EscNone {
  1589  			t := types.NewArray(types.Types[TUINT8], 4)
  1590  			var_ := temp(t)
  1591  			a = nod(OADDR, var_, nil)
  1592  		}
  1593  
  1594  		// intstring(*[4]byte, rune)
  1595  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1596  
  1597  	case OARRAYBYTESTR:
  1598  		a := nodnil()
  1599  		if n.Esc == EscNone {
  1600  			// Create temporary buffer for string on stack.
  1601  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1602  
  1603  			a = nod(OADDR, temp(t), nil)
  1604  		}
  1605  
  1606  		// slicebytetostring(*[32]byte, []byte) string;
  1607  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1608  
  1609  		// slicebytetostringtmp([]byte) string;
  1610  	case OARRAYBYTESTRTMP:
  1611  		n.Left = walkexpr(n.Left, init)
  1612  
  1613  		if !instrumenting {
  1614  			// Let the backend handle OARRAYBYTESTRTMP directly
  1615  			// to avoid a function call to slicebytetostringtmp.
  1616  			break
  1617  		}
  1618  
  1619  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1620  
  1621  		// slicerunetostring(*[32]byte, []rune) string;
  1622  	case OARRAYRUNESTR:
  1623  		a := nodnil()
  1624  
  1625  		if n.Esc == EscNone {
  1626  			// Create temporary buffer for string on stack.
  1627  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1628  
  1629  			a = nod(OADDR, temp(t), nil)
  1630  		}
  1631  
  1632  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1633  
  1634  		// stringtoslicebyte(*[32]byte, string) []byte;
  1635  	case OSTRARRAYBYTE:
  1636  		a := nodnil()
  1637  
  1638  		if n.Esc == EscNone {
  1639  			// Create temporary buffer for slice on stack.
  1640  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1641  
  1642  			a = nod(OADDR, temp(t), nil)
  1643  		}
  1644  
  1645  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1646  
  1647  	case OSTRARRAYBYTETMP:
  1648  		// []byte(string) conversion that creates a slice
  1649  		// referring to the actual string bytes.
  1650  		// This conversion is handled later by the backend and
  1651  		// is only for use by internal compiler optimizations
  1652  		// that know that the slice won't be mutated.
  1653  		// The only such case today is:
  1654  		// for i, c := range []byte(string)
  1655  		n.Left = walkexpr(n.Left, init)
  1656  
  1657  		// stringtoslicerune(*[32]rune, string) []rune
  1658  	case OSTRARRAYRUNE:
  1659  		a := nodnil()
  1660  
  1661  		if n.Esc == EscNone {
  1662  			// Create temporary buffer for slice on stack.
  1663  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1664  
  1665  			a = nod(OADDR, temp(t), nil)
  1666  		}
  1667  
  1668  		n = mkcall("stringtoslicerune", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1669  
  1670  		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  1671  	case OCMPIFACE:
  1672  		if !eqtype(n.Left.Type, n.Right.Type) {
  1673  			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  1674  		}
  1675  		var fn *Node
  1676  		if n.Left.Type.IsEmptyInterface() {
  1677  			fn = syslook("efaceeq")
  1678  		} else {
  1679  			fn = syslook("ifaceeq")
  1680  		}
  1681  
  1682  		n.Right = cheapexpr(n.Right, init)
  1683  		n.Left = cheapexpr(n.Left, init)
  1684  		lt := nod(OITAB, n.Left, nil)
  1685  		rt := nod(OITAB, n.Right, nil)
  1686  		ld := nod(OIDATA, n.Left, nil)
  1687  		rd := nod(OIDATA, n.Right, nil)
  1688  		ld.Type = types.Types[TUNSAFEPTR]
  1689  		rd.Type = types.Types[TUNSAFEPTR]
  1690  		ld.SetTypecheck(1)
  1691  		rd.SetTypecheck(1)
  1692  		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  1693  
  1694  		// Check itable/type before full compare.
  1695  		// Note: short-circuited because order matters.
  1696  		// TODO(marvin): Fix Node.EType type union.
  1697  		var cmp *Node
  1698  		if Op(n.Etype) == OEQ {
  1699  			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  1700  		} else {
  1701  			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  1702  		}
  1703  		cmp = typecheck(cmp, Erv)
  1704  		cmp = walkexpr(cmp, init)
  1705  		cmp.Type = n.Type
  1706  		n = cmp
  1707  
  1708  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1709  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1710  			// n can be directly represented in the read-only data section.
  1711  			// Make direct reference to the static data. See issue 12841.
  1712  			vstat := staticname(n.Type)
  1713  			vstat.Name.SetReadonly(true)
  1714  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1715  			n = vstat
  1716  			n = typecheck(n, Erv)
  1717  			break
  1718  		}
  1719  		var_ := temp(n.Type)
  1720  		anylit(n, var_, init)
  1721  		n = var_
  1722  
  1723  	case OSEND:
  1724  		n1 := n.Right
  1725  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1726  		n1 = walkexpr(n1, init)
  1727  		n1 = nod(OADDR, n1, nil)
  1728  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1729  
  1730  	case OCLOSURE:
  1731  		n = walkclosure(n, init)
  1732  
  1733  	case OCALLPART:
  1734  		n = walkpartialcall(n, init)
  1735  	}
  1736  
  1737  	// Expressions that are constant at run time but not
  1738  	// considered const by the language spec are not turned into
  1739  	// constants until walk. For example, if n is y%1 == 0, the
  1740  	// walk of y%1 may have replaced it by 0.
  1741  	// Check whether n with its updated args is itself now a constant.
  1742  	t := n.Type
  1743  	evconst(n)
  1744  	if n.Type != t {
  1745  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1746  	}
  1747  	if n.Op == OLITERAL {
  1748  		n = typecheck(n, Erv)
  1749  		// Emit string symbol now to avoid emitting
  1750  		// any concurrently during the backend.
  1751  		if s, ok := n.Val().U.(string); ok {
  1752  			_ = stringsym(n.Pos, s)
  1753  		}
  1754  	}
  1755  
  1756  	updateHasCall(n)
  1757  
  1758  	if Debug['w'] != 0 && n != nil {
  1759  		Dump("walk", n)
  1760  	}
  1761  
  1762  	lineno = lno
  1763  	return n
  1764  }
  1765  
  1766  // TODO(josharian): combine this with its caller and simplify
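        // reduceSlice drops redundant slice bounds: e.g. (illustrative)
        // x[i:len(x)] is reduced to x[i:], and a plain x[:] is reduced to x.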
  1767  func reduceSlice(n *Node) *Node {
  1768  	low, high, max := n.SliceBounds()
  1769  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1770  		// Reduce x[i:len(x)] to x[i:].
  1771  		high = nil
  1772  	}
  1773  	n.SetSliceBounds(low, high, max)
  1774  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1775  		// Reduce x[:] to x.
  1776  		if Debug_slice > 0 {
  1777  			Warn("slice: omit slice operation")
  1778  		}
  1779  		return n.Left
  1780  	}
  1781  	return n
  1782  }
  1783  
  1784  func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1785  	// convas will turn map assigns into function calls,
  1786  	// making it impossible for reorder3 to work.
  1787  	n := nod(OAS, l, r)
  1788  
  1789  	if l.Op == OINDEXMAP {
  1790  		return n
  1791  	}
  1792  
  1793  	return convas(n, init)
  1794  }
  1795  
  1796  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1797  	// check assign expression list to
  1798  	// an expression list. called in
  1799  	//	expr-list = expr-list
  1800  
  1801  	// ensure order of evaluation for function calls
  1802  	for i := range nl {
  1803  		nl[i] = safeexpr(nl[i], init)
  1804  	}
  1805  	for i1 := range nr {
  1806  		nr[i1] = safeexpr(nr[i1], init)
  1807  	}
  1808  
  1809  	var nn []*Node
  1810  	i := 0
  1811  	for ; i < len(nl); i++ {
  1812  		if i >= len(nr) {
  1813  			break
  1814  		}
  1815  		// Do not generate 'x = x' during return. See issue 4014.
  1816  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1817  			continue
  1818  		}
  1819  		nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1820  	}
  1821  
  1822  	// cannot happen: caller checked that lists had same length
  1823  	if i < len(nl) || i < len(nr) {
  1824  		var nln, nrn Nodes
  1825  		nln.Set(nl)
  1826  		nrn.Set(nr)
  1827  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1828  	}
  1829  	return nn
  1830  }
  1831  
  1832  // l is an lvalue and rt is the type of an rvalue being assigned to it.
  1833  // fncall reports whether the assignment requires a function call,
  1834  // either to evaluate the lvalue (for example, a map index) or to
  1835  // convert between the two types.
  1836  func fncall(l *Node, rt *types.Type) bool {
  1837  	if l.HasCall() || l.Op == OINDEXMAP {
  1838  		return true
  1839  	}
  1840  	if eqtype(l.Type, rt) {
  1841  		return false
  1842  	}
  1843  	return true
  1844  }
  1845  
  1846  // check assign type list to
  1847  // an expression list. called in
  1848  //	expr-list = func()
  1849  func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1850  	if nl.Len() != nr.NumFields() {
  1851  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1852  	}
  1853  
  1854  	var nn, mm Nodes
  1855  	for i, l := range nl.Slice() {
  1856  		if isblank(l) {
  1857  			continue
  1858  		}
  1859  		r := nr.Field(i)
  1860  
  1861  		// any lv that causes a fn call must be
  1862  		// deferred until all the return arguments
  1863  		// have been pulled from the output arguments
  1864  		if fncall(l, r.Type) {
  1865  			tmp := temp(r.Type)
  1866  			tmp = typecheck(tmp, Erv)
  1867  			a := nod(OAS, l, tmp)
  1868  			a = convas(a, &mm)
  1869  			mm.Append(a)
  1870  			l = tmp
  1871  		}
  1872  
  1873  		a := nod(OAS, l, nodarg(r, 0))
  1874  		a = convas(a, &nn)
  1875  		updateHasCall(a)
  1876  		if a.HasCall() {
  1877  			Dump("ascompatet ucount", a)
  1878  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1879  		}
  1880  
  1881  		nn.Append(a)
  1882  	}
  1883  	return append(nn.Slice(), mm.Slice()...)
  1884  }
  1885  
  1886  // nodarg returns a Node for the function argument denoted by t,
  1887  // which is either the entire function argument or result struct (t is a struct *types.Type)
  1888  // or a specific argument (t is a *types.Field within a struct *types.Type).
  1889  //
  1890  // If fp is 0, the node is for use by a caller invoking the given
  1891  // function, preparing the arguments before the call
  1892  // or retrieving the results after the call.
  1893  // In this case, the node will correspond to an outgoing argument
  1894  // slot like 8(SP).
  1895  //
  1896  // If fp is 1, the node is for use by the function itself
  1897  // (the callee), to retrieve its arguments or write its results.
  1898  // In this case the node will be an ONAME with an appropriate
  1899  // type and offset.
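        //
        // For example (illustrative), for func f(a int) (r int), fp=0 produces
        // OINDREGSP nodes addressing the caller's outgoing argument slots,
        // while fp=1 produces the PPARAM/PPARAMOUT ONAMEs for a and r as seen
        // from inside f.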
  1900  func nodarg(t interface{}, fp int) *Node {
  1901  	var n *Node
  1902  
  1903  	var funarg types.Funarg
  1904  	switch t := t.(type) {
  1905  	default:
  1906  		Fatalf("bad nodarg %T(%v)", t, t)
  1907  
  1908  	case *types.Type:
  1909  		// Entire argument struct, not just one arg
  1910  		if !t.IsFuncArgStruct() {
  1911  			Fatalf("nodarg: bad type %v", t)
  1912  		}
  1913  		funarg = t.StructType().Funarg
  1914  
  1915  		// Build fake variable name for whole arg struct.
  1916  		n = newname(lookup(".args"))
  1917  		n.Type = t
  1918  		first := t.Field(0)
  1919  		if first == nil {
  1920  			Fatalf("nodarg: bad struct")
  1921  		}
  1922  		if first.Offset == BADWIDTH {
  1923  			Fatalf("nodarg: offset not computed for %v", t)
  1924  		}
  1925  		n.Xoffset = first.Offset
  1926  
  1927  	case *types.Field:
  1928  		funarg = t.Funarg
  1929  		if fp == 1 {
  1930  			// NOTE(rsc): This should be using t.Nname directly,
  1931  			// except in the case where t.Nname.Sym is the blank symbol and
  1932  			// so the assignment would be discarded during code generation.
  1933  			// In that case we need to make a new node, and there is no harm
  1934  			// in optimization passes to doing so. But otherwise we should
  1935  			// definitely be using the actual declaration and not a newly built node.
  1936  			// The extra Fatalf checks here are verifying that this is the case,
  1937  			// without changing the actual logic (at time of writing, it's getting
  1938  			// toward time for the Go 1.7 beta).
  1939  			// At some quieter time (assuming we've never seen these Fatalfs happen)
  1940  			// we could change this code to use "expect" directly.
  1941  			expect := asNode(t.Nname)
  1942  			if expect.isParamHeapCopy() {
  1943  				expect = expect.Name.Param.Stackcopy
  1944  			}
  1945  
  1946  			for _, n := range Curfn.Func.Dcl {
  1947  				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
  1948  					if n != expect {
  1949  						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
  1950  					}
  1951  					return n
  1952  				}
  1953  			}
  1954  
  1955  			if !expect.Sym.IsBlank() {
  1956  				Fatalf("nodarg: did not find node in dcl list: %v", expect)
  1957  			}
  1958  		}
  1959  
  1960  		// Build fake name for individual variable.
  1961  		// This is safe because if there was a real declared name
  1962  		// we'd have used it above.
  1963  		n = newname(lookup("__"))
  1964  		n.Type = t.Type
  1965  		if t.Offset == BADWIDTH {
  1966  			Fatalf("nodarg: offset not computed for %v", t)
  1967  		}
  1968  		n.Xoffset = t.Offset
  1969  		n.Orig = asNode(t.Nname)
  1970  	}
  1971  
  1972  	// Rewrite argument named _ to __,
  1973  	// or else the assignment to _ will be
  1974  	// discarded during code generation.
  1975  	if isblank(n) {
  1976  		n.Sym = lookup("__")
  1977  	}
  1978  
  1979  	switch fp {
  1980  	default:
  1981  		Fatalf("bad fp")
  1982  
  1983  	case 0: // preparing arguments for call
  1984  		n.Op = OINDREGSP
  1985  		n.Xoffset += Ctxt.FixedFrameSize()
  1986  
  1987  	case 1: // reading arguments inside call
  1988  		n.SetClass(PPARAM)
  1989  		if funarg == types.FunargResults {
  1990  			n.SetClass(PPARAMOUT)
  1991  		}
  1992  	}
  1993  
  1994  	n.SetTypecheck(1)
  1995  	n.SetAddrtaken(true) // keep optimizers at bay
  1996  	return n
  1997  }
  1998  
  1999  // package all the arguments that match a ... T parameter into a []T.
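        // For example (illustrative), for func f(xs ...int), the non-... call
        // f(1, 2, 3) has its trailing arguments packaged into one []int{1, 2, 3}.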
  2000  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  2001  	esc := uint16(EscUnknown)
  2002  	if ddd != nil {
  2003  		esc = ddd.Esc
  2004  	}
  2005  
  2006  	if len(args) == 0 {
  2007  		n := nodnil()
  2008  		n.Type = typ
  2009  		return n
  2010  	}
  2011  
  2012  	n := nod(OCOMPLIT, nil, typenod(typ))
  2013  	if ddd != nil && prealloc[ddd] != nil {
  2014  		prealloc[n] = prealloc[ddd] // temporary to use
  2015  	}
  2016  	n.List.Set(args)
  2017  	n.Esc = esc
  2018  	n = typecheck(n, Erv)
  2019  	if n.Type == nil {
  2020  		Fatalf("mkdotargslice: typecheck failed")
  2021  	}
  2022  	n = walkexpr(n, init)
  2023  	return n
  2024  }
  2025  
  2026  // check assign expression list to
  2027  // a type list. called in
  2028  //	return expr-list
  2029  //	func(expr-list)
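        //
        // For example (illustrative), for f(g()) where g returns (int, string),
        // g's two results are assigned (via temporaries when conversions are
        // needed) into f's two argument slots.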
  2030  func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
  2031  	// f(g()) where g has multiple return values
  2032  	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
  2033  		// optimization - can do block copy
  2034  		if eqtypenoname(rhs[0].Type, lhs) {
  2035  			nl := nodarg(lhs, fp)
  2036  			nr := nod(OCONVNOP, rhs[0], nil)
  2037  			nr.Type = nl.Type
  2038  			n := convas(nod(OAS, nl, nr), init)
  2039  			n.SetTypecheck(1)
  2040  			return []*Node{n}
  2041  		}
  2042  
  2043  		// conversions involved.
  2044  		// copy into temporaries.
  2045  		var tmps []*Node
  2046  		for _, nr := range rhs[0].Type.FieldSlice() {
  2047  			tmps = append(tmps, temp(nr.Type))
  2048  		}
  2049  
  2050  		a := nod(OAS2, nil, nil)
  2051  		a.List.Set(tmps)
  2052  		a.Rlist.Set(rhs)
  2053  		a = typecheck(a, Etop)
  2054  		a = walkstmt(a)
  2055  		init.Append(a)
  2056  
  2057  		rhs = tmps
  2058  	}
  2059  
  2060  	// For each parameter (LHS), assign its corresponding argument (RHS).
  2061  	// If there's a ... parameter (which is only valid as the final
  2062  	// parameter) and this is not a ... call expression,
  2063  	// then assign the remaining arguments as a slice.
  2064  	var nn []*Node
  2065  	for i, nl := range lhs.FieldSlice() {
  2066  		var nr *Node
  2067  		if nl.Isddd() && !isddd {
  2068  			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
  2069  		} else {
  2070  			nr = rhs[i]
  2071  		}
  2072  
  2073  		a := nod(OAS, nodarg(nl, fp), nr)
  2074  		a = convas(a, init)
  2075  		a.SetTypecheck(1)
  2076  		nn = append(nn, a)
  2077  	}
  2078  
  2079  	return nn
  2080  }
  2081  
  2082  // walkprint generates code for the print and println built-ins.
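        // For example (illustrative), println("x =", x) with x an int is lowered
        // to roughly:
        //	printlock(); printstring("x = "); printint(int64(x)); printnl(); printunlock()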
  2083  func walkprint(nn *Node, init *Nodes) *Node {
  2084  	// Hoist all the argument evaluation up before the lock.
  2085  	walkexprlistcheap(nn.List.Slice(), init)
  2086  
  2087  	// For println, add " " between elements and "\n" at the end.
  2088  	if nn.Op == OPRINTN {
  2089  		s := nn.List.Slice()
  2090  		t := make([]*Node, 0, len(s)*2)
  2091  		for i, n := range s {
  2092  			if i != 0 {
  2093  				t = append(t, nodstr(" "))
  2094  			}
  2095  			t = append(t, n)
  2096  		}
  2097  		t = append(t, nodstr("\n"))
  2098  		nn.List.Set(t)
  2099  	}
  2100  
  2101  	// Collapse runs of constant strings.
  2102  	s := nn.List.Slice()
  2103  	t := make([]*Node, 0, len(s))
  2104  	for i := 0; i < len(s); {
  2105  		var strs []string
  2106  		for i < len(s) && Isconst(s[i], CTSTR) {
  2107  			strs = append(strs, s[i].Val().U.(string))
  2108  			i++
  2109  		}
  2110  		if len(strs) > 0 {
  2111  			t = append(t, nodstr(strings.Join(strs, "")))
  2112  		}
  2113  		if i < len(s) {
  2114  			t = append(t, s[i])
  2115  			i++
  2116  		}
  2117  	}
  2118  	nn.List.Set(t)
  2119  
  2120  	calls := []*Node{mkcall("printlock", nil, init)}
  2121  	for i, n := range nn.List.Slice() {
  2122  		if n.Op == OLITERAL {
  2123  			switch n.Val().Ctype() {
  2124  			case CTRUNE:
  2125  				n = defaultlit(n, types.Runetype)
  2126  
  2127  			case CTINT:
  2128  				n = defaultlit(n, types.Types[TINT64])
  2129  
  2130  			case CTFLT:
  2131  				n = defaultlit(n, types.Types[TFLOAT64])
  2132  			}
  2133  		}
  2134  
  2135  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  2136  			n = defaultlit(n, types.Types[TINT64])
  2137  		}
  2138  		n = defaultlit(n, nil)
  2139  		nn.List.SetIndex(i, n)
  2140  		if n.Type == nil || n.Type.Etype == TFORW {
  2141  			continue
  2142  		}
  2143  
  2144  		var on *Node
  2145  		switch n.Type.Etype {
  2146  		case TINTER:
  2147  			if n.Type.IsEmptyInterface() {
  2148  				on = syslook("printeface")
  2149  			} else {
  2150  				on = syslook("printiface")
  2151  			}
  2152  			on = substArgTypes(on, n.Type) // any-1
  2153  		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  2154  			on = syslook("printpointer")
  2155  			on = substArgTypes(on, n.Type) // any-1
  2156  		case TSLICE:
  2157  			on = syslook("printslice")
  2158  			on = substArgTypes(on, n.Type) // any-1
  2159  		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
  2160  			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  2161  				on = syslook("printhex")
  2162  			} else {
  2163  				on = syslook("printuint")
  2164  			}
  2165  		case TINT, TINT8, TINT16, TINT32, TINT64:
  2166  			on = syslook("printint")
  2167  		case TFLOAT32, TFLOAT64:
  2168  			on = syslook("printfloat")
  2169  		case TCOMPLEX64, TCOMPLEX128:
  2170  			on = syslook("printcomplex")
  2171  		case TBOOL:
  2172  			on = syslook("printbool")
  2173  		case TSTRING:
  2174  			cs := ""
  2175  			if Isconst(n, CTSTR) {
  2176  				cs = n.Val().U.(string)
  2177  			}
  2178  			switch cs {
  2179  			case " ":
  2180  				on = syslook("printsp")
  2181  			case "\n":
  2182  				on = syslook("printnl")
  2183  			default:
  2184  				on = syslook("printstring")
  2185  			}
  2186  		default:
  2187  			badtype(OPRINT, n.Type, nil)
  2188  			continue
  2189  		}
  2190  
  2191  		r := nod(OCALL, on, nil)
  2192  		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  2193  			t := params[0].Type
  2194  			if !eqtype(t, n.Type) {
  2195  				n = nod(OCONV, n, nil)
  2196  				n.Type = t
  2197  			}
  2198  			r.List.Append(n)
  2199  		}
  2200  		calls = append(calls, r)
  2201  	}
  2202  
  2203  	calls = append(calls, mkcall("printunlock", nil, init))
  2204  
  2205  	typecheckslice(calls, Etop)
  2206  	walkexprlist(calls, init)
  2207  
  2208  	r := nod(OEMPTY, nil, nil)
  2209  	r = typecheck(r, Etop)
  2210  	r = walkexpr(r, init)
  2211  	r.Ninit.Set(calls)
  2212  	return r
  2213  }
  2214  
  2215  func callnew(t *types.Type) *Node {
  2216  	if t.NotInHeap() {
  2217  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  2218  	}
  2219  	dowidth(t)
  2220  	fn := syslook("newobject")
  2221  	fn = substArgTypes(fn, t)
  2222  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  2223  	v.SetNonNil(true)
  2224  	return v
  2225  }
  2226  
  2227  func iscallret(n *Node) bool {
  2228  	if n == nil {
  2229  		return false
  2230  	}
  2231  	n = outervalue(n)
  2232  	return n.Op == OINDREGSP
  2233  }
  2234  
  2235  // isReflectHeaderDataField reports whether l is an expression p.Data
  2236  // where p has type reflect.SliceHeader or reflect.StringHeader.
  2237  func isReflectHeaderDataField(l *Node) bool {
  2238  	if l.Type != types.Types[TUINTPTR] {
  2239  		return false
  2240  	}
  2241  
  2242  	var tsym *types.Sym
  2243  	switch l.Op {
  2244  	case ODOT:
  2245  		tsym = l.Left.Type.Sym
  2246  	case ODOTPTR:
  2247  		tsym = l.Left.Type.Elem().Sym
  2248  	default:
  2249  		return false
  2250  	}
  2251  
  2252  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  2253  		return false
  2254  	}
  2255  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  2256  }
  2257  
  2258  func convas(n *Node, init *Nodes) *Node {
  2259  	if n.Op != OAS {
  2260  		Fatalf("convas: not OAS %v", n.Op)
  2261  	}
  2262  	defer updateHasCall(n)
  2263  
  2264  	n.SetTypecheck(1)
  2265  
  2266  	if n.Left == nil || n.Right == nil {
  2267  		return n
  2268  	}
  2269  
  2270  	lt := n.Left.Type
  2271  	rt := n.Right.Type
  2272  	if lt == nil || rt == nil {
  2273  		return n
  2274  	}
  2275  
  2276  	if isblank(n.Left) {
  2277  		n.Right = defaultlit(n.Right, nil)
  2278  		return n
  2279  	}
  2280  
  2281  	if !eqtype(lt, rt) {
  2282  		n.Right = assignconv(n.Right, lt, "assignment")
  2283  		n.Right = walkexpr(n.Right, init)
  2284  	}
  2285  	dowidth(n.Right.Type)
  2286  
  2287  	return n
  2288  }
  2289  
  2290  // reorder1 is called from ascompat[te] when evaluating the actual
  2291  // arguments of a function call:
  2292  //	f(a,b)
  2293  // If exactly one argument contains a function call, that argument is
  2294  // evaluated first. Otherwise all but the last such call are assigned
  2295  // to temporaries so that evaluation order is preserved.
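        // For example (illustrative), for f(g(), h(), 1) the result of g() is
        // copied to a temporary, the h() argument is evaluated next, and the
        // remaining argument assignments follow.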
  2296  func reorder1(all []*Node) []*Node {
  2297  	if len(all) == 1 {
  2298  		return all
  2299  	}
  2300  
  2301  	funcCalls := 0
  2302  	for _, n := range all {
  2303  		updateHasCall(n)
  2304  		if n.HasCall() {
  2305  			funcCalls++
  2306  		}
  2307  	}
  2308  	if funcCalls == 0 {
  2309  		return all
  2310  	}
  2311  
  2312  	var g []*Node // fncalls assigned to tempnames
  2313  	var f *Node   // last fncall assigned to stack
  2314  	var r []*Node // non fncalls and tempnames assigned to stack
  2315  	d := 0
  2316  	for _, n := range all {
  2317  		if !n.HasCall() {
  2318  			r = append(r, n)
  2319  			continue
  2320  		}
  2321  
  2322  		d++
  2323  		if d == funcCalls {
  2324  			f = n
  2325  			continue
  2326  		}
  2327  
  2328  		// make assignment of fncall to tempname
  2329  		a := temp(n.Right.Type)
  2330  
  2331  		a = nod(OAS, a, n.Right)
  2332  		g = append(g, a)
  2333  
  2334  		// put normal arg assignment on list
  2335  		// with fncall replaced by tempname
  2336  		n.Right = a.Left
  2337  
  2338  		r = append(r, n)
  2339  	}
  2340  
  2341  	if f != nil {
  2342  		g = append(g, f)
  2343  	}
  2344  	return append(g, r...)
  2345  }
  2346  
  2347  // reorder3 is called from ascompat[ee] for a simultaneous assignment:
  2348  //	a,b = c,d
  2349  // It ensures that no later expression observes an earlier lvalue after
  2350  // that lvalue has been assigned.
  2351  //
  2352  // Function calls have already been removed by this point.
  2353  func reorder3(all []*Node) []*Node {
  2354  	// If a needed expression may be affected by an
  2355  	// earlier assignment, make an early copy of that
  2356  	// expression and use the copy instead.
  2357  	var early []*Node
  2358  
  2359  	var mapinit Nodes
  2360  	for i, n := range all {
  2361  		l := n.Left
  2362  
  2363  		// Save subexpressions needed on left side.
  2364  		// Drill through non-dereferences.
  2365  		for {
  2366  			if l.Op == ODOT || l.Op == OPAREN {
  2367  				l = l.Left
  2368  				continue
  2369  			}
  2370  
  2371  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2372  				l.Right = reorder3save(l.Right, all, i, &early)
  2373  				l = l.Left
  2374  				continue
  2375  			}
  2376  
  2377  			break
  2378  		}
  2379  
  2380  		switch l.Op {
  2381  		default:
  2382  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2383  
  2384  		case ONAME:
  2385  			break
  2386  
  2387  		case OINDEX, OINDEXMAP:
  2388  			l.Left = reorder3save(l.Left, all, i, &early)
  2389  			l.Right = reorder3save(l.Right, all, i, &early)
  2390  			if l.Op == OINDEXMAP {
  2391  				all[i] = convas(all[i], &mapinit)
  2392  			}
  2393  
  2394  		case OIND, ODOTPTR:
  2395  			l.Left = reorder3save(l.Left, all, i, &early)
  2396  		}
  2397  
  2398  		// Save expression on right side.
  2399  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2400  	}
  2401  
  2402  	early = append(mapinit.Slice(), early...)
  2403  	return append(early, all...)
  2404  }
  2405  
  2406  // reorder3save returns a replacement for n: if the evaluation of n
  2407  // would be affected by the assignments in all up to but not including
  2408  // the ith assignment, it copies n into a temporary (appending the copy
  2409  // to early) and returns that temporary; otherwise it returns n unchanged.
  2410  // The result of reorder3save MUST be assigned back to n, e.g.
  2411  // 	n.Left = reorder3save(n.Left, all, i, early)
  2412  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2413  	if !aliased(n, all, i) {
  2414  		return n
  2415  	}
  2416  
  2417  	q := temp(n.Type)
  2418  	q = nod(OAS, q, n)
  2419  	q = typecheck(q, Etop)
  2420  	*early = append(*early, q)
  2421  	return q.Left
  2422  }
  2423  
  2424  // what's the outer value that a write to n affects?
  2425  // outer value means containing struct or array.
  2426  func outervalue(n *Node) *Node {
  2427  	for {
  2428  		switch n.Op {
  2429  		case OXDOT:
  2430  			Fatalf("OXDOT in walk")
  2431  		case ODOT, OPAREN, OCONVNOP:
  2432  			n = n.Left
  2433  			continue
  2434  		case OINDEX:
  2435  			if n.Left.Type != nil && n.Left.Type.IsArray() {
  2436  				n = n.Left
  2437  				continue
  2438  			}
  2439  		}
  2440  
  2441  		return n
  2442  	}
  2443  }
  2444  
  2445  // aliased reports whether the computation of n might be
  2446  // affected by writes in all up to but not including the ith element.
  2447  func aliased(n *Node, all []*Node, i int) bool {
  2448  	if n == nil {
  2449  		return false
  2450  	}
  2451  
  2452  	// Treat all fields of a struct as referring to the whole struct.
  2453  	// We could do better but we would have to keep track of the fields.
  2454  	for n.Op == ODOT {
  2455  		n = n.Left
  2456  	}
  2457  
  2458  	// Look for obvious aliasing: a variable being assigned
  2459  	// during the all list and appearing in n.
  2460  	// Also record whether there are any writes to main memory.
  2461  	// Also record whether there are any writes to variables
  2462  	// whose addresses have been taken.
  2463  	memwrite := false
  2464  	varwrite := false
  2465  	for _, an := range all[:i] {
  2466  		a := outervalue(an.Left)
  2467  
  2468  		for a.Op == ODOT {
  2469  			a = a.Left
  2470  		}
  2471  
  2472  		if a.Op != ONAME {
  2473  			memwrite = true
  2474  			continue
  2475  		}
  2476  
  2477  		switch n.Class() {
  2478  		default:
  2479  			varwrite = true
  2480  			continue
  2481  
  2482  		case PAUTO, PPARAM, PPARAMOUT:
  2483  			if n.Addrtaken() {
  2484  				varwrite = true
  2485  				continue
  2486  			}
  2487  
  2488  			if vmatch2(a, n) {
  2489  				// Direct hit.
  2490  				return true
  2491  			}
  2492  		}
  2493  	}
  2494  
  2495  	// The variables being written do not appear in n.
  2496  	// However, n might refer to computed addresses
  2497  	// that are being written.
  2498  
  2499  	// If no computed addresses are affected by the writes, no aliasing.
  2500  	if !memwrite && !varwrite {
  2501  		return false
  2502  	}
  2503  
  2504  	// If n does not refer to computed addresses
  2505  	// (that is, if n only refers to variables whose addresses
  2506  	// have not been taken), no aliasing.
  2507  	if varexpr(n) {
  2508  		return false
  2509  	}
  2510  
  2511  	// Otherwise, both the writes and n refer to computed memory addresses.
  2512  	// Assume that they might conflict.
  2513  	return true
  2514  }
  2515  
  2516  // does the evaluation of n only refer to variables
  2517  // whose addresses have not been taken?
  2518  // (and no other memory)
  2519  func varexpr(n *Node) bool {
  2520  	if n == nil {
  2521  		return true
  2522  	}
  2523  
  2524  	switch n.Op {
  2525  	case OLITERAL:
  2526  		return true
  2527  
  2528  	case ONAME:
  2529  		switch n.Class() {
  2530  		case PAUTO, PPARAM, PPARAMOUT:
  2531  			if !n.Addrtaken() {
  2532  				return true
  2533  			}
  2534  		}
  2535  
  2536  		return false
  2537  
  2538  	case OADD,
  2539  		OSUB,
  2540  		OOR,
  2541  		OXOR,
  2542  		OMUL,
  2543  		ODIV,
  2544  		OMOD,
  2545  		OLSH,
  2546  		ORSH,
  2547  		OAND,
  2548  		OANDNOT,
  2549  		OPLUS,
  2550  		OMINUS,
  2551  		OCOM,
  2552  		OPAREN,
  2553  		OANDAND,
  2554  		OOROR,
  2555  		OCONV,
  2556  		OCONVNOP,
  2557  		OCONVIFACE,
  2558  		ODOTTYPE:
  2559  		return varexpr(n.Left) && varexpr(n.Right)
  2560  
  2561  	case ODOT: // but not ODOTPTR
  2562  		// Should have been handled in aliased.
  2563  		Fatalf("varexpr unexpected ODOT")
  2564  	}
  2565  
  2566  	// Be conservative.
  2567  	return false
  2568  }
  2569  
  2570  // is the name l mentioned in r?
  2571  func vmatch2(l *Node, r *Node) bool {
  2572  	if r == nil {
  2573  		return false
  2574  	}
  2575  	switch r.Op {
  2576  	// match each right given left
  2577  	case ONAME:
  2578  		return l == r
  2579  
  2580  	case OLITERAL:
  2581  		return false
  2582  	}
  2583  
  2584  	if vmatch2(l, r.Left) {
  2585  		return true
  2586  	}
  2587  	if vmatch2(l, r.Right) {
  2588  		return true
  2589  	}
  2590  	for _, n := range r.List.Slice() {
  2591  		if vmatch2(l, n) {
  2592  			return true
  2593  		}
  2594  	}
  2595  	return false
  2596  }
  2597  
  2598  // is any name mentioned in l also mentioned in r?
  2599  // called by sinit.go
  2600  func vmatch1(l *Node, r *Node) bool {
  2601  	// isolate all left sides
  2602  	if l == nil || r == nil {
  2603  		return false
  2604  	}
  2605  	switch l.Op {
  2606  	case ONAME:
  2607  		switch l.Class() {
  2608  		case PPARAM, PAUTO:
  2609  			break
  2610  
  2611  		default:
  2612  			// assignment to non-stack variable must be
  2613  			// delayed if right has function calls.
  2614  			if r.HasCall() {
  2615  				return true
  2616  			}
  2617  		}
  2618  
  2619  		return vmatch2(l, r)
  2620  
  2621  	case OLITERAL:
  2622  		return false
  2623  	}
  2624  
  2625  	if vmatch1(l.Left, r) {
  2626  		return true
  2627  	}
  2628  	if vmatch1(l.Right, r) {
  2629  		return true
  2630  	}
  2631  	for _, n := range l.List.Slice() {
  2632  		if vmatch1(n, r) {
  2633  			return true
  2634  		}
  2635  	}
  2636  	return false
  2637  }
  2638  
  2639  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2640  // and to copy non-result parameters' values from the stack.
  2641  func paramstoheap(params *types.Type) []*Node {
  2642  	var nn []*Node
  2643  	for _, t := range params.Fields().Slice() {
  2644  		v := asNode(t.Nname)
  2645  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2646  			v = nil
  2647  		}
  2648  		if v == nil {
  2649  			continue
  2650  		}
  2651  
  2652  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2653  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2654  			if stackcopy.Class() == PPARAM {
  2655  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2656  			}
  2657  		}
  2658  	}
  2659  
  2660  	return nn
  2661  }
  2662  
  2663  // zeroResults zeros the return values at the start of the function.
  2664  // We need to do this very early in the function.  Defer might stop a
  2665  // panic and show the return values as they exist at the time of
  2666  // panic.  For precise stacks, the garbage collector assumes results
  2667  // are always live, so we need to zero them before any allocations,
  2668  // even allocations to move params/results to the heap.
  2669  // The generated code is added to Curfn's Enter list.
  2670  func zeroResults() {
  2671  	lno := lineno
  2672  	lineno = Curfn.Pos
  2673  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2674  		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
  2675  			// The local which points to the return value is the
  2676  			// thing that needs zeroing. This is already handled
  2677  			// by a Needzero annotation in plive.go:livenessepilogue.
  2678  			continue
  2679  		}
  2680  		// Zero the stack location containing f.
  2681  		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
  2682  	}
  2683  	lineno = lno
  2684  }
  2685  
  2686  // returnsfromheap returns code to copy values for heap-escaped parameters
  2687  // back to the stack.
  2688  func returnsfromheap(params *types.Type) []*Node {
  2689  	var nn []*Node
  2690  	for _, t := range params.Fields().Slice() {
  2691  		v := asNode(t.Nname)
  2692  		if v == nil {
  2693  			continue
  2694  		}
  2695  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2696  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2697  		}
  2698  	}
  2699  
  2700  	return nn
  2701  }
  2702  
  2703  // heapmoves generates code to handle migrating heap-escaped parameters
  2704  // between the stack and the heap. The generated code is added to Curfn's
  2705  // Enter and Exit lists.
  2706  func heapmoves() {
  2707  	lno := lineno
  2708  	lineno = Curfn.Pos
  2709  	nn := paramstoheap(Curfn.Type.Recvs())
  2710  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2711  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2712  	Curfn.Func.Enter.Append(nn...)
  2713  	lineno = Curfn.Func.Endlineno
  2714  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2715  	lineno = lno
  2716  }
  2717  
  2718  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2719  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2720  		Fatalf("mkcall %v %v", fn, fn.Type)
  2721  	}
  2722  
  2723  	n := fn.Type.NumParams()
  2724  	if n != len(va) {
  2725  		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
  2726  	}
  2727  
  2728  	r := nod(OCALL, fn, nil)
  2729  	r.List.Set(va)
  2730  	if fn.Type.NumResults() > 0 {
  2731  		r = typecheck(r, Erv|Efnstruct)
  2732  	} else {
  2733  		r = typecheck(r, Etop)
  2734  	}
  2735  	r = walkexpr(r, init)
  2736  	r.Type = t
  2737  	return r
  2738  }
  2739  
  2740  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2741  	return vmkcall(syslook(name), t, init, args)
  2742  }
  2743  
  2744  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2745  	return vmkcall(fn, t, init, args)
  2746  }
  2747  
  2748  func conv(n *Node, t *types.Type) *Node {
  2749  	if eqtype(n.Type, t) {
  2750  		return n
  2751  	}
  2752  	n = nod(OCONV, n, nil)
  2753  	n.Type = t
  2754  	n = typecheck(n, Erv)
  2755  	return n
  2756  }
  2757  
  2758  // byteindex converts n, which is byte-sized, to a uint8.
  2759  // We cannot use conv, because we allow converting bool to uint8 here,
  2760  // which is forbidden in user code.
  2761  func byteindex(n *Node) *Node {
  2762  	if eqtype(n.Type, types.Types[TUINT8]) {
  2763  		return n
  2764  	}
  2765  	n = nod(OCONV, n, nil)
  2766  	n.Type = types.Types[TUINT8]
  2767  	n.SetTypecheck(1)
  2768  	return n
  2769  }
  2770  
  2771  func chanfn(name string, n int, t *types.Type) *Node {
  2772  	if !t.IsChan() {
  2773  		Fatalf("chanfn %v", t)
  2774  	}
  2775  	fn := syslook(name)
  2776  	switch n {
  2777  	default:
  2778  		Fatalf("chanfn %d", n)
  2779  	case 1:
  2780  		fn = substArgTypes(fn, t.Elem())
  2781  	case 2:
  2782  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2783  	}
  2784  	return fn
  2785  }
  2786  
  2787  func mapfn(name string, t *types.Type) *Node {
  2788  	if !t.IsMap() {
  2789  		Fatalf("mapfn %v", t)
  2790  	}
  2791  	fn := syslook(name)
  2792  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
  2793  	return fn
  2794  }
  2795  
  2796  func mapfndel(name string, t *types.Type) *Node {
  2797  	if !t.IsMap() {
  2798  		Fatalf("mapfn %v", t)
  2799  	}
  2800  	fn := syslook(name)
  2801  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
  2802  	return fn
  2803  }
  2804  
  2805  const (
  2806  	mapslow = iota
  2807  	mapfast32
  2808  	mapfast32ptr
  2809  	mapfast64
  2810  	mapfast64ptr
  2811  	mapfaststr
  2812  	nmapfast
  2813  )
  2814  
  2815  type mapnames [nmapfast]string
  2816  
  2817  func mkmapnames(base string, ptr string) mapnames {
  2818  	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2819  }
  2820  
  2821  var mapaccess1 = mkmapnames("mapaccess1", "")
  2822  var mapaccess2 = mkmapnames("mapaccess2", "")
  2823  var mapassign = mkmapnames("mapassign", "ptr")
  2824  var mapdelete = mkmapnames("mapdelete", "")
  2825  
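        // mapfast selects which specialized runtime map routines apply to t.
        // For example (illustrative), map[string]int selects the _faststr
        // variants and map[int64]int selects _fast64; values wider than 128
        // bytes or other key types fall back to the generic routines.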
  2826  func mapfast(t *types.Type) int {
  2827  	// Check ../../runtime/map.go:maxValueSize before changing.
  2828  	if t.Val().Width > 128 {
  2829  		return mapslow
  2830  	}
  2831  	switch algtype(t.Key()) {
  2832  	case AMEM32:
  2833  		if !t.Key().HasHeapPointer() {
  2834  			return mapfast32
  2835  		}
  2836  		if Widthptr == 4 {
  2837  			return mapfast32ptr
  2838  		}
  2839  		Fatalf("small pointer %v", t.Key())
  2840  	case AMEM64:
  2841  		if !t.Key().HasHeapPointer() {
  2842  			return mapfast64
  2843  		}
  2844  		if Widthptr == 8 {
  2845  			return mapfast64ptr
  2846  		}
  2847  		// Two-word object, at least one of which is a pointer.
  2848  		// Use the slow path.
  2849  	case ASTRING:
  2850  		return mapfaststr
  2851  	}
  2852  	return mapslow
  2853  }
  2854  
  2855  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2856  	fn := syslook(name)
  2857  	fn = substArgTypes(fn, l, r)
  2858  	return fn
  2859  }
  2860  
  2861  func addstr(n *Node, init *Nodes) *Node {
  2862  	// orderexpr rewrote OADDSTR to have a list of strings.
  2863  	c := n.List.Len()
  2864  
  2865  	if c < 2 {
  2866  		Fatalf("addstr count %d too small", c)
  2867  	}
  2868  
  2869  	buf := nodnil()
  2870  	if n.Esc == EscNone {
  2871  		sz := int64(0)
  2872  		for _, n1 := range n.List.Slice() {
  2873  			if n1.Op == OLITERAL {
  2874  				sz += int64(len(n1.Val().U.(string)))
  2875  			}
  2876  		}
  2877  
  2878  		// Don't allocate the buffer if the result won't fit.
  2879  		if sz < tmpstringbufsize {
  2880  			// Create temporary buffer for result string on stack.
  2881  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2882  
  2883  			buf = nod(OADDR, temp(t), nil)
  2884  		}
  2885  	}
  2886  
  2887  	// build list of string arguments
  2888  	args := []*Node{buf}
  2889  	for _, n2 := range n.List.Slice() {
  2890  		args = append(args, conv(n2, types.Types[TSTRING]))
  2891  	}
  2892  
  2893  	var fn string
  2894  	if c <= 5 {
  2895  		// small numbers of strings use direct runtime helpers.
  2896  		// note: orderexpr knows this cutoff too.
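        		// For example (illustrative), a+b+c is lowered to a call
        		// concatstring3(buf, a, b, c).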
  2897  		fn = fmt.Sprintf("concatstring%d", c)
  2898  	} else {
  2899  		// large numbers of strings are passed to the runtime as a slice.
  2900  		fn = "concatstrings"
  2901  
  2902  		t := types.NewSlice(types.Types[TSTRING])
  2903  		slice := nod(OCOMPLIT, nil, typenod(t))
  2904  		if prealloc[n] != nil {
  2905  			prealloc[slice] = prealloc[n]
  2906  		}
  2907  		slice.List.Set(args[1:]) // skip buf arg
  2908  		args = []*Node{buf, slice}
  2909  		slice.Esc = EscNone
  2910  	}
  2911  
  2912  	cat := syslook(fn)
  2913  	r := nod(OCALL, cat, nil)
  2914  	r.List.Set(args)
  2915  	r = typecheck(r, Erv)
  2916  	r = walkexpr(r, init)
  2917  	r.Type = n.Type
  2918  
  2919  	return r
  2920  }
  2921  
  2922  // expand append(l1, l2...) to
  2923  //   init {
  2924  //     s := l1
  2925  //     n := len(s) + len(l2)
  2926  //     // Compare as uint so growslice can panic on overflow.
  2927  //     if uint(n) > uint(cap(s)) {
  2928  //       s = growslice(s, n)
  2929  //     }
  2930  //     s = s[:n]
  2931  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2932  //   }
  2933  //   s
  2934  //
  2935  // l2 is allowed to be a string.
  2936  func appendslice(n *Node, init *Nodes) *Node {
  2937  	walkexprlistsafe(n.List.Slice(), init)
  2938  
  2939  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2940  	// and n are name or literal, but those may index the slice we're
  2941  	// modifying here. Fix explicitly.
  2942  	ls := n.List.Slice()
  2943  	for i1, n1 := range ls {
  2944  		ls[i1] = cheapexpr(n1, init)
  2945  	}
  2946  
  2947  	l1 := n.List.First()
  2948  	l2 := n.List.Second()
  2949  
  2950  	var l []*Node
  2951  
  2952  	// var s []T
  2953  	s := temp(l1.Type)
  2954  	l = append(l, nod(OAS, s, l1)) // s = l1
  2955  
  2956  	// n := len(s) + len(l2)
  2957  	nn := temp(types.Types[TINT])
  2958  	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2959  
  2960  	// if uint(n) > uint(cap(s))
  2961  	nif := nod(OIF, nil, nil)
  2962  	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
  2963  	nif.Left.Left.Type = types.Types[TUINT]
  2964  	nif.Left.Right.Type = types.Types[TUINT]
  2965  
  2966  	// instantiate growslice(Type*, []any, int) []any
  2967  	fn := syslook("growslice")
  2968  	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  2969  
  2970  	// s = growslice(T, s, n)
  2971  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
  2972  	l = append(l, nif)
  2973  
  2974  	// s = s[:n]
  2975  	nt := nod(OSLICE, s, nil)
  2976  	nt.SetSliceBounds(nil, nn, nil)
  2977  	nt.Etype = 1
  2978  	l = append(l, nod(OAS, s, nt))
  2979  
  2980  	if l1.Type.Elem().HasHeapPointer() {
  2981  		// copy(s[len(l1):], l2)
  2982  		nptr1 := nod(OSLICE, s, nil)
  2983  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2984  		nptr1.Etype = 1
  2985  		nptr2 := l2
  2986  		Curfn.Func.setWBPos(n.Pos)
  2987  		fn := syslook("typedslicecopy")
  2988  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2989  		var ln Nodes
  2990  		ln.Set(l)
  2991  		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
  2992  		l = append(ln.Slice(), nt)
  2993  	} else if instrumenting && !compiling_runtime {
  2994  		// rely on runtime to instrument copy.
  2995  		// copy(s[len(l1):], l2)
  2996  		nptr1 := nod(OSLICE, s, nil)
  2997  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2998  		nptr1.Etype = 1
  2999  		nptr2 := l2
  3000  
  3001  		var ln Nodes
  3002  		ln.Set(l)
  3003  		var nt *Node
  3004  		if l2.Type.IsString() {
  3005  			fn := syslook("slicestringcopy")
  3006  			fn = substArgTypes(fn, l1.Type, l2.Type)
  3007  			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2)
  3008  		} else {
  3009  			fn := syslook("slicecopy")
  3010  			fn = substArgTypes(fn, l1.Type, l2.Type)
  3011  			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
  3012  		}
  3013  
  3014  		l = append(ln.Slice(), nt)
  3015  	} else {
  3016  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  3017  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  3018  		nptr1.SetBounded(true)
  3019  
  3020  		nptr1 = nod(OADDR, nptr1, nil)
  3021  
  3022  		nptr2 := nod(OSPTR, l2, nil)
  3023  
  3024  		fn := syslook("memmove")
  3025  		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3026  
  3027  		var ln Nodes
  3028  		ln.Set(l)
  3029  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
  3030  
  3031  		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
  3032  		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
  3033  		l = append(ln.Slice(), nt)
  3034  	}
  3035  
  3036  	typecheckslice(l, Etop)
  3037  	walkstmtlist(l)
  3038  	init.Append(l...)
  3039  	return s
  3040  }
  3041  
  3042  // Rewrite append(src, x, y, z) so that any side effects in
  3043  // x, y, z (including runtime panics) are evaluated in
  3044  // initialization statements before the append.
  3045  // For normal code generation, stop there and leave the
  3046  // rest to cgen_append.
  3047  //
  3048  // For race detector, expand append(src, a [, b]* ) to
  3049  //
  3050  //   init {
  3051  //     s := src
  3052  //     const argc = len(args) - 1
  3053  //     if cap(s) - len(s) < argc {
  3054  //	    s = growslice(s, len(s)+argc)
  3055  //     }
  3056  //     n := len(s)
  3057  //     s = s[:n+argc]
  3058  //     s[n] = a
  3059  //     s[n+1] = b
  3060  //     ...
  3061  //   }
  3062  //   s
  3063  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  3064  	if !samesafeexpr(dst, n.List.First()) {
  3065  		n.List.SetFirst(safeexpr(n.List.First(), init))
  3066  		n.List.SetFirst(walkexpr(n.List.First(), init))
  3067  	}
  3068  	walkexprlistsafe(n.List.Slice()[1:], init)
  3069  
  3070  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  3071  	// and n are name or literal, but those may index the slice we're
  3072  	// modifying here. Fix explicitly.
  3073  	// Using cheapexpr also makes sure that the evaluation
  3074  	// of all arguments (and especially any panics) happen
  3075  	// before we begin to modify the slice in a visible way.
  3076  	ls := n.List.Slice()[1:]
  3077  	for i, n := range ls {
  3078  		ls[i] = cheapexpr(n, init)
  3079  	}
  3080  
  3081  	nsrc := n.List.First()
  3082  
  3083  	argc := n.List.Len() - 1
  3084  	if argc < 1 {
  3085  		return nsrc
  3086  	}
  3087  
  3088  	// General case, with no function calls left as arguments.
  3089  	// Leave for gen, except that instrumentation requires old form.
  3090  	if !instrumenting || compiling_runtime {
  3091  		return n
  3092  	}
  3093  
  3094  	var l []*Node
  3095  
  3096  	ns := temp(nsrc.Type)
  3097  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  3098  
  3099  	na := nodintconst(int64(argc)) // const argc
  3100  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  3101  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  3102  
  3103  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  3104  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  3105  
  3106  	nx.Nbody.Set1(nod(OAS, ns,
  3107  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  3108  			nod(OADD, nod(OLEN, ns, nil), na))))
  3109  
  3110  	l = append(l, nx)
  3111  
  3112  	nn := temp(types.Types[TINT])
  3113  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  3114  
  3115  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  3116  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  3117  	nx.Etype = 1
  3118  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  3119  
  3120  	ls = n.List.Slice()[1:]
  3121  	for i, n := range ls {
  3122  		nx = nod(OINDEX, ns, nn) // s[n] ...
  3123  		nx.SetBounded(true)
  3124  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  3125  		if i+1 < len(ls) {
  3126  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  3127  		}
  3128  	}
  3129  
  3130  	typecheckslice(l, Etop)
  3131  	walkstmtlist(l)
  3132  	init.Append(l...)
  3133  	return ns
  3134  }
  3135  
  3136  // Lower copy(a, b) to a memmove call or a runtime call.
  3137  //
  3138  // init {
  3139  //   n := len(a)
  3140  //   if n > len(b) { n = len(b) }
  3141  //   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
  3142  // }
  3143  // n;
  3144  //
  3145  // Also works if b is a string.
  3146  //
  3147  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  3148  	if n.Left.Type.Elem().HasHeapPointer() {
  3149  		Curfn.Func.setWBPos(n.Pos)
  3150  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  3151  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  3152  	}
  3153  
  3154  	if runtimecall {
  3155  		if n.Right.Type.IsString() {
  3156  			fn := syslook("slicestringcopy")
  3157  			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3158  			return mkcall1(fn, n.Type, init, n.Left, n.Right)
  3159  		}
  3160  
  3161  		fn := syslook("slicecopy")
  3162  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3163  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  3164  	}
  3165  
  3166  	n.Left = walkexpr(n.Left, init)
  3167  	n.Right = walkexpr(n.Right, init)
  3168  	nl := temp(n.Left.Type)
  3169  	nr := temp(n.Right.Type)
  3170  	var l []*Node
  3171  	l = append(l, nod(OAS, nl, n.Left))
  3172  	l = append(l, nod(OAS, nr, n.Right))
  3173  
  3174  	nfrm := nod(OSPTR, nr, nil)
  3175  	nto := nod(OSPTR, nl, nil)
  3176  
  3177  	nlen := temp(types.Types[TINT])
  3178  
  3179  	// n = len(to)
  3180  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3181  
  3182  	// if n > len(frm) { n = len(frm) }
  3183  	nif := nod(OIF, nil, nil)
  3184  
  3185  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3186  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3187  	l = append(l, nif)
  3188  
  3189  	// Call memmove.
  3190  	fn := syslook("memmove")
  3191  
  3192  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3193  	nwid := temp(types.Types[TUINTPTR])
  3194  	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
  3195  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3196  	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
  3197  
  3198  	typecheckslice(l, Etop)
  3199  	walkstmtlist(l)
  3200  	init.Append(l...)
  3201  	return nlen
  3202  }
  3203  
  3204  func eqfor(t *types.Type) (n *Node, needsize bool) {
  3205  	// Should only arrive here with large memory or
  3206  	// a struct/array containing a non-memory field/element.
  3207  	// Small memory is handled inline, and single non-memory
  3208  	// is handled during type check (OCMPSTR etc).
  3209  	switch a, _ := algtype1(t); a {
  3210  	case AMEM:
  3211  		n := syslook("memequal")
  3212  		n = substArgTypes(n, t, t)
  3213  		return n, true
  3214  	case ASPECIAL:
  3215  		sym := typesymprefix(".eq", t)
  3216  		n := newname(sym)
  3217  		n.SetClass(PFUNC)
  3218  		n.Type = functype(nil, []*Node{
  3219  			anonfield(types.NewPtr(t)),
  3220  			anonfield(types.NewPtr(t)),
  3221  		}, []*Node{
  3222  			anonfield(types.Types[TBOOL]),
  3223  		})
  3224  		return n, false
  3225  	}
  3226  	Fatalf("eqfor %v", t)
  3227  	return nil, false
  3228  }
  3229  
  3230  // The result of walkcompare MUST be assigned back to n, e.g.
  3231  // 	n.Left = walkcompare(n.Left, init)
  3232  func walkcompare(n *Node, init *Nodes) *Node {
  3233  	// Given interface value l and concrete value r, rewrite
  3234  	//   l == r
  3235  	// into types-equal && data-equal.
  3236  	// This is efficient, avoids allocations, and avoids runtime calls.
  3237  	var l, r *Node
  3238  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3239  		l = n.Left
  3240  		r = n.Right
  3241  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3242  		l = n.Right
  3243  		r = n.Left
  3244  	}
  3245  
  3246  	if l != nil {
  3247  		// Handle both == and !=.
  3248  		eq := n.Op
  3249  		var andor Op
  3250  		if eq == OEQ {
  3251  			andor = OANDAND
  3252  		} else {
  3253  			andor = OOROR
  3254  		}
  3255  		// Check for types equal.
  3256  		// For empty interface, this is:
  3257  		//   l.tab == type(r)
  3258  		// For non-empty interface, this is:
  3259  		//   l.tab != nil && l.tab._type == type(r)
  3260  		var eqtype *Node
  3261  		tab := nod(OITAB, l, nil)
  3262  		rtyp := typename(r.Type)
  3263  		if l.Type.IsEmptyInterface() {
  3264  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3265  			tab.SetTypecheck(1)
  3266  			eqtype = nod(eq, tab, rtyp)
  3267  		} else {
  3268  			nonnil := nod(brcom(eq), nodnil(), tab)
  3269  			match := nod(eq, itabType(tab), rtyp)
  3270  			eqtype = nod(andor, nonnil, match)
  3271  		}
  3272  		// Check for data equal.
  3273  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3274  		// Put it all together.
  3275  		expr := nod(andor, eqtype, eqdata)
  3276  		n = finishcompare(n, expr, init)
  3277  		return n
  3278  	}
  3279  
  3280  	// Must be comparison of array or struct.
  3281  	// Otherwise back end handles it.
  3282  	// While we're here, decide whether to
  3283  	// inline or call an eq alg.
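        	// For example (illustrative), on amd64 a [16]byte comparison is
        	// inlined as two 8-byte loads and compares, while a [64]byte
        	// comparison calls memequal via eqfor.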
  3284  	t := n.Left.Type
  3285  	var inline bool
  3286  
  3287  	maxcmpsize := int64(4)
  3288  	unalignedLoad := false
  3289  	switch thearch.LinkArch.Family {
  3290  	case sys.AMD64, sys.ARM64, sys.S390X:
  3291  		// Keep this low enough to generate less code than a function call.
  3292  		maxcmpsize = 16
  3293  		unalignedLoad = true
  3294  	case sys.I386:
  3295  		maxcmpsize = 8
  3296  		unalignedLoad = true
  3297  	}
  3298  
  3299  	switch t.Etype {
  3300  	default:
  3301  		return n
  3302  	case TARRAY:
  3303  		// We can compare several elements at once with 2/4/8 byte integer compares
  3304  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3305  	case TSTRUCT:
  3306  		inline = t.NumFields() <= 4
  3307  	}
  3308  
  3309  	cmpl := n.Left
  3310  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3311  		cmpl = cmpl.Left
  3312  	}
  3313  	cmpr := n.Right
  3314  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3315  		cmpr = cmpr.Left
  3316  	}
  3317  
  3318  	// We chose not to inline; call the equality function directly.
  3319  	if !inline {
  3320  		if isvaluelit(cmpl) {
  3321  			var_ := temp(cmpl.Type)
  3322  			anylit(cmpl, var_, init)
  3323  			cmpl = var_
  3324  		}
  3325  		if isvaluelit(cmpr) {
  3326  			var_ := temp(cmpr.Type)
  3327  			anylit(cmpr, var_, init)
  3328  			cmpr = var_
  3329  		}
  3330  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3331  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3332  		}
  3333  
  3334  		// eq algs take pointers
  3335  		pl := temp(types.NewPtr(t))
  3336  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3337  		al.Right.Etype = 1 // addr does not escape
  3338  		al = typecheck(al, Etop)
  3339  		init.Append(al)
  3340  
  3341  		pr := temp(types.NewPtr(t))
  3342  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3343  		ar.Right.Etype = 1 // addr does not escape
  3344  		ar = typecheck(ar, Etop)
  3345  		init.Append(ar)
  3346  
  3347  		fn, needsize := eqfor(t)
  3348  		call := nod(OCALL, fn, nil)
  3349  		call.List.Append(pl)
  3350  		call.List.Append(pr)
  3351  		if needsize {
  3352  			call.List.Append(nodintconst(t.Width))
  3353  		}
  3354  		res := call
  3355  		if n.Op != OEQ {
  3356  			res = nod(ONOT, res, nil)
  3357  		}
  3358  		n = finishcompare(n, res, init)
  3359  		return n
  3360  	}
  3361  
  3362  	// inline: build boolean expression comparing element by element
  3363  	andor := OANDAND
  3364  	if n.Op == ONE {
  3365  		andor = OOROR
  3366  	}
  3367  	var expr *Node
  3368  	compare := func(el, er *Node) {
  3369  		a := nod(n.Op, el, er)
  3370  		if expr == nil {
  3371  			expr = a
  3372  		} else {
  3373  			expr = nod(andor, expr, a)
  3374  		}
  3375  	}
  3376  	cmpl = safeexpr(cmpl, init)
  3377  	cmpr = safeexpr(cmpr, init)
  3378  	if t.IsStruct() {
  3379  		for _, f := range t.Fields().Slice() {
  3380  			sym := f.Sym
  3381  			if sym.IsBlank() {
  3382  				continue
  3383  			}
  3384  			compare(
  3385  				nodSym(OXDOT, cmpl, sym),
  3386  				nodSym(OXDOT, cmpr, sym),
  3387  			)
  3388  		}
  3389  	} else {
  3390  		step := int64(1)
  3391  		remains := t.NumElem() * t.Elem().Width
  3392  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3393  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3394  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3395  		for i := int64(0); remains > 0; {
  3396  			var convType *types.Type
  3397  			switch {
  3398  			case remains >= 8 && combine64bit:
  3399  				convType = types.Types[TINT64]
  3400  				step = 8 / t.Elem().Width
  3401  			case remains >= 4 && combine32bit:
  3402  				convType = types.Types[TUINT32]
  3403  				step = 4 / t.Elem().Width
  3404  			case remains >= 2 && combine16bit:
  3405  				convType = types.Types[TUINT16]
  3406  				step = 2 / t.Elem().Width
  3407  			default:
  3408  				step = 1
  3409  			}
  3410  			if step == 1 {
  3411  				compare(
  3412  					nod(OINDEX, cmpl, nodintconst(int64(i))),
  3413  					nod(OINDEX, cmpr, nodintconst(int64(i))),
  3414  				)
  3415  				i++
  3416  				remains -= t.Elem().Width
  3417  			} else {
  3418  				elemType := t.Elem().ToUnsigned()
  3419  				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
  3420  				cmplw = conv(cmplw, elemType) // convert to unsigned
  3421  				cmplw = conv(cmplw, convType) // widen
  3422  				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
  3423  				cmprw = conv(cmprw, elemType)
  3424  				cmprw = conv(cmprw, convType)
  3425  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3426  				// ssa will generate a single large load.
  3427  				for offset := int64(1); offset < step; offset++ {
  3428  					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
  3429  					lb = conv(lb, elemType)
  3430  					lb = conv(lb, convType)
  3431  					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
  3432  					cmplw = nod(OOR, cmplw, lb)
  3433  					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
  3434  					rb = conv(rb, elemType)
  3435  					rb = conv(rb, convType)
  3436  					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
  3437  					cmprw = nod(OOR, cmprw, rb)
  3438  				}
  3439  				compare(cmplw, cmprw)
  3440  				i += step
  3441  				remains -= step * t.Elem().Width
  3442  			}
  3443  		}
  3444  	}
  3445  	if expr == nil {
  3446  		expr = nodbool(n.Op == OEQ)
  3447  	}
  3448  	n = finishcompare(n, expr, init)
  3449  	return n
  3450  }
  3451  
  3452  // The result of finishcompare MUST be assigned back to n, e.g.
  3453  // 	n.Left = finishcompare(n.Left, r, init)
  3454  func finishcompare(n, r *Node, init *Nodes) *Node {
  3455  	// Use nn here to avoid passing r to typecheck.
  3456  	nn := r
  3457  	nn = typecheck(nn, Erv)
  3458  	nn = walkexpr(nn, init)
  3459  	r = nn
  3460  	if r.Type != n.Type {
  3461  		r = nod(OCONVNOP, r, nil)
  3462  		r.Type = n.Type
  3463  		r.SetTypecheck(1)
  3464  		nn = r
  3465  	}
  3466  	return nn
  3467  }
  3468  
  3469  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3470  func (n *Node) isIntOrdering() bool {
  3471  	switch n.Op {
  3472  	case OLE, OLT, OGE, OGT:
  3473  	default:
  3474  		return false
  3475  	}
  3476  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3477  }
  3478  
  3479  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3480  // n must be an OANDAND or OOROR node.
  3481  // The result of walkinrange MUST be assigned back to n, e.g.
  3482  // 	n.Left = walkinrange(n.Left, init)
  3483  func walkinrange(n *Node, init *Nodes) *Node {
  3484  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3485  	// * a, b, and c have integer type
  3486  	// * b is side-effect-free
  3487  	// * opl and opr are each < or ≤
  3488  	// * OP is &&
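        	// Illustrative end-to-end example: 4 <= x && x < 10 is rewritten below
        	// into a single unsigned comparison, roughly uint(x-4) < 6
        	// (the conversion uses the unsigned type of x's width).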
  3489  	l := n.Left
  3490  	r := n.Right
  3491  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3492  		return n
  3493  	}
  3494  
  3495  	// Find b, if it exists, and rename appropriately.
  3496  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3497  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3498  	a, opl, b := l.Left, l.Op, l.Right
  3499  	x, opr, c := r.Left, r.Op, r.Right
  3500  	for i := 0; ; i++ {
  3501  		if samesafeexpr(b, x) {
  3502  			break
  3503  		}
  3504  		if i == 3 {
  3505  			// Tried all permutations and couldn't find an appropriate b == x.
  3506  			return n
  3507  		}
  3508  		if i&1 == 0 {
  3509  			a, opl, b = b, brrev(opl), a
  3510  		} else {
  3511  			x, opr, c = c, brrev(opr), x
  3512  		}
  3513  	}
  3514  
  3515  	// If n.Op is ||, apply De Morgan's laws.
  3516  	// Negate the internal ops now; we'll negate the top level op at the end.
  3517  	// Henceforth assume &&.
  3518  	negateResult := n.Op == OOROR
  3519  	if negateResult {
  3520  		opl = brcom(opl)
  3521  		opr = brcom(opr)
  3522  	}
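        	// For example, x < 4 || 10 <= x is treated from here on as
        	// 4 <= x && x < 10 with negateResult set, so the final comparison
        	// comes out roughly as uint(x-4) >= 6 instead of uint(x-4) < 6.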
  3523  
  3524  	cmpdir := func(o Op) int {
  3525  		switch o {
  3526  		case OLE, OLT:
  3527  			return -1
  3528  		case OGE, OGT:
  3529  			return +1
  3530  		}
  3531  		Fatalf("walkinrange cmpdir %v", o)
  3532  		return 0
  3533  	}
  3534  	if cmpdir(opl) != cmpdir(opr) {
  3535  		// Not a range check; something like b < a && b < c.
  3536  		return n
  3537  	}
  3538  
  3539  	switch opl {
  3540  	case OGE, OGT:
  3541  		// We have something like a > b && b ≥ c.
  3542  		// Switch and reverse ops and rename constants,
  3543  		// to make it look like a ≤ b && b < c.
  3544  		a, c = c, a
  3545  		opl, opr = brrev(opr), brrev(opl)
  3546  	}
  3547  
  3548  	// We must ensure that c-a is non-negative.
  3549  	// For now, require a and c to be constants.
  3550  	// In the future, we could also support a == 0 and c == len/cap(...).
  3551  	// Unfortunately, by this point, most len/cap expressions have been
  3552  	// stored into temporary variables.
  3553  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3554  		return n
  3555  	}
  3556  
  3557  	if opl == OLT {
  3558  		// We have a < b && ...
  3559  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3560  		// If a is not the maximum constant for b's type,
  3561  		// we can increment a and switch to ≤.
  3562  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3563  			return n
  3564  		}
  3565  		a = nodintconst(a.Int64() + 1)
  3566  		opl = OLE
  3567  	}
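        	// For example, 3 < x && x < 10 is adjusted by the block above to
        	// 4 <= x && x < 10, provided 3 is not the maximum value of x's type.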
  3568  
  3569  	bound := c.Int64() - a.Int64()
  3570  	if bound < 0 {
  3571  		// Bad news. Something like 5 <= x && x < 3.
  3572  		// Rare in practice, and we still need to generate side-effects,
  3573  		// so just leave it alone.
  3574  		return n
  3575  	}
  3576  
  3577  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3578  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3579  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3580  	// which is equivalent to uint(b-a) < uint(c-a).
  3581  	ut := b.Type.ToUnsigned()
  3582  	lhs := conv(nod(OSUB, b, a), ut)
  3583  	rhs := nodintconst(bound)
  3584  	if negateResult {
  3585  		// Negate top level.
  3586  		opr = brcom(opr)
  3587  	}
  3588  	cmp := nod(opr, lhs, rhs)
  3589  	cmp.Pos = n.Pos
  3590  	cmp = addinit(cmp, l.Ninit.Slice())
  3591  	cmp = addinit(cmp, r.Ninit.Slice())
  3592  	// Typecheck the AST rooted at cmp...
  3593  	cmp = typecheck(cmp, Erv)
  3594  	// ...but then reset cmp's type to match n's type.
  3595  	cmp.Type = n.Type
  3596  	cmp = walkexpr(cmp, init)
  3597  	return cmp
  3598  }
  3599  
  3600  // bounded reports whether the integer expression n must be in the range [0, max).
  3601  func bounded(n *Node, max int64) bool {
  3602  	if n.Type == nil || !n.Type.IsInteger() {
  3603  		return false
  3604  	}
  3605  
  3606  	sign := n.Type.IsSigned()
  3607  	bits := int32(8 * n.Type.Width)
  3608  
  3609  	if smallintconst(n) {
  3610  		v := n.Int64()
  3611  		return 0 <= v && v < max
  3612  	}
  3613  
  3614  	switch n.Op {
  3615  	case OAND:
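        		// Masking with a constant mask m keeps the result in [0, m];
        		// e.g. for n = i&7 and max = 8, bounded reports true.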
  3616  		v := int64(-1)
  3617  		if smallintconst(n.Left) {
  3618  			v = n.Left.Int64()
  3619  		} else if smallintconst(n.Right) {
  3620  			v = n.Right.Int64()
  3621  		}
  3622  
  3623  		if 0 <= v && v < max {
  3624  			return true
  3625  		}
  3626  
  3627  	case OMOD:
  3628  		if !sign && smallintconst(n.Right) {
  3629  			v := n.Right.Int64()
  3630  			if 0 <= v && v <= max {
  3631  				return true
  3632  			}
  3633  		}
  3634  
  3635  	case ODIV:
  3636  		if !sign && smallintconst(n.Right) {
  3637  			v := n.Right.Int64()
  3638  			for bits > 0 && v >= 2 {
  3639  				bits--
  3640  				v >>= 1
  3641  			}
  3642  		}
  3643  
  3644  	case ORSH:
  3645  		if !sign && smallintconst(n.Right) {
  3646  			v := n.Right.Int64()
  3647  			if v > int64(bits) {
  3648  				return true
  3649  			}
  3650  			bits -= int32(v)
  3651  		}
  3652  	}
  3653  
  3654  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3655  		return true
  3656  	}
  3657  
  3658  	return false
  3659  }
  3660  
  3661  // usemethod checks interface method calls for uses of reflect.Type.Method or MethodByName.
  3662  func usemethod(n *Node) {
  3663  	t := n.Left.Type
  3664  
  3665  	// Looking for either of:
  3666  	//	Method(int) reflect.Method
  3667  	//	MethodByName(string) (reflect.Method, bool)
  3668  	//
  3669  	// TODO(crawshaw): improve precision of match by working out
  3670  	//                 how to check the method name.
  3671  	if n := t.NumParams(); n != 1 {
  3672  		return
  3673  	}
  3674  	if n := t.NumResults(); n != 1 && n != 2 {
  3675  		return
  3676  	}
  3677  	p0 := t.Params().Field(0)
  3678  	res0 := t.Results().Field(0)
  3679  	var res1 *types.Field
  3680  	if t.NumResults() == 2 {
  3681  		res1 = t.Results().Field(1)
  3682  	}
  3683  
  3684  	if res1 == nil {
  3685  		if p0.Type.Etype != TINT {
  3686  			return
  3687  		}
  3688  	} else {
  3689  		if !p0.Type.IsString() {
  3690  			return
  3691  		}
  3692  		if !res1.Type.IsBoolean() {
  3693  			return
  3694  		}
  3695  	}
  3696  
  3697  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3698  	//       (including global variables such as numImports; see issue #19028).
  3699  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3700  		Curfn.Func.SetReflectMethod(true)
  3701  	}
  3702  }
  3703  
  3704  func usefield(n *Node) {
  3705  	if objabi.Fieldtrack_enabled == 0 {
  3706  		return
  3707  	}
  3708  
  3709  	switch n.Op {
  3710  	default:
  3711  		Fatalf("usefield %v", n.Op)
  3712  
  3713  	case ODOT, ODOTPTR:
  3714  		break
  3715  	}
  3716  	if n.Sym == nil {
  3717  		// No field name. This dot expression was built by the compiler for
  3718  		// access to runtime data structures; ignore it.
  3719  		return
  3720  	}
  3721  
  3722  	t := n.Left.Type
  3723  	if t.IsPtr() {
  3724  		t = t.Elem()
  3725  	}
  3726  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3727  	if field == nil {
  3728  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3729  	}
  3730  	if !strings.Contains(field.Note, "go:\"track\"") {
  3731  		return
  3732  	}
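        	// Illustrative: a tracked field is one whose struct tag contains a
        	// go:"track" note, e.g.
        	//	type T struct {
        	//		F int `go:"track"`
        	//	}
        	// Only such fields reach the code below.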
  3733  
  3734  	outer := n.Left.Type
  3735  	if outer.IsPtr() {
  3736  		outer = outer.Elem()
  3737  	}
  3738  	if outer.Sym == nil {
  3739  		yyerror("tracked field must be in named struct type")
  3740  	}
  3741  	if !exportname(field.Sym.Name) {
  3742  		yyerror("tracked field must be exported (upper case)")
  3743  	}
  3744  
  3745  	sym := tracksym(outer, field)
  3746  	if Curfn.Func.FieldTrack == nil {
  3747  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3748  	}
  3749  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3750  }
  3751  
  3752  func candiscardlist(l Nodes) bool {
  3753  	for _, n := range l.Slice() {
  3754  		if !candiscard(n) {
  3755  			return false
  3756  		}
  3757  	}
  3758  	return true
  3759  }
  3760  
  3761  func candiscard(n *Node) bool {
  3762  	if n == nil {
  3763  		return true
  3764  	}
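        	// Illustrative: an unused x + y*z is discardable; x / y is discardable
        	// only when y is a nonzero constant; make([]T, n) never is, since a bad
        	// size could panic.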
  3765  
  3766  	switch n.Op {
  3767  	default:
  3768  		return false
  3769  
  3770  		// Discardable as long as the subpieces are.
  3771  	case ONAME,
  3772  		ONONAME,
  3773  		OTYPE,
  3774  		OPACK,
  3775  		OLITERAL,
  3776  		OADD,
  3777  		OSUB,
  3778  		OOR,
  3779  		OXOR,
  3780  		OADDSTR,
  3781  		OADDR,
  3782  		OANDAND,
  3783  		OARRAYBYTESTR,
  3784  		OARRAYRUNESTR,
  3785  		OSTRARRAYBYTE,
  3786  		OSTRARRAYRUNE,
  3787  		OCAP,
  3788  		OCMPIFACE,
  3789  		OCMPSTR,
  3790  		OCOMPLIT,
  3791  		OMAPLIT,
  3792  		OSTRUCTLIT,
  3793  		OARRAYLIT,
  3794  		OSLICELIT,
  3795  		OPTRLIT,
  3796  		OCONV,
  3797  		OCONVIFACE,
  3798  		OCONVNOP,
  3799  		ODOT,
  3800  		OEQ,
  3801  		ONE,
  3802  		OLT,
  3803  		OLE,
  3804  		OGT,
  3805  		OGE,
  3806  		OKEY,
  3807  		OSTRUCTKEY,
  3808  		OLEN,
  3809  		OMUL,
  3810  		OLSH,
  3811  		ORSH,
  3812  		OAND,
  3813  		OANDNOT,
  3814  		ONEW,
  3815  		ONOT,
  3816  		OCOM,
  3817  		OPLUS,
  3818  		OMINUS,
  3819  		OOROR,
  3820  		OPAREN,
  3821  		ORUNESTR,
  3822  		OREAL,
  3823  		OIMAG,
  3824  		OCOMPLEX:
  3825  		break
  3826  
  3827  		// Discardable as long as we know it's not division by zero.
  3828  	case ODIV, OMOD:
  3829  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3830  			break
  3831  		}
  3832  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3833  			break
  3834  		}
  3835  		return false
  3836  
  3837  		// Discardable as long as we know it won't fail because of a bad size.
  3838  	case OMAKECHAN, OMAKEMAP:
  3839  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3840  			break
  3841  		}
  3842  		return false
  3843  
  3844  		// Difficult to tell what sizes are okay.
  3845  	case OMAKESLICE:
  3846  		return false
  3847  	}
  3848  
  3849  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3850  		return false
  3851  	}
  3852  
  3853  	return true
  3854  }
  3855  
  3856  // rewrite
  3857  //	print(x, y, z)
  3858  // into
  3859  //	func(a1, a2, a3) {
  3860  //		print(a1, a2, a3)
  3861  //	}(x, y, z)
  3862  // and same for println.
  3863  
  3864  var walkprintfunc_prgen int
  3865  
  3866  // The result of walkprintfunc MUST be assigned back to n, e.g.
  3867  // 	n.Left = walkprintfunc(n.Left, init)
  3868  func walkprintfunc(n *Node, init *Nodes) *Node {
  3869  	if n.Ninit.Len() != 0 {
  3870  		walkstmtlist(n.Ninit.Slice())
  3871  		init.AppendNodes(&n.Ninit)
  3872  	}
  3873  
  3874  	t := nod(OTFUNC, nil, nil)
  3875  	var printargs []*Node
  3876  	for i, n1 := range n.List.Slice() {
  3877  		buf := fmt.Sprintf("a%d", i)
  3878  		a := namedfield(buf, n1.Type)
  3879  		t.List.Append(a)
  3880  		printargs = append(printargs, a.Left)
  3881  	}
  3882  
  3883  	oldfn := Curfn
  3884  	Curfn = nil
  3885  
  3886  	walkprintfunc_prgen++
  3887  	sym := lookupN("print·%d", walkprintfunc_prgen)
  3888  	fn := dclfunc(sym, t)
  3889  
  3890  	a := nod(n.Op, nil, nil)
  3891  	a.List.Set(printargs)
  3892  	a = typecheck(a, Etop)
  3893  	a = walkstmt(a)
  3894  
  3895  	fn.Nbody.Set1(a)
  3896  
  3897  	funcbody()
  3898  
  3899  	fn = typecheck(fn, Etop)
  3900  	typecheckslice(fn.Nbody.Slice(), Etop)
  3901  	xtop = append(xtop, fn)
  3902  	Curfn = oldfn
  3903  
  3904  	a = nod(OCALL, nil, nil)
  3905  	a.Left = fn.Func.Nname
  3906  	a.List.Set(n.List.Slice())
  3907  	a = typecheck(a, Etop)
  3908  	a = walkexpr(a, init)
  3909  	return a
  3910  }
  3911  
  3912  // substArgTypes substitutes the given list of types for
  3913  // successive occurrences of the "any" placeholder in the
  3914  // type syntax expression n.Type.
  3915  // The result of substArgTypes MUST be assigned back to old, e.g.
  3916  // 	n.Left = substArgTypes(n.Left, t1, t2)
  3917  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3918  	n := *old // make shallow copy
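        	// Illustrative: if old's type is func(any, int) any, then
        	// substArgTypes(old, t1, t2) returns a copy typed func(t1, int) t2;
        	// passing more types than there are "any" placeholders is fatal below.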
  3919  
  3920  	for _, t := range types_ {
  3921  		dowidth(t)
  3922  	}
  3923  	n.Type = types.SubstAny(n.Type, &types_)
  3924  	if len(types_) > 0 {
  3925  		Fatalf("substArgTypes: too many argument types")
  3926  	}
  3927  	return &n
  3928  }