github.com/Filosottile/go@v0.0.0-20170906193555-dbed9972d994/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"fmt"
    12  	"strings"
    13  )
    14  
     15  // The constant is known to the runtime.
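         // It is the size of the stack-allocated temporary buffers used by the
         // string/byte/rune conversion cases lowered in walkexpr below.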
    16  const (
    17  	tmpstringbufsize = 32
    18  )
    19  
    20  func walk(fn *Node) {
    21  	Curfn = fn
    22  
    23  	if Debug['W'] != 0 {
    24  		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
    25  		dumplist(s, Curfn.Nbody)
    26  	}
    27  
    28  	lno := lineno
    29  
    30  	// Final typecheck for any unused variables.
    31  	for i, ln := range fn.Func.Dcl {
    32  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    33  			ln = typecheck(ln, Erv|Easgn)
    34  			fn.Func.Dcl[i] = ln
    35  		}
    36  	}
    37  
     38  	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    39  	for _, ln := range fn.Func.Dcl {
    40  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    41  			ln.Name.Defn.Left.Name.SetUsed(true)
    42  		}
    43  	}
    44  
    45  	for _, ln := range fn.Func.Dcl {
    46  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    47  			continue
    48  		}
    49  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    50  			if defn.Left.Name.Used() {
    51  				continue
    52  			}
    53  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    54  			defn.Left.Name.SetUsed(true) // suppress repeats
    55  		} else {
    56  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    57  		}
    58  	}
    59  
    60  	lineno = lno
    61  	if nerrors != 0 {
    62  		return
    63  	}
    64  	walkstmtlist(Curfn.Nbody.Slice())
    65  	if Debug['W'] != 0 {
    66  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    67  		dumplist(s, Curfn.Nbody)
    68  	}
    69  
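         	// zeroResults zeroes result parameters that must start out zero, and
         	// heapmoves inserts the copies between the stack and heap homes of
         	// parameters that escape analysis moved to the heap.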
    70  	zeroResults()
    71  	heapmoves()
    72  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    73  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    74  		dumplist(s, Curfn.Func.Enter)
    75  	}
    76  }
    77  
    78  func walkstmtlist(s []*Node) {
    79  	for i := range s {
    80  		s[i] = walkstmt(s[i])
    81  	}
    82  }
    83  
    84  func samelist(a, b []*Node) bool {
    85  	if len(a) != len(b) {
    86  		return false
    87  	}
    88  	for i, n := range a {
    89  		if n != b[i] {
    90  			return false
    91  		}
    92  	}
    93  	return true
    94  }
    95  
    96  func paramoutheap(fn *Node) bool {
    97  	for _, ln := range fn.Func.Dcl {
    98  		switch ln.Class() {
    99  		case PPARAMOUT:
   100  			if ln.isParamStackCopy() || ln.Addrtaken() {
   101  				return true
   102  			}
   103  
   104  		case PAUTO:
   105  			// stop early - parameters are over
   106  			return false
   107  		}
   108  	}
   109  
   110  	return false
   111  }
   112  
    113  // adjustargs adds "adjust" to all the argument locations for the call n.
   114  // n must be a defer or go node that has already been walked.
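         // The extra room is for the argument size and function value that the
         // runtime's deferproc/newproc expect ahead of the call's own arguments.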
   115  func adjustargs(n *Node, adjust int) {
   116  	var arg *Node
   117  	var lhs *Node
   118  
   119  	callfunc := n.Left
   120  	for _, arg = range callfunc.List.Slice() {
   121  		if arg.Op != OAS {
   122  			Fatalf("call arg not assignment")
   123  		}
   124  		lhs = arg.Left
   125  		if lhs.Op == ONAME {
   126  			// This is a temporary introduced by reorder1.
   127  			// The real store to the stack appears later in the arg list.
   128  			continue
   129  		}
   130  
   131  		if lhs.Op != OINDREGSP {
   132  			Fatalf("call argument store does not use OINDREGSP")
   133  		}
   134  
   135  		// can't really check this in machine-indep code.
   136  		//if(lhs->val.u.reg != D_SP)
   137  		//      Fatalf("call arg assign not indreg(SP)")
   138  		lhs.Xoffset += int64(adjust)
   139  	}
   140  }
   141  
   142  // The result of walkstmt MUST be assigned back to n, e.g.
   143  // 	n.Left = walkstmt(n.Left)
   144  func walkstmt(n *Node) *Node {
   145  	if n == nil {
   146  		return n
   147  	}
   148  
   149  	setlineno(n)
   150  
   151  	walkstmtlist(n.Ninit.Slice())
   152  
   153  	switch n.Op {
   154  	default:
   155  		if n.Op == ONAME {
   156  			yyerror("%v is not a top level statement", n.Sym)
   157  		} else {
   158  			yyerror("%v is not a top level statement", n.Op)
   159  		}
   160  		Dump("nottop", n)
   161  
   162  	case OAS,
   163  		OASOP,
   164  		OAS2,
   165  		OAS2DOTTYPE,
   166  		OAS2RECV,
   167  		OAS2FUNC,
   168  		OAS2MAPR,
   169  		OCLOSE,
   170  		OCOPY,
   171  		OCALLMETH,
   172  		OCALLINTER,
   173  		OCALL,
   174  		OCALLFUNC,
   175  		ODELETE,
   176  		OSEND,
   177  		OPRINT,
   178  		OPRINTN,
   179  		OPANIC,
   180  		OEMPTY,
   181  		ORECOVER,
   182  		OGETG:
   183  		if n.Typecheck() == 0 {
   184  			Fatalf("missing typecheck: %+v", n)
   185  		}
   186  		wascopy := n.Op == OCOPY
   187  		init := n.Ninit
   188  		n.Ninit.Set(nil)
   189  		n = walkexpr(n, &init)
   190  		n = addinit(n, init.Slice())
   191  		if wascopy && n.Op == OCONVNOP {
   192  			n.Op = OEMPTY // don't leave plain values as statements.
   193  		}
   194  
   195  	// special case for a receive where we throw away
   196  	// the value received.
   197  	case ORECV:
   198  		if n.Typecheck() == 0 {
   199  			Fatalf("missing typecheck: %+v", n)
   200  		}
   201  		init := n.Ninit
   202  		n.Ninit.Set(nil)
   203  
   204  		n.Left = walkexpr(n.Left, &init)
   205  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   206  		n = walkexpr(n, &init)
   207  
   208  		n = addinit(n, init.Slice())
   209  
   210  	case OBREAK,
   211  		OCONTINUE,
   212  		OFALL,
   213  		OGOTO,
   214  		OLABEL,
   215  		ODCLCONST,
   216  		ODCLTYPE,
   217  		OCHECKNIL,
   218  		OVARKILL,
   219  		OVARLIVE:
   220  		break
   221  
   222  	case ODCL:
   223  		v := n.Left
   224  		if v.Class() == PAUTOHEAP {
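         			// The variable was moved to the heap by escape analysis; its
         			// declaration becomes an assignment of a freshly allocated object
         			// to the variable's heap address.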
   225  			if compiling_runtime {
   226  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   227  			}
   228  			if prealloc[v] == nil {
   229  				prealloc[v] = callnew(v.Type)
   230  			}
   231  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   232  			nn.SetColas(true)
   233  			nn = typecheck(nn, Etop)
   234  			return walkstmt(nn)
   235  		}
   236  
   237  	case OBLOCK:
   238  		walkstmtlist(n.List.Slice())
   239  
   240  	case OXCASE:
   241  		yyerror("case statement out of place")
   242  		n.Op = OCASE
   243  		fallthrough
   244  
   245  	case OCASE:
   246  		n.Right = walkstmt(n.Right)
   247  
   248  	case ODEFER:
   249  		Curfn.Func.SetHasDefer(true)
   250  		switch n.Left.Op {
   251  		case OPRINT, OPRINTN:
   252  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   253  
   254  		case OCOPY:
   255  			n.Left = copyany(n.Left, &n.Ninit, true)
   256  
   257  		default:
   258  			n.Left = walkexpr(n.Left, &n.Ninit)
   259  		}
   260  
   261  		// make room for size & fn arguments.
   262  		adjustargs(n, 2*Widthptr)
   263  
   264  	case OFOR, OFORUNTIL:
   265  		if n.Left != nil {
   266  			walkstmtlist(n.Left.Ninit.Slice())
   267  			init := n.Left.Ninit
   268  			n.Left.Ninit.Set(nil)
   269  			n.Left = walkexpr(n.Left, &init)
   270  			n.Left = addinit(n.Left, init.Slice())
   271  		}
   272  
   273  		n.Right = walkstmt(n.Right)
   274  		walkstmtlist(n.Nbody.Slice())
   275  
   276  	case OIF:
   277  		n.Left = walkexpr(n.Left, &n.Ninit)
   278  		walkstmtlist(n.Nbody.Slice())
   279  		walkstmtlist(n.Rlist.Slice())
   280  
   281  	case OPROC:
   282  		switch n.Left.Op {
   283  		case OPRINT, OPRINTN:
   284  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   285  
   286  		case OCOPY:
   287  			n.Left = copyany(n.Left, &n.Ninit, true)
   288  
   289  		default:
   290  			n.Left = walkexpr(n.Left, &n.Ninit)
   291  		}
   292  
   293  		// make room for size & fn arguments.
   294  		adjustargs(n, 2*Widthptr)
   295  
   296  	case ORETURN:
   297  		walkexprlist(n.List.Slice(), &n.Ninit)
   298  		if n.List.Len() == 0 {
   299  			break
   300  		}
   301  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   302  			// assign to the function out parameters,
   303  			// so that reorder3 can fix up conflicts
   304  			var rl []*Node
   305  
   306  			var cl Class
   307  			for _, ln := range Curfn.Func.Dcl {
   308  				cl = ln.Class()
   309  				if cl == PAUTO || cl == PAUTOHEAP {
   310  					break
   311  				}
   312  				if cl == PPARAMOUT {
   313  					if ln.isParamStackCopy() {
   314  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   315  					}
   316  					rl = append(rl, ln)
   317  				}
   318  			}
   319  
   320  			if got, want := n.List.Len(), len(rl); got != want {
   321  				// order should have rewritten multi-value function calls
   322  				// with explicit OAS2FUNC nodes.
   323  				Fatalf("expected %v return arguments, have %v", want, got)
   324  			}
   325  
   326  			if samelist(rl, n.List.Slice()) {
   327  				// special return in disguise
   328  				n.List.Set(nil)
   329  
   330  				break
   331  			}
   332  
   333  			// move function calls out, to make reorder3's job easier.
   334  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   335  
   336  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   337  			n.List.Set(reorder3(ll))
   338  			break
   339  		}
   340  
   341  		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
   342  		n.List.Set(ll)
   343  
   344  	case ORETJMP:
   345  		break
   346  
   347  	case OSELECT:
   348  		walkselect(n)
   349  
   350  	case OSWITCH:
   351  		walkswitch(n)
   352  
   353  	case ORANGE:
   354  		n = walkrange(n)
   355  
   356  	case OXFALL:
   357  		yyerror("fallthrough statement out of place")
   358  		n.Op = OFALL
   359  	}
   360  
   361  	if n.Op == ONAME {
   362  		Fatalf("walkstmt ended up with name: %+v", n)
   363  	}
   364  	return n
   365  }
   366  
   367  func isSmallMakeSlice(n *Node) bool {
   368  	if n.Op != OMAKESLICE {
   369  		return false
   370  	}
   371  	l := n.Left
   372  	r := n.Right
   373  	if r == nil {
   374  		r = l
   375  	}
   376  	t := n.Type
   377  
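         	// "Small" means len and cap are small integer constants and the backing
         	// array stays under 64kB, so a non-escaping make([]T, l, c) can become a
         	// stack-allocated array (see the OMAKESLICE case in walkexpr).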
   378  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
   379  }
   380  
   381  // walk the whole tree of the body of an
   382  // expression or simple statement.
    383  // the types of expressions are calculated.
    384  // compile-time constants are evaluated.
    385  // complex side effects like statements are appended to init.
   386  func walkexprlist(s []*Node, init *Nodes) {
   387  	for i := range s {
   388  		s[i] = walkexpr(s[i], init)
   389  	}
   390  }
   391  
   392  func walkexprlistsafe(s []*Node, init *Nodes) {
   393  	for i, n := range s {
   394  		s[i] = safeexpr(n, init)
   395  		s[i] = walkexpr(s[i], init)
   396  	}
   397  }
   398  
   399  func walkexprlistcheap(s []*Node, init *Nodes) {
   400  	for i, n := range s {
   401  		s[i] = cheapexpr(n, init)
   402  		s[i] = walkexpr(s[i], init)
   403  	}
   404  }
   405  
   406  // Build name of function for interface conversion.
   407  // Not all names are possible
   408  // (e.g., we'll never generate convE2E or convE2I or convI2E).
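         // The specialized variants (convT2E16, convT2I64, convT2Estring, ...) let
         // the runtime take cheaper allocation paths for common small or
         // pointer-free source types.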
   409  func convFuncName(from, to *types.Type) string {
   410  	tkind := to.Tie()
   411  	switch from.Tie() {
   412  	case 'I':
   413  		switch tkind {
   414  		case 'I':
   415  			return "convI2I"
   416  		}
   417  	case 'T':
   418  		switch tkind {
   419  		case 'E':
   420  			switch {
   421  			case from.Size() == 2 && from.Align == 2:
   422  				return "convT2E16"
   423  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   424  				return "convT2E32"
   425  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   426  				return "convT2E64"
   427  			case from.IsString():
   428  				return "convT2Estring"
   429  			case from.IsSlice():
   430  				return "convT2Eslice"
   431  			case !types.Haspointers(from):
   432  				return "convT2Enoptr"
   433  			}
   434  			return "convT2E"
   435  		case 'I':
   436  			switch {
   437  			case from.Size() == 2 && from.Align == 2:
   438  				return "convT2I16"
   439  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   440  				return "convT2I32"
   441  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   442  				return "convT2I64"
   443  			case from.IsString():
   444  				return "convT2Istring"
   445  			case from.IsSlice():
   446  				return "convT2Islice"
   447  			case !types.Haspointers(from):
   448  				return "convT2Inoptr"
   449  			}
   450  			return "convT2I"
   451  		}
   452  	}
   453  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   454  	panic("unreachable")
   455  }
   456  
   457  // The result of walkexpr MUST be assigned back to n, e.g.
   458  // 	n.Left = walkexpr(n.Left, init)
   459  func walkexpr(n *Node, init *Nodes) *Node {
   460  	if n == nil {
   461  		return n
   462  	}
   463  
   464  	// Eagerly checkwidth all expressions for the back end.
   465  	if n.Type != nil && !n.Type.WidthCalculated() {
   466  		switch n.Type.Etype {
   467  		case TBLANK, TNIL, TIDEAL:
   468  		default:
   469  			checkwidth(n.Type)
   470  		}
   471  	}
   472  
   473  	if init == &n.Ninit {
   474  		// not okay to use n->ninit when walking n,
   475  		// because we might replace n with some other node
   476  		// and would lose the init list.
   477  		Fatalf("walkexpr init == &n->ninit")
   478  	}
   479  
   480  	if n.Ninit.Len() != 0 {
   481  		walkstmtlist(n.Ninit.Slice())
   482  		init.AppendNodes(&n.Ninit)
   483  	}
   484  
   485  	lno := setlineno(n)
   486  
   487  	if Debug['w'] > 1 {
   488  		Dump("walk-before", n)
   489  	}
   490  
   491  	if n.Typecheck() != 1 {
   492  		Fatalf("missed typecheck: %+v", n)
   493  	}
   494  
   495  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   496  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   497  		nn = typecheck(nn, Erv)
   498  		nn = walkexpr(nn, init)
   499  		nn.Left.SetNonNil(true)
   500  		return nn
   501  	}
   502  
   503  opswitch:
   504  	switch n.Op {
   505  	default:
   506  		Dump("walk", n)
   507  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   508  
   509  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   510  
   511  	case OTYPE, ONAME, OLITERAL:
   512  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   513  		// Perhaps refactor to use Node.mayBeShared for these instead.
   514  		// If these return early, make sure to still call
   515  		// stringsym for constant strings.
   516  
   517  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   518  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   519  		n.Left = walkexpr(n.Left, init)
   520  
   521  	case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR:
   522  		n.Left = walkexpr(n.Left, init)
   523  		n.Right = walkexpr(n.Right, init)
   524  
   525  	case ODOT:
   526  		usefield(n)
   527  		n.Left = walkexpr(n.Left, init)
   528  
   529  	case ODOTTYPE, ODOTTYPE2:
   530  		n.Left = walkexpr(n.Left, init)
   531  		// Set up interface type addresses for back end.
   532  		n.Right = typename(n.Type)
   533  		if n.Op == ODOTTYPE {
   534  			n.Right.Right = typename(n.Left.Type)
   535  		}
   536  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   537  			n.List.Set1(itabname(n.Type, n.Left.Type))
   538  		}
   539  
   540  	case ODOTPTR:
   541  		usefield(n)
   542  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   543  			// No actual copy will be generated, so emit an explicit nil check.
   544  			n.Left = cheapexpr(n.Left, init)
   545  
   546  			checknil(n.Left, init)
   547  		}
   548  
   549  		n.Left = walkexpr(n.Left, init)
   550  
   551  	case OLEN, OCAP:
   552  		n.Left = walkexpr(n.Left, init)
   553  
   554  		// replace len(*[10]int) with 10.
   555  		// delayed until now to preserve side effects.
   556  		t := n.Left.Type
   557  
   558  		if t.IsPtr() {
   559  			t = t.Elem()
   560  		}
   561  		if t.IsArray() {
   562  			safeexpr(n.Left, init)
   563  			nodconst(n, n.Type, t.NumElem())
   564  			n.SetTypecheck(1)
   565  		}
   566  
   567  	case OLSH, ORSH:
   568  		n.Left = walkexpr(n.Left, init)
   569  		n.Right = walkexpr(n.Right, init)
   570  		t := n.Left.Type
   571  		n.SetBounded(bounded(n.Right, 8*t.Width))
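         		// bounded reports whether the shift count is provably less than the
         		// operand width, letting the back end omit the oversized-shift check.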
   572  		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
   573  			Warn("shift bounds check elided")
   574  		}
   575  
   576  	case OCOMPLEX:
   577  		// Use results from call expression as arguments for complex.
   578  		if n.Left == nil && n.Right == nil {
   579  			n.Left = n.List.First()
   580  			n.Right = n.List.Second()
   581  		}
   582  		n.Left = walkexpr(n.Left, init)
   583  		n.Right = walkexpr(n.Right, init)
   584  
   585  	case OEQ, ONE:
   586  		n.Left = walkexpr(n.Left, init)
   587  		n.Right = walkexpr(n.Right, init)
   588  
   589  		// Disable safemode while compiling this code: the code we
   590  		// generate internally can refer to unsafe.Pointer.
   591  		// In this case it can happen if we need to generate an ==
   592  		// for a struct containing a reflect.Value, which itself has
   593  		// an unexported field of type unsafe.Pointer.
   594  		old_safemode := safemode
   595  		safemode = false
   596  		n = walkcompare(n, init)
   597  		safemode = old_safemode
   598  
   599  	case OANDAND, OOROR:
   600  		n.Left = walkexpr(n.Left, init)
   601  
   602  		// cannot put side effects from n.Right on init,
   603  		// because they cannot run before n.Left is checked.
   604  		// save elsewhere and store on the eventual n.Right.
   605  		var ll Nodes
   606  
   607  		n.Right = walkexpr(n.Right, &ll)
   608  		n.Right = addinit(n.Right, ll.Slice())
   609  		n = walkinrange(n, init)
   610  
   611  	case OPRINT, OPRINTN:
   612  		walkexprlist(n.List.Slice(), init)
   613  		n = walkprint(n, init)
   614  
   615  	case OPANIC:
   616  		n = mkcall("gopanic", nil, init, n.Left)
   617  
   618  	case ORECOVER:
   619  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   620  
   621  	case OCLOSUREVAR, OCFUNC:
   622  		n.SetAddable(true)
   623  
   624  	case OCALLINTER:
   625  		usemethod(n)
   626  		t := n.Left.Type
   627  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   628  			break
   629  		}
   630  		n.Left = walkexpr(n.Left, init)
   631  		walkexprlist(n.List.Slice(), init)
   632  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   633  		n.List.Set(reorder1(ll))
   634  
   635  	case OCALLFUNC:
   636  		if n.Left.Op == OCLOSURE {
   637  			// Transform direct call of a closure to call of a normal function.
   638  			// transformclosure already did all preparation work.
   639  
   640  			// Prepend captured variables to argument list.
   641  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   642  
   643  			n.Left.Func.Enter.Set(nil)
   644  
   645  			// Replace OCLOSURE with ONAME/PFUNC.
   646  			n.Left = n.Left.Func.Closure.Func.Nname
   647  
   648  			// Update type of OCALLFUNC node.
    649  			// Output arguments have not changed, but their offsets could have.
   650  			if n.Left.Type.NumResults() == 1 {
   651  				n.Type = n.Left.Type.Results().Field(0).Type
   652  			} else {
   653  				n.Type = n.Left.Type.Results()
   654  			}
   655  		}
   656  
   657  		t := n.Left.Type
   658  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   659  			break
   660  		}
   661  
   662  		n.Left = walkexpr(n.Left, init)
   663  		walkexprlist(n.List.Slice(), init)
   664  
   665  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   666  		n.List.Set(reorder1(ll))
   667  
   668  	case OCALLMETH:
   669  		t := n.Left.Type
   670  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   671  			break
   672  		}
   673  		n.Left = walkexpr(n.Left, init)
   674  		walkexprlist(n.List.Slice(), init)
   675  		ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
   676  		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   677  		ll = append(ll, lr...)
   678  		n.Left.Left = nil
   679  		updateHasCall(n.Left)
   680  		n.List.Set(reorder1(ll))
   681  
   682  	case OAS:
   683  		init.AppendNodes(&n.Ninit)
   684  
   685  		n.Left = walkexpr(n.Left, init)
   686  		n.Left = safeexpr(n.Left, init)
   687  
   688  		if oaslit(n, init) {
   689  			break
   690  		}
   691  
   692  		if n.Right == nil {
   693  			// TODO(austin): Check all "implicit zeroing"
   694  			break
   695  		}
   696  
   697  		if !instrumenting && iszero(n.Right) {
   698  			break
   699  		}
   700  
   701  		switch n.Right.Op {
   702  		default:
   703  			n.Right = walkexpr(n.Right, init)
   704  
   705  		case ORECV:
   706  			// x = <-c; n.Left is x, n.Right.Left is c.
   707  			// orderstmt made sure x is addressable.
   708  			n.Right.Left = walkexpr(n.Right.Left, init)
   709  
   710  			n1 := nod(OADDR, n.Left, nil)
   711  			r := n.Right.Left // the channel
   712  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   713  			n = walkexpr(n, init)
   714  			break opswitch
   715  
   716  		case OAPPEND:
   717  			// x = append(...)
   718  			r := n.Right
   719  			if r.Type.Elem().NotInHeap() {
   720  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   721  			}
   722  			if r.Isddd() {
   723  				r = appendslice(r, init) // also works for append(slice, string).
   724  			} else {
   725  				r = walkappend(r, init, n)
   726  			}
   727  			n.Right = r
   728  			if r.Op == OAPPEND {
   729  				// Left in place for back end.
   730  				// Do not add a new write barrier.
   731  				// Set up address of type for back end.
   732  				r.Left = typename(r.Type.Elem())
   733  				break opswitch
   734  			}
   735  			// Otherwise, lowered for race detector.
   736  			// Treat as ordinary assignment.
   737  		}
   738  
   739  		if n.Left != nil && n.Right != nil {
   740  			n = convas(n, init)
   741  		}
   742  
   743  	case OAS2:
   744  		init.AppendNodes(&n.Ninit)
   745  		walkexprlistsafe(n.List.Slice(), init)
   746  		walkexprlistsafe(n.Rlist.Slice(), init)
   747  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   748  		ll = reorder3(ll)
   749  		n = liststmt(ll)
   750  
   751  	// a,b,... = fn()
   752  	case OAS2FUNC:
   753  		init.AppendNodes(&n.Ninit)
   754  
   755  		r := n.Rlist.First()
   756  		walkexprlistsafe(n.List.Slice(), init)
   757  		r = walkexpr(r, init)
   758  
   759  		if isIntrinsicCall(r) {
   760  			n.Rlist.Set1(r)
   761  			break
   762  		}
   763  		init.Append(r)
   764  
   765  		ll := ascompatet(n.List, r.Type)
   766  		n = liststmt(ll)
   767  
   768  	// x, y = <-c
   769  	// orderstmt made sure x is addressable.
   770  	case OAS2RECV:
   771  		init.AppendNodes(&n.Ninit)
   772  
   773  		r := n.Rlist.First()
   774  		walkexprlistsafe(n.List.Slice(), init)
   775  		r.Left = walkexpr(r.Left, init)
   776  		var n1 *Node
   777  		if isblank(n.List.First()) {
   778  			n1 = nodnil()
   779  		} else {
   780  			n1 = nod(OADDR, n.List.First(), nil)
   781  		}
   782  		n1.Etype = 1 // addr does not escape
   783  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   784  		ok := n.List.Second()
   785  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   786  		n = nod(OAS, ok, call)
   787  		n = typecheck(n, Etop)
   788  
   789  	// a,b = m[i]
   790  	case OAS2MAPR:
   791  		init.AppendNodes(&n.Ninit)
   792  
   793  		r := n.Rlist.First()
   794  		walkexprlistsafe(n.List.Slice(), init)
   795  		r.Left = walkexpr(r.Left, init)
   796  		r.Right = walkexpr(r.Right, init)
   797  		t := r.Left.Type
   798  
   799  		fast := mapfast(t)
   800  		var key *Node
   801  		if fast != mapslow {
   802  			// fast versions take key by value
   803  			key = r.Right
   804  		} else {
   805  			// standard version takes key by reference
   806  			// orderexpr made sure key is addressable.
   807  			key = nod(OADDR, r.Right, nil)
   808  		}
   809  
   810  		// from:
   811  		//   a,b = m[i]
   812  		// to:
   813  		//   var,b = mapaccess2*(t, m, i)
   814  		//   a = *var
   815  		a := n.List.First()
   816  
   817  		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
   818  			fn := mapfn(mapaccess2[fast], t)
   819  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   820  		} else {
   821  			fn := mapfn("mapaccess2_fat", t)
   822  			z := zeroaddr(w)
   823  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   824  		}
   825  
   826  		// mapaccess2* returns a typed bool, but due to spec changes,
   827  		// the boolean result of i.(T) is now untyped so we make it the
   828  		// same type as the variable on the lhs.
   829  		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
   830  			r.Type.Field(1).Type = ok.Type
   831  		}
   832  		n.Rlist.Set1(r)
   833  		n.Op = OAS2FUNC
   834  
   835  		// don't generate a = *var if a is _
   836  		if !isblank(a) {
   837  			var_ := temp(types.NewPtr(t.Val()))
   838  			var_.SetTypecheck(1)
   839  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   840  			n.List.SetFirst(var_)
   841  			n = walkexpr(n, init)
   842  			init.Append(n)
   843  			n = nod(OAS, a, nod(OIND, var_, nil))
   844  		}
   845  
   846  		n = typecheck(n, Etop)
   847  		n = walkexpr(n, init)
   848  
   849  	case ODELETE:
   850  		init.AppendNodes(&n.Ninit)
   851  		map_ := n.List.First()
   852  		key := n.List.Second()
   853  		map_ = walkexpr(map_, init)
   854  		key = walkexpr(key, init)
   855  
   856  		t := map_.Type
   857  		fast := mapfast(t)
   858  		if fast == mapslow {
   859  			// orderstmt made sure key is addressable.
   860  			key = nod(OADDR, key, nil)
   861  		}
   862  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   863  
   864  	case OAS2DOTTYPE:
   865  		walkexprlistsafe(n.List.Slice(), init)
   866  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   867  
   868  	case OCONVIFACE:
   869  		n.Left = walkexpr(n.Left, init)
   870  
   871  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
   872  		if isdirectiface(n.Left.Type) {
   873  			var t *Node
   874  			if n.Type.IsEmptyInterface() {
   875  				t = typename(n.Left.Type)
   876  			} else {
   877  				t = itabname(n.Left.Type, n.Type)
   878  			}
   879  			l := nod(OEFACE, t, n.Left)
   880  			l.Type = n.Type
   881  			l.SetTypecheck(n.Typecheck())
   882  			n = l
   883  			break
   884  		}
   885  
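         		// Lazily create references to the runtime's staticbytes and zerobase
         		// symbols; the allocation-free conversions below point at them.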
   886  		if staticbytes == nil {
   887  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   888  			staticbytes.SetClass(PEXTERN)
   889  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   890  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   891  			zerobase.SetClass(PEXTERN)
   892  			zerobase.Type = types.Types[TUINTPTR]
   893  		}
   894  
   895  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   896  		// by using an existing addressable value identical to n.Left
   897  		// or creating one on the stack.
   898  		var value *Node
   899  		switch {
   900  		case n.Left.Type.Size() == 0:
   901  			// n.Left is zero-sized. Use zerobase.
   902  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   903  			value = zerobase
   904  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   905  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   906  			n.Left = cheapexpr(n.Left, init)
   907  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   908  			value.SetBounded(true)
   909  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   910  			// n.Left is a readonly global; use it directly.
   911  			value = n.Left
   912  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   913  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   914  			value = temp(n.Left.Type)
   915  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   916  		}
   917  
   918  		if value != nil {
   919  			// Value is identical to n.Left.
   920  			// Construct the interface directly: {type/itab, &value}.
   921  			var t *Node
   922  			if n.Type.IsEmptyInterface() {
   923  				t = typename(n.Left.Type)
   924  			} else {
   925  				t = itabname(n.Left.Type, n.Type)
   926  			}
   927  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   928  			l.Type = n.Type
   929  			l.SetTypecheck(n.Typecheck())
   930  			n = l
   931  			break
   932  		}
   933  
   934  		// Implement interface to empty interface conversion.
   935  		// tmp = i.itab
   936  		// if tmp != nil {
   937  		//    tmp = tmp.type
   938  		// }
   939  		// e = iface{tmp, i.data}
   940  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   941  			// Evaluate the input interface.
   942  			c := temp(n.Left.Type)
   943  			init.Append(nod(OAS, c, n.Left))
   944  
   945  			// Get the itab out of the interface.
   946  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   947  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   948  
   949  			// Get the type out of the itab.
   950  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   951  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   952  			init.Append(nif)
   953  
   954  			// Build the result.
   955  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   956  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   957  			e.SetTypecheck(1)
   958  			n = e
   959  			break
   960  		}
   961  
   962  		var ll []*Node
   963  		if n.Type.IsEmptyInterface() {
   964  			if !n.Left.Type.IsInterface() {
   965  				ll = append(ll, typename(n.Left.Type))
   966  			}
   967  		} else {
   968  			if n.Left.Type.IsInterface() {
   969  				ll = append(ll, typename(n.Type))
   970  			} else {
   971  				ll = append(ll, itabname(n.Left.Type, n.Type))
   972  			}
   973  		}
   974  
   975  		if n.Left.Type.IsInterface() {
   976  			ll = append(ll, n.Left)
   977  		} else {
   978  			// regular types are passed by reference to avoid C vararg calls
   979  			// orderexpr arranged for n.Left to be a temporary for all
   980  			// the conversions it could see. comparison of an interface
   981  			// with a non-interface, especially in a switch on interface value
   982  			// with non-interface cases, is not visible to orderstmt, so we
   983  			// have to fall back on allocating a temp here.
   984  			if islvalue(n.Left) {
   985  				ll = append(ll, nod(OADDR, n.Left, nil))
   986  			} else {
   987  				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
   988  			}
   989  			dowidth(n.Left.Type)
   990  		}
   991  
   992  		fn := syslook(convFuncName(n.Left.Type, n.Type))
   993  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   994  		dowidth(fn.Type)
   995  		n = nod(OCALL, fn, nil)
   996  		n.List.Set(ll)
   997  		n = typecheck(n, Erv)
   998  		n = walkexpr(n, init)
   999  
  1000  	case OCONV, OCONVNOP:
  1001  		if thearch.LinkArch.Family == sys.ARM || thearch.LinkArch.Family == sys.MIPS {
  1002  			if n.Left.Type.IsFloat() {
  1003  				if n.Type.Etype == TINT64 {
  1004  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1005  					break
  1006  				}
  1007  
  1008  				if n.Type.Etype == TUINT64 {
  1009  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1010  					break
  1011  				}
  1012  			}
  1013  
  1014  			if n.Type.IsFloat() {
  1015  				if n.Left.Type.Etype == TINT64 {
  1016  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1017  					break
  1018  				}
  1019  
  1020  				if n.Left.Type.Etype == TUINT64 {
  1021  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1022  					break
  1023  				}
  1024  			}
  1025  		}
  1026  
  1027  		if thearch.LinkArch.Family == sys.I386 {
  1028  			if n.Left.Type.IsFloat() {
  1029  				if n.Type.Etype == TINT64 {
  1030  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1031  					break
  1032  				}
  1033  
  1034  				if n.Type.Etype == TUINT64 {
  1035  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1036  					break
  1037  				}
  1038  				if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
  1039  					n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1040  					break
  1041  				}
  1042  			}
  1043  			if n.Type.IsFloat() {
  1044  				if n.Left.Type.Etype == TINT64 {
  1045  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1046  					break
  1047  				}
  1048  
  1049  				if n.Left.Type.Etype == TUINT64 {
  1050  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1051  					break
  1052  				}
  1053  				if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
  1054  					n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
  1055  					break
  1056  				}
  1057  			}
  1058  		}
  1059  
  1060  		n.Left = walkexpr(n.Left, init)
  1061  
  1062  	case OANDNOT:
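         		// Lower x &^ y to x & ^y.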
  1063  		n.Left = walkexpr(n.Left, init)
  1064  		n.Op = OAND
  1065  		n.Right = nod(OCOM, n.Right, nil)
  1066  		n.Right = typecheck(n.Right, Erv)
  1067  		n.Right = walkexpr(n.Right, init)
  1068  
  1069  	case ODIV, OMOD:
  1070  		n.Left = walkexpr(n.Left, init)
  1071  		n.Right = walkexpr(n.Right, init)
  1072  
  1073  		// rewrite complex div into function call.
  1074  		et := n.Left.Type.Etype
  1075  
  1076  		if isComplex[et] && n.Op == ODIV {
  1077  			t := n.Type
  1078  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1079  			n = conv(n, t)
  1080  			break
  1081  		}
  1082  
  1083  		// Nothing to do for float divisions.
  1084  		if isFloat[et] {
  1085  			break
  1086  		}
  1087  
  1088  		// rewrite 64-bit div and mod on 32-bit architectures.
  1089  		// TODO: Remove this code once we can introduce
  1090  		// runtime calls late in SSA processing.
  1091  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1092  			if n.Right.Op == OLITERAL {
  1093  				// Leave div/mod by constant powers of 2.
  1094  				// The SSA backend will handle those.
  1095  				switch et {
  1096  				case TINT64:
  1097  					c := n.Right.Int64()
  1098  					if c < 0 {
  1099  						c = -c
  1100  					}
  1101  					if c != 0 && c&(c-1) == 0 {
  1102  						break opswitch
  1103  					}
  1104  				case TUINT64:
  1105  					c := uint64(n.Right.Int64())
  1106  					if c != 0 && c&(c-1) == 0 {
  1107  						break opswitch
  1108  					}
  1109  				}
  1110  			}
  1111  			var fn string
  1112  			if et == TINT64 {
  1113  				fn = "int64"
  1114  			} else {
  1115  				fn = "uint64"
  1116  			}
  1117  			if n.Op == ODIV {
  1118  				fn += "div"
  1119  			} else {
  1120  				fn += "mod"
  1121  			}
  1122  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1123  		}
  1124  
  1125  	case OINDEX:
  1126  		n.Left = walkexpr(n.Left, init)
  1127  
  1128  		// save the original node for bounds checking elision.
  1129  		// If it was a ODIV/OMOD walk might rewrite it.
  1130  		r := n.Right
  1131  
  1132  		n.Right = walkexpr(n.Right, init)
  1133  
  1134  		// if range of type cannot exceed static array bound,
  1135  		// disable bounds check.
  1136  		if n.Bounded() {
  1137  			break
  1138  		}
  1139  		t := n.Left.Type
  1140  		if t != nil && t.IsPtr() {
  1141  			t = t.Elem()
  1142  		}
  1143  		if t.IsArray() {
  1144  			n.SetBounded(bounded(r, t.NumElem()))
  1145  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1146  				Warn("index bounds check elided")
  1147  			}
  1148  			if smallintconst(n.Right) && !n.Bounded() {
  1149  				yyerror("index out of bounds")
  1150  			}
  1151  		} else if Isconst(n.Left, CTSTR) {
  1152  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1153  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1154  				Warn("index bounds check elided")
  1155  			}
  1156  			if smallintconst(n.Right) && !n.Bounded() {
  1157  				yyerror("index out of bounds")
  1158  			}
  1159  		}
  1160  
  1161  		if Isconst(n.Right, CTINT) {
  1162  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1163  				yyerror("index out of bounds")
  1164  			}
  1165  		}
  1166  
  1167  	case OINDEXMAP:
  1168  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
  1169  		n.Left = walkexpr(n.Left, init)
  1170  		n.Right = walkexpr(n.Right, init)
  1171  		map_ := n.Left
  1172  		key := n.Right
  1173  		t := map_.Type
  1174  		if n.Etype == 1 {
  1175  			// This m[k] expression is on the left-hand side of an assignment.
  1176  			fast := mapfast(t)
  1177  			if fast == mapslow {
  1178  				// standard version takes key by reference.
  1179  				// orderexpr made sure key is addressable.
  1180  				key = nod(OADDR, key, nil)
  1181  			}
  1182  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1183  		} else {
  1184  			// m[k] is not the target of an assignment.
  1185  			fast := mapfast(t)
  1186  			if fast == mapslow {
  1187  				// standard version takes key by reference.
  1188  				// orderexpr made sure key is addressable.
  1189  				key = nod(OADDR, key, nil)
  1190  			}
  1191  
  1192  			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
  1193  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
  1194  			} else {
  1195  				z := zeroaddr(w)
  1196  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
  1197  			}
  1198  		}
  1199  		n.Type = types.NewPtr(t.Val())
  1200  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1201  		n = nod(OIND, n, nil)
  1202  		n.Type = t.Val()
  1203  		n.SetTypecheck(1)
  1204  
  1205  	case ORECV:
  1206  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1207  
  1208  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1209  		n.Left = walkexpr(n.Left, init)
  1210  		low, high, max := n.SliceBounds()
  1211  		low = walkexpr(low, init)
  1212  		if low != nil && iszero(low) {
  1213  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1214  			low = nil
  1215  		}
  1216  		high = walkexpr(high, init)
  1217  		max = walkexpr(max, init)
  1218  		n.SetSliceBounds(low, high, max)
  1219  		if n.Op.IsSlice3() {
  1220  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1221  				// Reduce x[i:j:cap(x)] to x[i:j].
  1222  				if n.Op == OSLICE3 {
  1223  					n.Op = OSLICE
  1224  				} else {
  1225  					n.Op = OSLICEARR
  1226  				}
  1227  				n = reduceSlice(n)
  1228  			}
  1229  		} else {
  1230  			n = reduceSlice(n)
  1231  		}
  1232  
  1233  	case ONEW:
  1234  		if n.Esc == EscNone {
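         			// The allocation does not escape: use a zeroed stack temporary and
         			// take its address instead of calling the runtime allocator.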
  1235  			if n.Type.Elem().Width >= 1<<16 {
  1236  				Fatalf("large ONEW with EscNone: %v", n)
  1237  			}
  1238  			r := temp(n.Type.Elem())
  1239  			r = nod(OAS, r, nil) // zero temp
  1240  			r = typecheck(r, Etop)
  1241  			init.Append(r)
  1242  			r = nod(OADDR, r.Left, nil)
  1243  			r = typecheck(r, Erv)
  1244  			n = r
  1245  		} else {
  1246  			n = callnew(n.Type.Elem())
  1247  		}
  1248  
  1249  	case OCMPSTR:
  1250  		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
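         		// If one side is a two-operand concatenation ending in the same constant
         		// string that the other side equals, the comparison reduces to a length
         		// test on the remaining operand: s+"x" == "x" becomes len(s) == 0.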
  1251  		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  1252  			// TODO(marvin): Fix Node.EType type union.
  1253  			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  1254  			r = typecheck(r, Erv)
  1255  			r = walkexpr(r, init)
  1256  			r.Type = n.Type
  1257  			n = r
  1258  			break
  1259  		}
  1260  
  1261  		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
  1262  		var cs, ncs *Node // const string, non-const string
  1263  		switch {
  1264  		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  1265  			// ignore; will be constant evaluated
  1266  		case Isconst(n.Left, CTSTR):
  1267  			cs = n.Left
  1268  			ncs = n.Right
  1269  		case Isconst(n.Right, CTSTR):
  1270  			cs = n.Right
  1271  			ncs = n.Left
  1272  		}
  1273  		if cs != nil {
  1274  			cmp := Op(n.Etype)
  1275  			// maxRewriteLen was chosen empirically.
  1276  			// It is the value that minimizes cmd/go file size
  1277  			// across most architectures.
  1278  			// See the commit description for CL 26758 for details.
  1279  			maxRewriteLen := 6
   1280  			// Some architectures can load an unaligned byte sequence as one word.
  1281  			// So we can cover longer strings with the same amount of code.
  1282  			canCombineLoads := false
  1283  			combine64bit := false
  1284  			// TODO: does this improve performance on any other architectures?
  1285  			switch thearch.LinkArch.Family {
  1286  			case sys.AMD64:
   1287  				// Larger compares require longer instructions, so keep this reasonably low.
  1288  				// Data from CL 26758 shows that longer strings are rare.
  1289  				// If we really want we can do 16 byte SSE comparisons in the future.
  1290  				maxRewriteLen = 16
  1291  				canCombineLoads = true
  1292  				combine64bit = true
  1293  			case sys.I386:
  1294  				maxRewriteLen = 8
  1295  				canCombineLoads = true
  1296  			}
  1297  			var and Op
  1298  			switch cmp {
  1299  			case OEQ:
  1300  				and = OANDAND
  1301  			case ONE:
  1302  				and = OOROR
  1303  			default:
  1304  				// Don't do byte-wise comparisons for <, <=, etc.
  1305  				// They're fairly complicated.
  1306  				// Length-only checks are ok, though.
  1307  				maxRewriteLen = 0
  1308  			}
  1309  			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  1310  				if len(s) > 0 {
  1311  					ncs = safeexpr(ncs, init)
  1312  				}
  1313  				// TODO(marvin): Fix Node.EType type union.
  1314  				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  1315  				remains := len(s)
  1316  				for i := 0; remains > 0; {
  1317  					if remains == 1 || !canCombineLoads {
  1318  						cb := nodintconst(int64(s[i]))
  1319  						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  1320  						r = nod(and, r, nod(cmp, ncb, cb))
  1321  						remains--
  1322  						i++
  1323  						continue
  1324  					}
  1325  					var step int
  1326  					var convType *types.Type
  1327  					switch {
  1328  					case remains >= 8 && combine64bit:
  1329  						convType = types.Types[TINT64]
  1330  						step = 8
  1331  					case remains >= 4:
  1332  						convType = types.Types[TUINT32]
  1333  						step = 4
  1334  					case remains >= 2:
  1335  						convType = types.Types[TUINT16]
  1336  						step = 2
  1337  					}
  1338  					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  1339  					ncsubstr = conv(ncsubstr, convType)
  1340  					csubstr := int64(s[i])
  1341  					// Calculate large constant from bytes as sequence of shifts and ors.
  1342  					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  1343  					// ssa will combine this into a single large load.
  1344  					for offset := 1; offset < step; offset++ {
  1345  						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  1346  						b = conv(b, convType)
  1347  						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  1348  						ncsubstr = nod(OOR, ncsubstr, b)
  1349  						csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset)
  1350  					}
  1351  					csubstrPart := nodintconst(csubstr)
   1352  					// Compare "step" bytes at once.
  1353  					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  1354  					remains -= step
  1355  					i += step
  1356  				}
  1357  				r = typecheck(r, Erv)
  1358  				r = walkexpr(r, init)
  1359  				r.Type = n.Type
  1360  				n = r
  1361  				break
  1362  			}
  1363  		}
  1364  
  1365  		var r *Node
  1366  		// TODO(marvin): Fix Node.EType type union.
  1367  		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
  1368  			// prepare for rewrite below
  1369  			n.Left = cheapexpr(n.Left, init)
  1370  			n.Right = cheapexpr(n.Right, init)
  1371  
  1372  			lstr := conv(n.Left, types.Types[TSTRING])
  1373  			rstr := conv(n.Right, types.Types[TSTRING])
  1374  			lptr := nod(OSPTR, lstr, nil)
  1375  			rptr := nod(OSPTR, rstr, nil)
  1376  			llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR])
  1377  			rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR])
  1378  
  1379  			fn := syslook("memequal")
  1380  			fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
  1381  			r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen)
  1382  
  1383  			// quick check of len before full compare for == or !=.
  1384  			// memequal then tests equality up to length len.
  1385  			// TODO(marvin): Fix Node.EType type union.
  1386  			if Op(n.Etype) == OEQ {
  1387  				// len(left) == len(right) && memequal(left, right, len)
  1388  				r = nod(OANDAND, nod(OEQ, llen, rlen), r)
  1389  			} else {
  1390  				// len(left) != len(right) || !memequal(left, right, len)
  1391  				r = nod(ONOT, r, nil)
  1392  				r = nod(OOROR, nod(ONE, llen, rlen), r)
  1393  			}
  1394  
  1395  			r = typecheck(r, Erv)
  1396  			r = walkexpr(r, nil)
  1397  		} else {
  1398  			// sys_cmpstring(s1, s2) :: 0
  1399  			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1400  			// TODO(marvin): Fix Node.EType type union.
  1401  			r = nod(Op(n.Etype), r, nodintconst(0))
  1402  		}
  1403  
  1404  		r = typecheck(r, Erv)
  1405  		if !n.Type.IsBoolean() {
  1406  			Fatalf("cmp %v", n.Type)
  1407  		}
  1408  		r.Type = n.Type
  1409  		n = r
  1410  
  1411  	case OADDSTR:
  1412  		n = addstr(n, init)
  1413  
  1414  	case OAPPEND:
  1415  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1416  		Fatalf("append outside assignment")
  1417  
  1418  	case OCOPY:
  1419  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1420  
  1421  		// cannot use chanfn - closechan takes any, not chan any
  1422  	case OCLOSE:
  1423  		fn := syslook("closechan")
  1424  
  1425  		fn = substArgTypes(fn, n.Left.Type)
  1426  		n = mkcall1(fn, nil, init, n.Left)
  1427  
  1428  	case OMAKECHAN:
   1429  		// When the size fits into an int, use makechan instead of
   1430  		// makechan64; makechan is faster and shorter on 32-bit platforms.
  1431  		size := n.Left
  1432  		fnname := "makechan64"
  1433  		argtype := types.Types[TINT64]
  1434  
  1435  		// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1436  		// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1437  		// will be handled by the negative range checks in makechan during runtime.
  1438  		if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1439  			fnname = "makechan"
  1440  			argtype = types.Types[TINT]
  1441  		}
  1442  
  1443  		n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
  1444  
  1445  	case OMAKEMAP:
  1446  		t := n.Type
  1447  		hmapType := hmap(t)
  1448  
  1449  		// var h *hmap
  1450  		var h *Node
  1451  		if n.Esc == EscNone {
  1452  			// Allocate hmap and one bucket on stack.
  1453  
  1454  			// var hv hmap
  1455  			hv := temp(hmapType)
  1456  			zero := nod(OAS, hv, nil)
  1457  			zero = typecheck(zero, Etop)
  1458  			init.Append(zero)
  1459  			// h = &hv
  1460  			h = nod(OADDR, hv, nil)
  1461  
  1462  			// Allocate one bucket pointed to by hmap.buckets on stack.
   1463  			// The maximum key/value size is 128 bytes; larger objects
   1464  			// are stored with an indirection. So the maximum bucket size is 2048+eps.
  1465  
  1466  			// var bv bmap
  1467  			bv := temp(bmap(t))
  1468  
  1469  			zero = nod(OAS, bv, nil)
  1470  			zero = typecheck(zero, Etop)
  1471  			init.Append(zero)
  1472  
  1473  			// b = &bv
  1474  			b := nod(OADDR, bv, nil)
  1475  
  1476  			// h.buckets = b
  1477  			bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
  1478  			na := nod(OAS, nodSym(ODOT, h, bsym), b)
  1479  			na = typecheck(na, Etop)
  1480  			init.Append(na)
  1481  
  1482  		} else {
  1483  			// h = nil
  1484  			h = nodnil()
  1485  		}
  1486  
   1487  		// When the hint fits into an int, use makemap instead of
   1488  		// makemap64; makemap is faster and shorter on 32-bit platforms.
  1489  		hint := n.Left
  1490  		fnname := "makemap64"
  1491  		argtype := types.Types[TINT64]
  1492  
  1493  		// Type checking guarantees that TIDEAL hint is positive and fits in an int.
   1494  		// See the checkmake call in the TMAP case of OMAKE in typecheck1's op switch.
  1495  		// The case of hint overflow when converting TUINT or TUINTPTR to TINT
  1496  		// will be handled by the negative range checks in makemap during runtime.
  1497  		if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1498  			fnname = "makemap"
  1499  			argtype = types.Types[TINT]
  1500  		}
  1501  
  1502  		fn := syslook(fnname)
  1503  		fn = substArgTypes(fn, hmapType, t.Key(), t.Val())
  1504  		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h)
  1505  
  1506  	case OMAKESLICE:
  1507  		l := n.Left
  1508  		r := n.Right
  1509  		if r == nil {
  1510  			r = safeexpr(l, init)
  1511  			l = r
  1512  		}
  1513  		t := n.Type
  1514  		if n.Esc == EscNone {
  1515  			if !isSmallMakeSlice(n) {
  1516  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1517  			}
  1518  			// var arr [r]T
  1519  			// n = arr[:l]
  1520  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1521  			var_ := temp(t)
  1522  			a := nod(OAS, var_, nil) // zero temp
  1523  			a = typecheck(a, Etop)
  1524  			init.Append(a)
  1525  			r := nod(OSLICE, var_, nil) // arr[:l]
  1526  			r.SetSliceBounds(nil, l, nil)
  1527  			r = conv(r, n.Type) // in case n.Type is named.
  1528  			r = typecheck(r, Erv)
  1529  			r = walkexpr(r, init)
  1530  			n = r
  1531  		} else {
  1532  			// n escapes; set up a call to makeslice.
   1533  			// When len and cap fit into an int, use makeslice instead of
   1534  			// makeslice64; makeslice is faster and shorter on 32-bit platforms.
  1535  
  1536  			if t.Elem().NotInHeap() {
  1537  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1538  			}
  1539  
  1540  			len, cap := l, r
  1541  
  1542  			fnname := "makeslice64"
  1543  			argtype := types.Types[TINT64]
  1544  
  1545  			// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
  1546  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1547  			// will be handled by the negative range checks in makeslice during runtime.
  1548  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1549  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1550  				fnname = "makeslice"
  1551  				argtype = types.Types[TINT]
  1552  			}
  1553  
  1554  			fn := syslook(fnname)
  1555  			fn = substArgTypes(fn, t.Elem()) // any-1
  1556  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1557  		}
  1558  
  1559  	case ORUNESTR:
  1560  		a := nodnil()
  1561  		if n.Esc == EscNone {
  1562  			t := types.NewArray(types.Types[TUINT8], 4)
  1563  			var_ := temp(t)
  1564  			a = nod(OADDR, var_, nil)
  1565  		}
  1566  
  1567  		// intstring(*[4]byte, rune)
  1568  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1569  
  1570  	case OARRAYBYTESTR:
  1571  		a := nodnil()
  1572  		if n.Esc == EscNone {
  1573  			// Create temporary buffer for string on stack.
  1574  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1575  
  1576  			a = nod(OADDR, temp(t), nil)
  1577  		}
  1578  
  1579  		// slicebytetostring(*[32]byte, []byte) string;
  1580  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1581  
  1582  		// slicebytetostringtmp([]byte) string;
  1583  	case OARRAYBYTESTRTMP:
  1584  		n.Left = walkexpr(n.Left, init)
  1585  
  1586  		if !instrumenting {
  1587  			// Let the backend handle OARRAYBYTESTRTMP directly
  1588  			// to avoid a function call to slicebytetostringtmp.
  1589  			break
  1590  		}
  1591  
  1592  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1593  
  1594  		// slicerunetostring(*[32]byte, []rune) string;
  1595  	case OARRAYRUNESTR:
  1596  		a := nodnil()
  1597  
  1598  		if n.Esc == EscNone {
  1599  			// Create temporary buffer for string on stack.
  1600  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1601  
  1602  			a = nod(OADDR, temp(t), nil)
  1603  		}
  1604  
  1605  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1606  
   1607  		// stringtoslicebyte(*[32]byte, string) []byte;
  1608  	case OSTRARRAYBYTE:
  1609  		a := nodnil()
  1610  
  1611  		if n.Esc == EscNone {
  1612  			// Create temporary buffer for slice on stack.
  1613  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1614  
  1615  			a = nod(OADDR, temp(t), nil)
  1616  		}
  1617  
  1618  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1619  
  1620  	case OSTRARRAYBYTETMP:
  1621  		// []byte(string) conversion that creates a slice
  1622  		// referring to the actual string bytes.
  1623  		// This conversion is handled later by the backend and
  1624  		// is only for use by internal compiler optimizations
  1625  		// that know that the slice won't be mutated.
  1626  		// The only such case today is:
  1627  		// for i, c := range []byte(string)
  1628  		n.Left = walkexpr(n.Left, init)
  1629  
  1630  		// stringtoslicerune(*[32]rune, string) []rune
  1631  	case OSTRARRAYRUNE:
  1632  		a := nodnil()
  1633  
  1634  		if n.Esc == EscNone {
  1635  			// Create temporary buffer for slice on stack.
  1636  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1637  
  1638  			a = nod(OADDR, temp(t), nil)
  1639  		}
  1640  
  1641  		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
  1642  
  1643  		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  1644  	case OCMPIFACE:
  1645  		if !eqtype(n.Left.Type, n.Right.Type) {
  1646  			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  1647  		}
  1648  		var fn *Node
  1649  		if n.Left.Type.IsEmptyInterface() {
  1650  			fn = syslook("efaceeq")
  1651  		} else {
  1652  			fn = syslook("ifaceeq")
  1653  		}
  1654  
  1655  		n.Right = cheapexpr(n.Right, init)
  1656  		n.Left = cheapexpr(n.Left, init)
  1657  		lt := nod(OITAB, n.Left, nil)
  1658  		rt := nod(OITAB, n.Right, nil)
  1659  		ld := nod(OIDATA, n.Left, nil)
  1660  		rd := nod(OIDATA, n.Right, nil)
  1661  		ld.Type = types.Types[TUNSAFEPTR]
  1662  		rd.Type = types.Types[TUNSAFEPTR]
  1663  		ld.SetTypecheck(1)
  1664  		rd.SetTypecheck(1)
  1665  		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  1666  
  1667  		// Check itable/type before full compare.
  1668  		// Note: short-circuited because order matters.
  1669  		// TODO(marvin): Fix Node.EType type union.
  1670  		var cmp *Node
  1671  		if Op(n.Etype) == OEQ {
  1672  			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  1673  		} else {
  1674  			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  1675  		}
  1676  		cmp = typecheck(cmp, Erv)
  1677  		cmp = walkexpr(cmp, init)
  1678  		cmp.Type = n.Type
  1679  		n = cmp
  1680  
  1681  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1682  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1683  			// n can be directly represented in the read-only data section.
  1684  			// Make direct reference to the static data. See issue 12841.
  1685  			vstat := staticname(n.Type)
  1686  			vstat.Name.SetReadonly(true)
  1687  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1688  			n = vstat
  1689  			n = typecheck(n, Erv)
  1690  			break
  1691  		}
  1692  		var_ := temp(n.Type)
  1693  		anylit(n, var_, init)
  1694  		n = var_
  1695  
  1696  	case OSEND:
  1697  		n1 := n.Right
  1698  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1699  		n1 = walkexpr(n1, init)
  1700  		n1 = nod(OADDR, n1, nil)
  1701  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1702  
  1703  	case OCLOSURE:
  1704  		n = walkclosure(n, init)
  1705  
  1706  	case OCALLPART:
  1707  		n = walkpartialcall(n, init)
  1708  	}
  1709  
  1710  	// Expressions that are constant at run time but not
  1711  	// considered const by the language spec are not turned into
  1712  	// constants until walk. For example, if n is y%1 == 0, the
  1713  	// walk of y%1 may have replaced it by 0.
  1714  	// Check whether n with its updated args is itself now a constant.
  1715  	t := n.Type
  1716  	evconst(n)
  1717  	if n.Type != t {
  1718  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1719  	}
  1720  	if n.Op == OLITERAL {
  1721  		n = typecheck(n, Erv)
  1722  		// Emit string symbol now to avoid emitting
  1723  		// any concurrently during the backend.
  1724  		if s, ok := n.Val().U.(string); ok {
  1725  			_ = stringsym(s)
  1726  		}
  1727  	}
  1728  
  1729  	updateHasCall(n)
  1730  
  1731  	if Debug['w'] != 0 && n != nil {
  1732  		Dump("walk", n)
  1733  	}
  1734  
  1735  	lineno = lno
  1736  	return n
  1737  }
  1738  
  1739  // TODO(josharian): combine this with its caller and simplify
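        // reduceSlice drops redundant bounds from the slice expression n:
        // x[i:len(x)] becomes x[i:], and a plain x[:] reduces to x.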
  1740  func reduceSlice(n *Node) *Node {
  1741  	low, high, max := n.SliceBounds()
  1742  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1743  		// Reduce x[i:len(x)] to x[i:].
  1744  		high = nil
  1745  	}
  1746  	n.SetSliceBounds(low, high, max)
  1747  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1748  		// Reduce x[:] to x.
  1749  		if Debug_slice > 0 {
  1750  			Warn("slice: omit slice operation")
  1751  		}
  1752  		return n.Left
  1753  	}
  1754  	return n
  1755  }
  1756  
  1757  func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1758  	// convas will turn map assigns into function calls,
  1759  	// making it impossible for reorder3 to work.
  1760  	n := nod(OAS, l, r)
  1761  
  1762  	if l.Op == OINDEXMAP {
  1763  		return n
  1764  	}
  1765  
  1766  	return convas(n, init)
  1767  }
  1768  
  1769  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1770  	// check assignment of an expression list to
  1771  	// an expression list. called in
  1772  	//	expr-list = expr-list
  1773  
  1774  	// ensure order of evaluation for function calls
  1775  	for i := range nl {
  1776  		nl[i] = safeexpr(nl[i], init)
  1777  	}
  1778  	for i1 := range nr {
  1779  		nr[i1] = safeexpr(nr[i1], init)
  1780  	}
  1781  
  1782  	var nn []*Node
  1783  	i := 0
  1784  	for ; i < len(nl); i++ {
  1785  		if i >= len(nr) {
  1786  			break
  1787  		}
  1788  		// Do not generate 'x = x' during return. See issue 4014.
  1789  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1790  			continue
  1791  		}
  1792  		nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1793  	}
  1794  
  1795  	// cannot happen: caller checked that lists had same length
  1796  	if i < len(nl) || i < len(nr) {
  1797  		var nln, nrn Nodes
  1798  		nln.Set(nl)
  1799  		nrn.Set(nr)
  1800  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1801  	}
  1802  	return nn
  1803  }
  1804  
  1805  // fncall reports whether assigning an rvalue of type rt
  1806  // to the lvalue l requires a function call, either to
  1807  // evaluate the lvalue or to perform the conversion
  1808  // between the types.
  1809  func fncall(l *Node, rt *types.Type) bool {
  1810  	if l.HasCall() || l.Op == OINDEXMAP {
  1811  		return true
  1812  	}
  1813  	if needwritebarrier(l) {
  1814  		return true
  1815  	}
  1816  	if eqtype(l.Type, rt) {
  1817  		return false
  1818  	}
  1819  	return true
  1820  }
  1821  
  1822  // check assignment of a type list to
  1823  // an expression list. called in
  1824  //	expr-list = func()
  1825  func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1826  	if nl.Len() != nr.NumFields() {
  1827  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1828  	}
  1829  
  1830  	var nn, mm Nodes
  1831  	for i, l := range nl.Slice() {
  1832  		if isblank(l) {
  1833  			continue
  1834  		}
  1835  		r := nr.Field(i)
  1836  
  1837  		// any lv that causes a fn call must be
  1838  		// deferred until all the return arguments
  1839  		// have been pulled from the output arguments
  1840  		if fncall(l, r.Type) {
  1841  			tmp := temp(r.Type)
  1842  			tmp = typecheck(tmp, Erv)
  1843  			a := nod(OAS, l, tmp)
  1844  			a = convas(a, &mm)
  1845  			mm.Append(a)
  1846  			l = tmp
  1847  		}
  1848  
  1849  		a := nod(OAS, l, nodarg(r, 0))
  1850  		a = convas(a, &nn)
  1851  		updateHasCall(a)
  1852  		if a.HasCall() {
  1853  			Dump("ascompatet ucount", a)
  1854  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1855  		}
  1856  
  1857  		nn.Append(a)
  1858  	}
  1859  	return append(nn.Slice(), mm.Slice()...)
  1860  }
  1861  
  1862  // nodarg returns a Node for the function argument denoted by t,
  1863  // which is either the entire function argument or result struct (t is a struct *types.Type)
  1864  // or a specific argument (t is a *types.Field within a struct *types.Type).
  1865  //
  1866  // If fp is 0, the node is for use by a caller invoking the given
  1867  // function, preparing the arguments before the call
  1868  // or retrieving the results after the call.
  1869  // In this case, the node will correspond to an outgoing argument
  1870  // slot like 8(SP).
  1871  //
  1872  // If fp is 1, the node is for use by the function itself
  1873  // (the callee), to retrieve its arguments or write its results.
  1874  // In this case the node will be an ONAME with an appropriate
  1875  // type and offset.
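        // For example, with fp == 1 a named parameter resolves to its declared
        // ONAME in Curfn.Func.Dcl, while with fp == 0 the same parameter is
        // addressed as an OINDREGSP slot in the outgoing argument area.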
  1876  func nodarg(t interface{}, fp int) *Node {
  1877  	var n *Node
  1878  
  1879  	var funarg types.Funarg
  1880  	switch t := t.(type) {
  1881  	default:
  1882  		Fatalf("bad nodarg %T(%v)", t, t)
  1883  
  1884  	case *types.Type:
  1885  		// Entire argument struct, not just one arg
  1886  		if !t.IsFuncArgStruct() {
  1887  			Fatalf("nodarg: bad type %v", t)
  1888  		}
  1889  		funarg = t.StructType().Funarg
  1890  
  1891  		// Build fake variable name for whole arg struct.
  1892  		n = newname(lookup(".args"))
  1893  		n.Type = t
  1894  		first := t.Field(0)
  1895  		if first == nil {
  1896  			Fatalf("nodarg: bad struct")
  1897  		}
  1898  		if first.Offset == BADWIDTH {
  1899  			Fatalf("nodarg: offset not computed for %v", t)
  1900  		}
  1901  		n.Xoffset = first.Offset
  1902  
  1903  	case *types.Field:
  1904  		funarg = t.Funarg
  1905  		if fp == 1 {
  1906  			// NOTE(rsc): This should be using t.Nname directly,
  1907  			// except in the case where t.Nname.Sym is the blank symbol and
  1908  			// so the assignment would be discarded during code generation.
  1909  			// In that case we need to make a new node, and there is no harm
  1910  			// In that case we need to make a new node, and doing so does
  1911  			// no harm to the optimization passes. But otherwise we should
  1912  			// The extra Fatalf checks here are verifying that this is the case,
  1913  			// without changing the actual logic (at time of writing, it's getting
  1914  			// toward time for the Go 1.7 beta).
  1915  			// At some quieter time (assuming we've never seen these Fatalfs happen)
  1916  			// we could change this code to use "expect" directly.
  1917  			expect := asNode(t.Nname)
  1918  			if expect.isParamHeapCopy() {
  1919  				expect = expect.Name.Param.Stackcopy
  1920  			}
  1921  
  1922  			for _, n := range Curfn.Func.Dcl {
  1923  				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
  1924  					if n != expect {
  1925  						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
  1926  					}
  1927  					return n
  1928  				}
  1929  			}
  1930  
  1931  			if !expect.Sym.IsBlank() {
  1932  				Fatalf("nodarg: did not find node in dcl list: %v", expect)
  1933  			}
  1934  		}
  1935  
  1936  		// Build fake name for individual variable.
  1937  		// This is safe because if there was a real declared name
  1938  		// we'd have used it above.
  1939  		n = newname(lookup("__"))
  1940  		n.Type = t.Type
  1941  		if t.Offset == BADWIDTH {
  1942  			Fatalf("nodarg: offset not computed for %v", t)
  1943  		}
  1944  		n.Xoffset = t.Offset
  1945  		n.Orig = asNode(t.Nname)
  1946  	}
  1947  
  1948  	// Rewrite argument named _ to __,
  1949  	// or else the assignment to _ will be
  1950  	// discarded during code generation.
  1951  	if isblank(n) {
  1952  		n.Sym = lookup("__")
  1953  	}
  1954  
  1955  	switch fp {
  1956  	default:
  1957  		Fatalf("bad fp")
  1958  
  1959  	case 0: // preparing arguments for call
  1960  		n.Op = OINDREGSP
  1961  		n.Xoffset += Ctxt.FixedFrameSize()
  1962  
  1963  	case 1: // reading arguments inside call
  1964  		n.SetClass(PPARAM)
  1965  		if funarg == types.FunargResults {
  1966  			n.SetClass(PPARAMOUT)
  1967  		}
  1968  	}
  1969  
  1970  	n.SetTypecheck(1)
  1971  	n.SetAddrtaken(true) // keep optimizers at bay
  1972  	return n
  1973  }
  1974  
  1975  // package all the arguments that match a ... T parameter into a []T.
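        // For example, for f(xs ...int) called as f(a, b, c), the arguments
        // a, b, and c are packaged into a single []int value.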
  1976  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1977  	esc := uint16(EscUnknown)
  1978  	if ddd != nil {
  1979  		esc = ddd.Esc
  1980  	}
  1981  
  1982  	if len(args) == 0 {
  1983  		n := nodnil()
  1984  		n.Type = typ
  1985  		return n
  1986  	}
  1987  
  1988  	n := nod(OCOMPLIT, nil, typenod(typ))
  1989  	if ddd != nil && prealloc[ddd] != nil {
  1990  		prealloc[n] = prealloc[ddd] // temporary to use
  1991  	}
  1992  	n.List.Set(args)
  1993  	n.Esc = esc
  1994  	n = typecheck(n, Erv)
  1995  	if n.Type == nil {
  1996  		Fatalf("mkdotargslice: typecheck failed")
  1997  	}
  1998  	n = walkexpr(n, init)
  1999  	return n
  2000  }
  2001  
  2002  // check assignment of an expression list to
  2003  // a type list. called in
  2004  //	return expr-list
  2005  //	func(expr-list)
  2006  func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
  2007  	var nn []*Node
  2008  
  2009  	// f(g()) where g has multiple return values
  2010  	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
  2011  		// optimization - can do block copy
  2012  		if eqtypenoname(rhs[0].Type, lhs) {
  2013  			nl := nodarg(lhs, fp)
  2014  			nr := nod(OCONVNOP, rhs[0], nil)
  2015  			nr.Type = nl.Type
  2016  			nn = []*Node{convas(nod(OAS, nl, nr), init)}
  2017  			goto ret
  2018  		}
  2019  
  2020  		// conversions involved.
  2021  		// copy into temporaries.
  2022  		var tmps []*Node
  2023  		for _, nr := range rhs[0].Type.FieldSlice() {
  2024  			tmps = append(tmps, temp(nr.Type))
  2025  		}
  2026  
  2027  		a := nod(OAS2, nil, nil)
  2028  		a.List.Set(tmps)
  2029  		a.Rlist.Set(rhs)
  2030  		a = typecheck(a, Etop)
  2031  		a = walkstmt(a)
  2032  		init.Append(a)
  2033  
  2034  		rhs = tmps
  2035  	}
  2036  
  2037  	// For each parameter (LHS), assign its corresponding argument (RHS).
  2038  	// If there's a ... parameter (which is only valid as the final
  2039  	// parameter) and this is not a ... call expression,
  2040  	// then assign the remaining arguments as a slice.
  2041  	for i, nl := range lhs.FieldSlice() {
  2042  		var nr *Node
  2043  		if nl.Isddd() && !isddd {
  2044  			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
  2045  		} else {
  2046  			nr = rhs[i]
  2047  		}
  2048  
  2049  		a := nod(OAS, nodarg(nl, fp), nr)
  2050  		a = convas(a, init)
  2051  		nn = append(nn, a)
  2052  	}
  2053  
  2054  ret:
  2055  	for _, n := range nn {
  2056  		n.SetTypecheck(1)
  2057  	}
  2058  	return nn
  2059  }
  2060  
  2061  // generate code for print
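        // Each operand is printed by a type-specific runtime helper
        // (printint, printstring, printeface, ...), and the whole
        // sequence is bracketed by printlock and printunlock.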
  2062  func walkprint(nn *Node, init *Nodes) *Node {
  2063  	// Hoist all the argument evaluation up before the lock.
  2064  	walkexprlistcheap(nn.List.Slice(), init)
  2065  
  2066  	// For println, add " " between elements and "\n" at the end.
  2067  	if nn.Op == OPRINTN {
  2068  		s := nn.List.Slice()
  2069  		t := make([]*Node, 0, len(s)*2)
  2070  		for i, n := range s {
  2071  			x := " "
  2072  			if len(s)-1 == i {
  2073  				x = "\n"
  2074  			}
  2075  			t = append(t, n, nodstr(x))
  2076  		}
  2077  		nn.List.Set(t)
  2078  	}
  2079  
  2080  	// Collapse runs of constant strings.
  2081  	s := nn.List.Slice()
  2082  	t := make([]*Node, 0, len(s))
  2083  	for i := 0; i < len(s); {
  2084  		var strs []string
  2085  		for i < len(s) && Isconst(s[i], CTSTR) {
  2086  			strs = append(strs, s[i].Val().U.(string))
  2087  			i++
  2088  		}
  2089  		if len(strs) > 0 {
  2090  			t = append(t, nodstr(strings.Join(strs, "")))
  2091  		}
  2092  		if i < len(s) {
  2093  			t = append(t, s[i])
  2094  			i++
  2095  		}
  2096  	}
  2097  	nn.List.Set(t)
  2098  
  2099  	calls := []*Node{mkcall("printlock", nil, init)}
  2100  	for i, n := range nn.List.Slice() {
  2101  		if n.Op == OLITERAL {
  2102  			switch n.Val().Ctype() {
  2103  			case CTRUNE:
  2104  				n = defaultlit(n, types.Runetype)
  2105  
  2106  			case CTINT:
  2107  				n = defaultlit(n, types.Types[TINT64])
  2108  
  2109  			case CTFLT:
  2110  				n = defaultlit(n, types.Types[TFLOAT64])
  2111  			}
  2112  		}
  2113  
  2114  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  2115  			n = defaultlit(n, types.Types[TINT64])
  2116  		}
  2117  		n = defaultlit(n, nil)
  2118  		nn.List.SetIndex(i, n)
  2119  		if n.Type == nil || n.Type.Etype == TFORW {
  2120  			continue
  2121  		}
  2122  
  2123  		var on *Node
  2124  		switch n.Type.Etype {
  2125  		case TINTER:
  2126  			if n.Type.IsEmptyInterface() {
  2127  				on = syslook("printeface")
  2128  			} else {
  2129  				on = syslook("printiface")
  2130  			}
  2131  			on = substArgTypes(on, n.Type) // any-1
  2132  		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  2133  			on = syslook("printpointer")
  2134  			on = substArgTypes(on, n.Type) // any-1
  2135  		case TSLICE:
  2136  			on = syslook("printslice")
  2137  			on = substArgTypes(on, n.Type) // any-1
  2138  		case TUINT64:
  2139  			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  2140  				on = syslook("printhex")
  2141  			} else {
  2142  				on = syslook("printuint")
  2143  			}
  2144  		case TINT, TUINT, TUINTPTR, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64:
  2145  			on = syslook("printint")
  2146  		case TFLOAT32, TFLOAT64:
  2147  			on = syslook("printfloat")
  2148  		case TCOMPLEX64, TCOMPLEX128:
  2149  			on = syslook("printcomplex")
  2150  		case TBOOL:
  2151  			on = syslook("printbool")
  2152  		case TSTRING:
  2153  			cs := ""
  2154  			if Isconst(n, CTSTR) {
  2155  				cs = n.Val().U.(string)
  2156  			}
  2157  			switch cs {
  2158  			case " ":
  2159  				on = syslook("printsp")
  2160  			case "\n":
  2161  				on = syslook("printnl")
  2162  			default:
  2163  				on = syslook("printstring")
  2164  			}
  2165  		default:
  2166  			badtype(OPRINT, n.Type, nil)
  2167  			continue
  2168  		}
  2169  
  2170  		r := nod(OCALL, on, nil)
  2171  		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  2172  			t := params[0].Type
  2173  			if !eqtype(t, n.Type) {
  2174  				n = nod(OCONV, n, nil)
  2175  				n.Type = t
  2176  			}
  2177  			r.List.Append(n)
  2178  		}
  2179  		calls = append(calls, r)
  2180  	}
  2181  
  2182  	calls = append(calls, mkcall("printunlock", nil, init))
  2183  
  2184  	typecheckslice(calls, Etop)
  2185  	walkexprlist(calls, init)
  2186  
  2187  	r := nod(OEMPTY, nil, nil)
  2188  	r = typecheck(r, Etop)
  2189  	r = walkexpr(r, init)
  2190  	r.Ninit.Set(calls)
  2191  	return r
  2192  }
  2193  
  2194  func callnew(t *types.Type) *Node {
  2195  	if t.NotInHeap() {
  2196  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  2197  	}
  2198  	dowidth(t)
  2199  	fn := syslook("newobject")
  2200  	fn = substArgTypes(fn, t)
  2201  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  2202  	v.SetNonNil(true)
  2203  	return v
  2204  }
  2205  
  2206  func iscallret(n *Node) bool {
  2207  	n = outervalue(n)
  2208  	return n.Op == OINDREGSP
  2209  }
  2210  
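        // isstack reports whether a write to n is a write to stack memory:
        // a local, a parameter, a result, or an argument slot (OINDREGSP).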
  2211  func isstack(n *Node) bool {
  2212  	n = outervalue(n)
  2213  
  2214  	// If n is *autotmp and autotmp = &foo, replace n with foo.
  2215  	// We introduce such temps when initializing struct literals.
  2216  	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
  2217  		defn := n.Left.Name.Defn
  2218  		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
  2219  			n = defn.Right.Left
  2220  		}
  2221  	}
  2222  
  2223  	switch n.Op {
  2224  	case OINDREGSP:
  2225  		return true
  2226  
  2227  	case ONAME:
  2228  		switch n.Class() {
  2229  		case PAUTO, PPARAM, PPARAMOUT:
  2230  			return true
  2231  		}
  2232  	}
  2233  
  2234  	return false
  2235  }
  2236  
  2237  // isReflectHeaderDataField reports whether l is an expression p.Data
  2238  // where p has type reflect.SliceHeader or reflect.StringHeader.
  2239  func isReflectHeaderDataField(l *Node) bool {
  2240  	if l.Type != types.Types[TUINTPTR] {
  2241  		return false
  2242  	}
  2243  
  2244  	var tsym *types.Sym
  2245  	switch l.Op {
  2246  	case ODOT:
  2247  		tsym = l.Left.Type.Sym
  2248  	case ODOTPTR:
  2249  		tsym = l.Left.Type.Elem().Sym
  2250  	default:
  2251  		return false
  2252  	}
  2253  
  2254  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  2255  		return false
  2256  	}
  2257  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  2258  }
  2259  
  2260  // Do we need a write barrier for assigning to l?
  2261  func needwritebarrier(l *Node) bool {
  2262  	if !use_writebarrier {
  2263  		return false
  2264  	}
  2265  
  2266  	if l == nil || isblank(l) {
  2267  		return false
  2268  	}
  2269  
  2270  	// No write barrier for write to stack.
  2271  	if isstack(l) {
  2272  		return false
  2273  	}
  2274  
  2275  	// Package unsafe's documentation says storing pointers into
  2276  	// reflect.SliceHeader and reflect.StringHeader's Data fields
  2277  	// is valid, even though they have type uintptr (#19168).
  2278  	if isReflectHeaderDataField(l) {
  2279  		return true
  2280  	}
  2281  
  2282  	// No write barrier for write of non-pointers.
  2283  	dowidth(l.Type)
  2284  	if !types.Haspointers(l.Type) {
  2285  		return false
  2286  	}
  2287  
  2288  	// No write barrier if this is a pointer to a go:notinheap
  2289  	// type, since the write barrier's inheap(ptr) check will fail.
  2290  	if l.Type.IsPtr() && l.Type.Elem().NotInHeap() {
  2291  		return false
  2292  	}
  2293  
  2294  	// TODO: We can eliminate write barriers if we know *both* the
  2295  	// current and new content of the slot must already be shaded.
  2296  	// We know a pointer is shaded if it's nil, or points to
  2297  	// static data, a global (variable or function), or the stack.
  2298  	// The nil optimization could be particularly useful for
  2299  	// writes to just-allocated objects. Unfortunately, knowing
  2300  	// the "current" value of the slot requires flow analysis.
  2301  
  2302  	// Otherwise, be conservative and use write barrier.
  2303  	return true
  2304  }
  2305  
  2306  func convas(n *Node, init *Nodes) *Node {
  2307  	if n.Op != OAS {
  2308  		Fatalf("convas: not OAS %v", n.Op)
  2309  	}
  2310  
  2311  	n.SetTypecheck(1)
  2312  
  2313  	var lt *types.Type
  2314  	var rt *types.Type
  2315  	if n.Left == nil || n.Right == nil {
  2316  		goto out
  2317  	}
  2318  
  2319  	lt = n.Left.Type
  2320  	rt = n.Right.Type
  2321  	if lt == nil || rt == nil {
  2322  		goto out
  2323  	}
  2324  
  2325  	if isblank(n.Left) {
  2326  		n.Right = defaultlit(n.Right, nil)
  2327  		goto out
  2328  	}
  2329  
  2330  	if !eqtype(lt, rt) {
  2331  		n.Right = assignconv(n.Right, lt, "assignment")
  2332  		n.Right = walkexpr(n.Right, init)
  2333  	}
  2334  	dowidth(n.Right.Type)
  2335  
  2336  out:
  2337  	updateHasCall(n)
  2338  	return n
  2339  }
  2340  
  2341  // from ascompat[te]
  2342  // evaluating actual function arguments.
  2343  //	f(a,b)
  2344  // if there is exactly one function expr,
  2345  // then it is done first. otherwise we must
  2346  // make temp variables.
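        // For f(g(), h(), x), for example, g() is assigned to a temporary
        // first, the assignment containing the final call h() follows,
        // and the call-free assignments come last.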
  2347  func reorder1(all []*Node) []*Node {
  2348  	c := 0 // function calls
  2349  	t := 0 // total parameters
  2350  
  2351  	for _, n := range all {
  2352  		t++
  2353  		updateHasCall(n)
  2354  		if n.HasCall() {
  2355  			c++
  2356  		}
  2357  	}
  2358  
  2359  	if c == 0 || t == 1 {
  2360  		return all
  2361  	}
  2362  
  2363  	var g []*Node // fncalls assigned to tempnames
  2364  	var f *Node   // last fncall assigned to stack
  2365  	var r []*Node // non fncalls and tempnames assigned to stack
  2366  	d := 0
  2367  	var a *Node
  2368  	for _, n := range all {
  2369  		if !n.HasCall() {
  2370  			r = append(r, n)
  2371  			continue
  2372  		}
  2373  
  2374  		d++
  2375  		if d == c {
  2376  			f = n
  2377  			continue
  2378  		}
  2379  
  2380  		// make assignment of fncall to tempname
  2381  		a = temp(n.Right.Type)
  2382  
  2383  		a = nod(OAS, a, n.Right)
  2384  		g = append(g, a)
  2385  
  2386  		// put normal arg assignment on list
  2387  		// with fncall replaced by tempname
  2388  		n.Right = a.Left
  2389  
  2390  		r = append(r, n)
  2391  	}
  2392  
  2393  	if f != nil {
  2394  		g = append(g, f)
  2395  	}
  2396  	return append(g, r...)
  2397  }
  2398  
  2399  // from ascompat[ee]
  2400  //	a,b = c,d
  2401  // simultaneous assignment. there cannot
  2402  // be later use of an earlier lvalue.
  2403  //
  2404  // function calls have been removed.
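        // For example, in i, x[i] = 1, 2 the index in x[i] must be saved
        // to a temporary before i is overwritten.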
  2405  func reorder3(all []*Node) []*Node {
  2406  	var l *Node
  2407  
  2408  	// If a needed expression may be affected by an
  2409  	// earlier assignment, make an early copy of that
  2410  	// expression and use the copy instead.
  2411  	var early []*Node
  2412  
  2413  	var mapinit Nodes
  2414  	for i, n := range all {
  2415  		l = n.Left
  2416  
  2417  		// Save subexpressions needed on left side.
  2418  		// Drill through non-dereferences.
  2419  		for {
  2420  			if l.Op == ODOT || l.Op == OPAREN {
  2421  				l = l.Left
  2422  				continue
  2423  			}
  2424  
  2425  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2426  				l.Right = reorder3save(l.Right, all, i, &early)
  2427  				l = l.Left
  2428  				continue
  2429  			}
  2430  
  2431  			break
  2432  		}
  2433  
  2434  		switch l.Op {
  2435  		default:
  2436  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2437  
  2438  		case ONAME:
  2439  			break
  2440  
  2441  		case OINDEX, OINDEXMAP:
  2442  			l.Left = reorder3save(l.Left, all, i, &early)
  2443  			l.Right = reorder3save(l.Right, all, i, &early)
  2444  			if l.Op == OINDEXMAP {
  2445  				all[i] = convas(all[i], &mapinit)
  2446  			}
  2447  
  2448  		case OIND, ODOTPTR:
  2449  			l.Left = reorder3save(l.Left, all, i, &early)
  2450  		}
  2451  
  2452  		// Save expression on right side.
  2453  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2454  	}
  2455  
  2456  	early = append(mapinit.Slice(), early...)
  2457  	return append(early, all...)
  2458  }
  2459  
  2460  // if the evaluation of n would be affected by the
  2461  // assignments in all up to but not including the ith assignment,
  2462  // copy n into a temporary, add the assignment to *early,
  2463  // and return that temp in place of n.
  2464  // The result of reorder3save MUST be assigned back to n, e.g.
  2465  // 	n.Left = reorder3save(n.Left, all, i, early)
  2466  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2467  	if !aliased(n, all, i) {
  2468  		return n
  2469  	}
  2470  
  2471  	q := temp(n.Type)
  2472  	q = nod(OAS, q, n)
  2473  	q = typecheck(q, Etop)
  2474  	*early = append(*early, q)
  2475  	return q.Left
  2476  }
  2477  
  2478  // what's the outer value that a write to n affects?
  2479  // outer value means containing struct or array.
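        // For example, a write to x.f[3], where f is an array field, affects x.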
  2480  func outervalue(n *Node) *Node {
  2481  	for {
  2482  		if n.Op == OXDOT {
  2483  			Fatalf("OXDOT in walk")
  2484  		}
  2485  		if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
  2486  			n = n.Left
  2487  			continue
  2488  		}
  2489  
  2490  		if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() {
  2491  			n = n.Left
  2492  			continue
  2493  		}
  2494  
  2495  		break
  2496  	}
  2497  
  2498  	return n
  2499  }
  2500  
  2501  // Is it possible that the computation of n might be
  2502  // affected by writes in all up to but not including the ith element?
  2503  func aliased(n *Node, all []*Node, i int) bool {
  2504  	if n == nil {
  2505  		return false
  2506  	}
  2507  
  2508  	// Treat all fields of a struct as referring to the whole struct.
  2509  	// We could do better but we would have to keep track of the fields.
  2510  	for n.Op == ODOT {
  2511  		n = n.Left
  2512  	}
  2513  
  2514  	// Look for obvious aliasing: a variable being assigned
  2515  	// during the all list and appearing in n.
  2516  	// Also record whether there are any writes to main memory.
  2517  	// Also record whether there are any writes to variables
  2518  	// whose addresses have been taken.
  2519  	memwrite := 0
  2520  
  2521  	varwrite := 0
  2522  	var a *Node
  2523  	for _, an := range all[:i] {
  2524  		a = outervalue(an.Left)
  2525  
  2526  		for a.Op == ODOT {
  2527  			a = a.Left
  2528  		}
  2529  
  2530  		if a.Op != ONAME {
  2531  			memwrite = 1
  2532  			continue
  2533  		}
  2534  
  2535  		switch n.Class() {
  2536  		default:
  2537  			varwrite = 1
  2538  			continue
  2539  
  2540  		case PAUTO, PPARAM, PPARAMOUT:
  2541  			if n.Addrtaken() {
  2542  				varwrite = 1
  2543  				continue
  2544  			}
  2545  
  2546  			if vmatch2(a, n) {
  2547  				// Direct hit.
  2548  				return true
  2549  			}
  2550  		}
  2551  	}
  2552  
  2553  	// The variables being written do not appear in n.
  2554  	// However, n might refer to computed addresses
  2555  	// that are being written.
  2556  
  2557  	// If no computed addresses are affected by the writes, no aliasing.
  2558  	if memwrite == 0 && varwrite == 0 {
  2559  		return false
  2560  	}
  2561  
  2562  	// If n does not refer to computed addresses
  2563  	// (that is, if n only refers to variables whose addresses
  2564  	// have not been taken), no aliasing.
  2565  	if varexpr(n) {
  2566  		return false
  2567  	}
  2568  
  2569  	// Otherwise, both the writes and n refer to computed memory addresses.
  2570  	// Assume that they might conflict.
  2571  	return true
  2572  }
  2573  
  2574  // does the evaluation of n only refer to variables
  2575  // whose addresses have not been taken?
  2576  // (and no other memory)
  2577  func varexpr(n *Node) bool {
  2578  	if n == nil {
  2579  		return true
  2580  	}
  2581  
  2582  	switch n.Op {
  2583  	case OLITERAL:
  2584  		return true
  2585  
  2586  	case ONAME:
  2587  		switch n.Class() {
  2588  		case PAUTO, PPARAM, PPARAMOUT:
  2589  			if !n.Addrtaken() {
  2590  				return true
  2591  			}
  2592  		}
  2593  
  2594  		return false
  2595  
  2596  	case OADD,
  2597  		OSUB,
  2598  		OOR,
  2599  		OXOR,
  2600  		OMUL,
  2601  		ODIV,
  2602  		OMOD,
  2603  		OLSH,
  2604  		ORSH,
  2605  		OAND,
  2606  		OANDNOT,
  2607  		OPLUS,
  2608  		OMINUS,
  2609  		OCOM,
  2610  		OPAREN,
  2611  		OANDAND,
  2612  		OOROR,
  2613  		OCONV,
  2614  		OCONVNOP,
  2615  		OCONVIFACE,
  2616  		ODOTTYPE:
  2617  		return varexpr(n.Left) && varexpr(n.Right)
  2618  
  2619  	case ODOT: // but not ODOTPTR
  2620  		// Should have been handled in aliased.
  2621  		Fatalf("varexpr unexpected ODOT")
  2622  	}
  2623  
  2624  	// Be conservative.
  2625  	return false
  2626  }
  2627  
  2628  // is the name l mentioned in r?
  2629  func vmatch2(l *Node, r *Node) bool {
  2630  	if r == nil {
  2631  		return false
  2632  	}
  2633  	switch r.Op {
  2634  	// match each right given left
  2635  	case ONAME:
  2636  		return l == r
  2637  
  2638  	case OLITERAL:
  2639  		return false
  2640  	}
  2641  
  2642  	if vmatch2(l, r.Left) {
  2643  		return true
  2644  	}
  2645  	if vmatch2(l, r.Right) {
  2646  		return true
  2647  	}
  2648  	for _, n := range r.List.Slice() {
  2649  		if vmatch2(l, n) {
  2650  			return true
  2651  		}
  2652  	}
  2653  	return false
  2654  }
  2655  
  2656  // is any name mentioned in l also mentioned in r?
  2657  // called by sinit.go
  2658  func vmatch1(l *Node, r *Node) bool {
  2659  	// isolate all left sides
  2660  	if l == nil || r == nil {
  2661  		return false
  2662  	}
  2663  	switch l.Op {
  2664  	case ONAME:
  2665  		switch l.Class() {
  2666  		case PPARAM, PAUTO:
  2667  			break
  2668  
  2669  		default:
  2670  			// assignment to non-stack variable must be
  2671  			// delayed if right has function calls.
  2672  			if r.HasCall() {
  2673  				return true
  2674  			}
  2675  		}
  2676  
  2677  		return vmatch2(l, r)
  2678  
  2679  	case OLITERAL:
  2680  		return false
  2681  	}
  2682  
  2683  	if vmatch1(l.Left, r) {
  2684  		return true
  2685  	}
  2686  	if vmatch1(l.Right, r) {
  2687  		return true
  2688  	}
  2689  	for _, n := range l.List.Slice() {
  2690  		if vmatch1(n, r) {
  2691  			return true
  2692  		}
  2693  	}
  2694  	return false
  2695  }
  2696  
  2697  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2698  // and to copy non-result parameters' values from the stack.
  2699  func paramstoheap(params *types.Type) []*Node {
  2700  	var nn []*Node
  2701  	for _, t := range params.Fields().Slice() {
  2702  		v := asNode(t.Nname)
  2703  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2704  			v = nil
  2705  		}
  2706  		if v == nil {
  2707  			continue
  2708  		}
  2709  
  2710  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2711  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2712  			if stackcopy.Class() == PPARAM {
  2713  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2714  			}
  2715  		}
  2716  	}
  2717  
  2718  	return nn
  2719  }
  2720  
  2721  // zeroResults zeros the return values at the start of the function.
  2722  // We need to do this very early in the function.  Defer might stop a
  2723  // panic and show the return values as they exist at the time of
  2724  // panic.  For precise stacks, the garbage collector assumes results
  2725  // are always live, so we need to zero them before any allocations,
  2726  // even allocations to move params/results to the heap.
  2727  // The generated code is added to Curfn's Enter list.
  2728  func zeroResults() {
  2729  	lno := lineno
  2730  	lineno = Curfn.Pos
  2731  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2732  		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
  2733  			// The local which points to the return value is the
  2734  			// thing that needs zeroing. This is already handled
  2735  			// by a Needzero annotation in plive.go:livenessepilogue.
  2736  			continue
  2737  		}
  2738  		// Zero the stack location containing f.
  2739  		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
  2740  	}
  2741  	lineno = lno
  2742  }
  2743  
  2744  // returnsfromheap returns code to copy values for heap-escaped parameters
  2745  // back to the stack.
  2746  func returnsfromheap(params *types.Type) []*Node {
  2747  	var nn []*Node
  2748  	for _, t := range params.Fields().Slice() {
  2749  		v := asNode(t.Nname)
  2750  		if v == nil {
  2751  			continue
  2752  		}
  2753  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2754  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2755  		}
  2756  	}
  2757  
  2758  	return nn
  2759  }
  2760  
  2761  // heapmoves generates code to handle migrating heap-escaped parameters
  2762  // between the stack and the heap. The generated code is added to Curfn's
  2763  // Enter and Exit lists.
  2764  func heapmoves() {
  2765  	lno := lineno
  2766  	lineno = Curfn.Pos
  2767  	nn := paramstoheap(Curfn.Type.Recvs())
  2768  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2769  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2770  	Curfn.Func.Enter.Append(nn...)
  2771  	lineno = Curfn.Func.Endlineno
  2772  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2773  	lineno = lno
  2774  }
  2775  
  2776  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2777  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2778  		Fatalf("mkcall %v %v", fn, fn.Type)
  2779  	}
  2780  
  2781  	n := fn.Type.NumParams()
  2782  	if n != len(va) {
  2783  		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
  2784  	}
  2785  
  2786  	r := nod(OCALL, fn, nil)
  2787  	r.List.Set(va)
  2788  	if fn.Type.NumResults() > 0 {
  2789  		r = typecheck(r, Erv|Efnstruct)
  2790  	} else {
  2791  		r = typecheck(r, Etop)
  2792  	}
  2793  	r = walkexpr(r, init)
  2794  	r.Type = t
  2795  	return r
  2796  }
  2797  
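        // mkcall returns a call to the runtime function name with result type t
        // and the given arguments; the call is typechecked and walked, and any
        // statements it needs are appended to init.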
  2798  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2799  	return vmkcall(syslook(name), t, init, args)
  2800  }
  2801  
  2802  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2803  	return vmkcall(fn, t, init, args)
  2804  }
  2805  
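        // conv converts n to type t, returning n unchanged if it already has that type.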
  2806  func conv(n *Node, t *types.Type) *Node {
  2807  	if eqtype(n.Type, t) {
  2808  		return n
  2809  	}
  2810  	n = nod(OCONV, n, nil)
  2811  	n.Type = t
  2812  	n = typecheck(n, Erv)
  2813  	return n
  2814  }
  2815  
  2816  // byteindex converts n, which is byte-sized, to a uint8.
  2817  // We cannot use conv, because we allow converting bool to uint8 here,
  2818  // which is forbidden in user code.
  2819  func byteindex(n *Node) *Node {
  2820  	if eqtype(n.Type, types.Types[TUINT8]) {
  2821  		return n
  2822  	}
  2823  	n = nod(OCONV, n, nil)
  2824  	n.Type = types.Types[TUINT8]
  2825  	n.SetTypecheck(1)
  2826  	return n
  2827  }
  2828  
  2829  func chanfn(name string, n int, t *types.Type) *Node {
  2830  	if !t.IsChan() {
  2831  		Fatalf("chanfn %v", t)
  2832  	}
  2833  	fn := syslook(name)
  2834  	switch n {
  2835  	default:
  2836  		Fatalf("chanfn %d", n)
  2837  	case 1:
  2838  		fn = substArgTypes(fn, t.Elem())
  2839  	case 2:
  2840  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2841  	}
  2842  	return fn
  2843  }
  2844  
  2845  func mapfn(name string, t *types.Type) *Node {
  2846  	if !t.IsMap() {
  2847  		Fatalf("mapfn %v", t)
  2848  	}
  2849  	fn := syslook(name)
  2850  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
  2851  	return fn
  2852  }
  2853  
  2854  func mapfndel(name string, t *types.Type) *Node {
  2855  	if !t.IsMap() {
  2856  		Fatalf("mapfndel %v", t)
  2857  	}
  2858  	fn := syslook(name)
  2859  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
  2860  	return fn
  2861  }
  2862  
  2863  const (
  2864  	mapslow = iota
  2865  	mapfast32
  2866  	mapfast64
  2867  	mapfaststr
  2868  	nmapfast
  2869  )
  2870  
  2871  type mapnames [nmapfast]string
  2872  
  2873  func mkmapnames(base string) mapnames {
  2874  	return mapnames{base, base + "_fast32", base + "_fast64", base + "_faststr"}
  2875  }
  2876  
  2877  var mapaccess1 mapnames = mkmapnames("mapaccess1")
  2878  var mapaccess2 mapnames = mkmapnames("mapaccess2")
  2879  var mapassign mapnames = mkmapnames("mapassign")
  2880  var mapdelete mapnames = mkmapnames("mapdelete")
  2881  
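        // mapfast selects the runtime fast-path variant (if any) to use for
        // operations on the map type t, based on its key algorithm and value size.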
  2882  func mapfast(t *types.Type) int {
  2883  	// Check ../../runtime/hashmap.go:maxValueSize before changing.
  2884  	if t.Val().Width > 128 {
  2885  		return mapslow
  2886  	}
  2887  	switch algtype(t.Key()) {
  2888  	case AMEM32:
  2889  		return mapfast32
  2890  	case AMEM64:
  2891  		return mapfast64
  2892  	case ASTRING:
  2893  		return mapfaststr
  2894  	}
  2895  	return mapslow
  2896  }
  2897  
  2898  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2899  	fn := syslook(name)
  2900  	fn = substArgTypes(fn, l, r)
  2901  	return fn
  2902  }
  2903  
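        // addstr lowers an OADDSTR concatenation to a call to concatstring2
        // through concatstring5, or to concatstrings with a []string argument
        // when there are more operands, passing a stack buffer for the result
        // when it does not escape and the known constant parts fit.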
  2904  func addstr(n *Node, init *Nodes) *Node {
  2905  	// orderexpr rewrote OADDSTR to have a list of strings.
  2906  	c := n.List.Len()
  2907  
  2908  	if c < 2 {
  2909  		Fatalf("addstr count %d too small", c)
  2910  	}
  2911  
  2912  	buf := nodnil()
  2913  	if n.Esc == EscNone {
  2914  		sz := int64(0)
  2915  		for _, n1 := range n.List.Slice() {
  2916  			if n1.Op == OLITERAL {
  2917  				sz += int64(len(n1.Val().U.(string)))
  2918  			}
  2919  		}
  2920  
  2921  		// Don't allocate the buffer if the result won't fit.
  2922  		if sz < tmpstringbufsize {
  2923  			// Create temporary buffer for result string on stack.
  2924  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2925  
  2926  			buf = nod(OADDR, temp(t), nil)
  2927  		}
  2928  	}
  2929  
  2930  	// build list of string arguments
  2931  	args := []*Node{buf}
  2932  	for _, n2 := range n.List.Slice() {
  2933  		args = append(args, conv(n2, types.Types[TSTRING]))
  2934  	}
  2935  
  2936  	var fn string
  2937  	if c <= 5 {
  2938  		// small numbers of strings use direct runtime helpers.
  2939  		// note: orderexpr knows this cutoff too.
  2940  		fn = fmt.Sprintf("concatstring%d", c)
  2941  	} else {
  2942  		// large numbers of strings are passed to the runtime as a slice.
  2943  		fn = "concatstrings"
  2944  
  2945  		t := types.NewSlice(types.Types[TSTRING])
  2946  		slice := nod(OCOMPLIT, nil, typenod(t))
  2947  		if prealloc[n] != nil {
  2948  			prealloc[slice] = prealloc[n]
  2949  		}
  2950  		slice.List.Set(args[1:]) // skip buf arg
  2951  		args = []*Node{buf, slice}
  2952  		slice.Esc = EscNone
  2953  	}
  2954  
  2955  	cat := syslook(fn)
  2956  	r := nod(OCALL, cat, nil)
  2957  	r.List.Set(args)
  2958  	r = typecheck(r, Erv)
  2959  	r = walkexpr(r, init)
  2960  	r.Type = n.Type
  2961  
  2962  	return r
  2963  }
  2964  
  2965  // expand append(l1, l2...) to
  2966  //   init {
  2967  //     s := l1
  2968  //     n := len(s) + len(l2)
  2969  //     // Compare as uint so growslice can panic on overflow.
  2970  //     if uint(n) > uint(cap(s)) {
  2971  //       s = growslice(s, n)
  2972  //     }
  2973  //     s = s[:n]
  2974  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2975  //   }
  2976  //   s
  2977  //
  2978  // l2 is allowed to be a string.
  2979  func appendslice(n *Node, init *Nodes) *Node {
  2980  	walkexprlistsafe(n.List.Slice(), init)
  2981  
  2982  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2983  	// and n are name or literal, but those may index the slice we're
  2984  	// modifying here. Fix explicitly.
  2985  	ls := n.List.Slice()
  2986  	for i1, n1 := range ls {
  2987  		ls[i1] = cheapexpr(n1, init)
  2988  	}
  2989  
  2990  	l1 := n.List.First()
  2991  	l2 := n.List.Second()
  2992  
  2993  	var l []*Node
  2994  
  2995  	// var s []T
  2996  	s := temp(l1.Type)
  2997  	l = append(l, nod(OAS, s, l1)) // s = l1
  2998  
  2999  	// n := len(s) + len(l2)
  3000  	nn := temp(types.Types[TINT])
  3001  	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  3002  
  3003  	// if uint(n) > uint(cap(s))
  3004  	nif := nod(OIF, nil, nil)
  3005  	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
  3006  	nif.Left.Left.Type = types.Types[TUINT]
  3007  	nif.Left.Right.Type = types.Types[TUINT]
  3008  
  3009  	// instantiate growslice(Type*, []any, int) []any
  3010  	fn := syslook("growslice")
  3011  	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3012  
  3013  	// s = growslice(T, s, n)
  3014  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
  3015  	l = append(l, nif)
  3016  
  3017  	// s = s[:n]
  3018  	nt := nod(OSLICE, s, nil)
  3019  	nt.SetSliceBounds(nil, nn, nil)
  3020  	nt.Etype = 1
  3021  	l = append(l, nod(OAS, s, nt))
  3022  
  3023  	if types.Haspointers(l1.Type.Elem()) {
  3024  		// copy(s[len(l1):], l2)
  3025  		nptr1 := nod(OSLICE, s, nil)
  3026  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  3027  		nptr1.Etype = 1
  3028  		nptr2 := l2
  3029  		fn := syslook("typedslicecopy")
  3030  		fn = substArgTypes(fn, l1.Type, l2.Type)
  3031  		var ln Nodes
  3032  		ln.Set(l)
  3033  		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
  3034  		l = append(ln.Slice(), nt)
  3035  	} else if instrumenting && !compiling_runtime {
  3036  		// rely on runtime to instrument copy.
  3037  		// copy(s[len(l1):], l2)
  3038  		nptr1 := nod(OSLICE, s, nil)
  3039  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  3040  		nptr1.Etype = 1
  3041  		nptr2 := l2
  3042  
  3043  		var ln Nodes
  3044  		ln.Set(l)
  3045  		var nt *Node
  3046  		if l2.Type.IsString() {
  3047  			fn := syslook("slicestringcopy")
  3048  			fn = substArgTypes(fn, l1.Type, l2.Type)
  3049  			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2)
  3050  		} else {
  3051  			fn := syslook("slicecopy")
  3052  			fn = substArgTypes(fn, l1.Type, l2.Type)
  3053  			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
  3054  		}
  3055  
  3056  		l = append(ln.Slice(), nt)
  3057  	} else {
  3058  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  3059  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  3060  		nptr1.SetBounded(true)
  3061  
  3062  		nptr1 = nod(OADDR, nptr1, nil)
  3063  
  3064  		nptr2 := nod(OSPTR, l2, nil)
  3065  
  3066  		fn := syslook("memmove")
  3067  		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3068  
  3069  		var ln Nodes
  3070  		ln.Set(l)
  3071  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
  3072  
  3073  		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
  3074  		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
  3075  		l = append(ln.Slice(), nt)
  3076  	}
  3077  
  3078  	typecheckslice(l, Etop)
  3079  	walkstmtlist(l)
  3080  	init.Append(l...)
  3081  	return s
  3082  }
  3083  
  3084  // Rewrite append(src, x, y, z) so that any side effects in
  3085  // x, y, z (including runtime panics) are evaluated in
  3086  // initialization statements before the append.
  3087  // For normal code generation, stop there and leave the
  3088  // rest to cgen_append.
  3089  //
  3090  // For race detector, expand append(src, a [, b]* ) to
  3091  //
  3092  //   init {
  3093  //     s := src
  3094  //     const argc = len(args) - 1
  3095  //     if cap(s) - len(s) < argc {
  3096  //	    s = growslice(s, len(s)+argc)
  3097  //     }
  3098  //     n := len(s)
  3099  //     s = s[:n+argc]
  3100  //     s[n] = a
  3101  //     s[n+1] = b
  3102  //     ...
  3103  //   }
  3104  //   s
  3105  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  3106  	if !samesafeexpr(dst, n.List.First()) {
  3107  		n.List.SetFirst(safeexpr(n.List.First(), init))
  3108  		n.List.SetFirst(walkexpr(n.List.First(), init))
  3109  	}
  3110  	walkexprlistsafe(n.List.Slice()[1:], init)
  3111  
  3112  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  3113  	// and n are name or literal, but those may index the slice we're
  3114  	// modifying here. Fix explicitly.
  3115  	// Using cheapexpr also makes sure that the evaluation
  3116  	// of all arguments (and especially any panics) happen
  3117  	// before we begin to modify the slice in a visible way.
  3118  	ls := n.List.Slice()[1:]
  3119  	for i, n := range ls {
  3120  		ls[i] = cheapexpr(n, init)
  3121  	}
  3122  
  3123  	nsrc := n.List.First()
  3124  
  3125  	argc := n.List.Len() - 1
  3126  	if argc < 1 {
  3127  		return nsrc
  3128  	}
  3129  
  3130  	// General case, with no function calls left as arguments.
  3131  	// Leave for gen, except that instrumentation requires old form.
  3132  	if !instrumenting || compiling_runtime {
  3133  		return n
  3134  	}
  3135  
  3136  	var l []*Node
  3137  
  3138  	ns := temp(nsrc.Type)
  3139  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  3140  
  3141  	na := nodintconst(int64(argc)) // const argc
  3142  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  3143  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  3144  
  3145  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  3146  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  3147  
  3148  	nx.Nbody.Set1(nod(OAS, ns,
  3149  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  3150  			nod(OADD, nod(OLEN, ns, nil), na))))
  3151  
  3152  	l = append(l, nx)
  3153  
  3154  	nn := temp(types.Types[TINT])
  3155  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  3156  
  3157  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  3158  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  3159  	nx.Etype = 1
  3160  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  3161  
  3162  	ls = n.List.Slice()[1:]
  3163  	for i, n := range ls {
  3164  		nx = nod(OINDEX, ns, nn) // s[n] ...
  3165  		nx.SetBounded(true)
  3166  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  3167  		if i+1 < len(ls) {
  3168  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  3169  		}
  3170  	}
  3171  
  3172  	typecheckslice(l, Etop)
  3173  	walkstmtlist(l)
  3174  	init.Append(l...)
  3175  	return ns
  3176  }
  3177  
  3178  // Lower copy(a, b) to a memmove call or a runtime call.
  3179  //
  3180  // init {
  3181  //   n := len(a)
  3182  //   if n > len(b) { n = len(b) }
  3183  //   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
  3184  // }
  3185  // n;
  3186  //
  3187  // Also works if b is a string.
  3188  //
  3189  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  3190  	if types.Haspointers(n.Left.Type.Elem()) {
  3191  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  3192  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  3193  	}
  3194  
  3195  	if runtimecall {
  3196  		if n.Right.Type.IsString() {
  3197  			fn := syslook("slicestringcopy")
  3198  			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3199  			return mkcall1(fn, n.Type, init, n.Left, n.Right)
  3200  		}
  3201  
  3202  		fn := syslook("slicecopy")
  3203  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3204  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  3205  	}
  3206  
  3207  	n.Left = walkexpr(n.Left, init)
  3208  	n.Right = walkexpr(n.Right, init)
  3209  	nl := temp(n.Left.Type)
  3210  	nr := temp(n.Right.Type)
  3211  	var l []*Node
  3212  	l = append(l, nod(OAS, nl, n.Left))
  3213  	l = append(l, nod(OAS, nr, n.Right))
  3214  
  3215  	nfrm := nod(OSPTR, nr, nil)
  3216  	nto := nod(OSPTR, nl, nil)
  3217  
  3218  	nlen := temp(types.Types[TINT])
  3219  
  3220  	// n = len(to)
  3221  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3222  
  3223  	// if n > len(frm) { n = len(frm) }
  3224  	nif := nod(OIF, nil, nil)
  3225  
  3226  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3227  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3228  	l = append(l, nif)
  3229  
  3230  	// Call memmove.
  3231  	fn := syslook("memmove")
  3232  
  3233  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3234  	nwid := temp(types.Types[TUINTPTR])
  3235  	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
  3236  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3237  	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
  3238  
  3239  	typecheckslice(l, Etop)
  3240  	walkstmtlist(l)
  3241  	init.Append(l...)
  3242  	return nlen
  3243  }
  3244  
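        // eqfor returns a node for the equality function to use for type t.
        // *needsize is set to 1 when the function also takes a size argument
        // (memequal) and to 0 for a generated .eq function.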
  3245  func eqfor(t *types.Type, needsize *int) *Node {
  3246  	// Should only arrive here with large memory or
  3247  	// a struct/array containing a non-memory field/element.
  3248  	// Small memory is handled inline, and single non-memory
  3249  	// is handled during type check (OCMPSTR etc).
  3250  	switch a, _ := algtype1(t); a {
  3251  	case AMEM:
  3252  		n := syslook("memequal")
  3253  		n = substArgTypes(n, t, t)
  3254  		*needsize = 1
  3255  		return n
  3256  	case ASPECIAL:
  3257  		sym := typesymprefix(".eq", t)
  3258  		n := newname(sym)
  3259  		n.SetClass(PFUNC)
  3260  		ntype := nod(OTFUNC, nil, nil)
  3261  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3262  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3263  		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
  3264  		ntype = typecheck(ntype, Etype)
  3265  		n.Type = ntype.Type
  3266  		*needsize = 0
  3267  		return n
  3268  	}
  3269  	Fatalf("eqfor %v", t)
  3270  	return nil
  3271  }
  3272  
  3273  // The result of walkcompare MUST be assigned back to n, e.g.
  3274  // 	n.Left = walkcompare(n.Left, init)
  3275  func walkcompare(n *Node, init *Nodes) *Node {
  3276  	// Given interface value l and concrete value r, rewrite
  3277  	//   l == r
  3278  	// into types-equal && data-equal.
  3279  	// This is efficient, avoids allocations, and avoids runtime calls.
  3280  	var l, r *Node
  3281  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3282  		l = n.Left
  3283  		r = n.Right
  3284  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3285  		l = n.Right
  3286  		r = n.Left
  3287  	}
  3288  
  3289  	if l != nil {
  3290  		// Handle both == and !=.
  3291  		eq := n.Op
  3292  		var andor Op
  3293  		if eq == OEQ {
  3294  			andor = OANDAND
  3295  		} else {
  3296  			andor = OOROR
  3297  		}
  3298  		// Check for types equal.
  3299  		// For empty interface, this is:
  3300  		//   l.tab == type(r)
  3301  		// For non-empty interface, this is:
  3302  		//   l.tab != nil && l.tab._type == type(r)
  3303  		var eqtype *Node
  3304  		tab := nod(OITAB, l, nil)
  3305  		rtyp := typename(r.Type)
  3306  		if l.Type.IsEmptyInterface() {
  3307  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3308  			tab.SetTypecheck(1)
  3309  			eqtype = nod(eq, tab, rtyp)
  3310  		} else {
  3311  			nonnil := nod(brcom(eq), nodnil(), tab)
  3312  			match := nod(eq, itabType(tab), rtyp)
  3313  			eqtype = nod(andor, nonnil, match)
  3314  		}
  3315  		// Check for data equal.
  3316  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3317  		// Put it all together.
  3318  		expr := nod(andor, eqtype, eqdata)
  3319  		n = finishcompare(n, expr, init)
  3320  		return n
  3321  	}
  3322  
  3323  	// Must be comparison of array or struct.
  3324  	// Otherwise back end handles it.
  3325  	// While we're here, decide whether to
  3326  	// inline or call an eq alg.
  3327  	t := n.Left.Type
  3328  	var inline bool
  3329  
  3330  	maxcmpsize := int64(4)
  3331  	unalignedLoad := false
  3332  	switch thearch.LinkArch.Family {
  3333  	case sys.AMD64, sys.ARM64, sys.S390X:
  3334  		// Keep this low enough to generate less code than a function call.
  3335  		maxcmpsize = 16
  3336  		unalignedLoad = true
  3337  	case sys.I386:
  3338  		maxcmpsize = 8
  3339  		unalignedLoad = true
  3340  	}
  3341  
  3342  	switch t.Etype {
  3343  	default:
  3344  		return n
  3345  	case TARRAY:
  3346  		// We can compare several elements at once with 2/4/8 byte integer compares
  3347  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3348  	case TSTRUCT:
  3349  		inline = t.NumFields() <= 4
  3350  	}
  3351  
  3352  	cmpl := n.Left
  3353  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3354  		cmpl = cmpl.Left
  3355  	}
  3356  	cmpr := n.Right
  3357  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3358  		cmpr = cmpr.Left
  3359  	}
  3360  
  3361  	// We chose not to inline; call the equality function directly.
  3362  	if !inline {
  3363  		if isvaluelit(cmpl) {
  3364  			var_ := temp(cmpl.Type)
  3365  			anylit(cmpl, var_, init)
  3366  			cmpl = var_
  3367  		}
  3368  		if isvaluelit(cmpr) {
  3369  			var_ := temp(cmpr.Type)
  3370  			anylit(cmpr, var_, init)
  3371  			cmpr = var_
  3372  		}
  3373  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3374  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3375  		}
  3376  
  3377  		// eq algs take pointers
  3378  		pl := temp(types.NewPtr(t))
  3379  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3380  		al.Right.Etype = 1 // addr does not escape
  3381  		al = typecheck(al, Etop)
  3382  		init.Append(al)
  3383  
  3384  		pr := temp(types.NewPtr(t))
  3385  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3386  		ar.Right.Etype = 1 // addr does not escape
  3387  		ar = typecheck(ar, Etop)
  3388  		init.Append(ar)
  3389  
  3390  		var needsize int
  3391  		call := nod(OCALL, eqfor(t, &needsize), nil)
  3392  		call.List.Append(pl)
  3393  		call.List.Append(pr)
  3394  		if needsize != 0 {
  3395  			call.List.Append(nodintconst(t.Width))
  3396  		}
  3397  		res := call
  3398  		if n.Op != OEQ {
  3399  			res = nod(ONOT, res, nil)
  3400  		}
  3401  		n = finishcompare(n, res, init)
  3402  		return n
  3403  	}
  3404  
  3405  	// inline: build boolean expression comparing element by element
  3406  	andor := OANDAND
  3407  	if n.Op == ONE {
  3408  		andor = OOROR
  3409  	}
  3410  	var expr *Node
  3411  	compare := func(el, er *Node) {
  3412  		a := nod(n.Op, el, er)
  3413  		if expr == nil {
  3414  			expr = a
  3415  		} else {
  3416  			expr = nod(andor, expr, a)
  3417  		}
  3418  	}
  3419  	cmpl = safeexpr(cmpl, init)
  3420  	cmpr = safeexpr(cmpr, init)
  3421  	if t.IsStruct() {
  3422  		for _, f := range t.Fields().Slice() {
  3423  			sym := f.Sym
  3424  			if sym.IsBlank() {
  3425  				continue
  3426  			}
  3427  			compare(
  3428  				nodSym(OXDOT, cmpl, sym),
  3429  				nodSym(OXDOT, cmpr, sym),
  3430  			)
  3431  		}
  3432  	} else {
  3433  		step := int64(1)
  3434  		remains := t.NumElem() * t.Elem().Width
  3435  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3436  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3437  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
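        		// For example, two [8]byte values on amd64 are compared with one
        		// combined 8-byte load per operand instead of eight 1-byte compares.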
  3438  		for i := int64(0); remains > 0; {
  3439  			var convType *types.Type
  3440  			switch {
  3441  			case remains >= 8 && combine64bit:
  3442  				convType = types.Types[TINT64]
  3443  				step = 8 / t.Elem().Width
  3444  			case remains >= 4 && combine32bit:
  3445  				convType = types.Types[TUINT32]
  3446  				step = 4 / t.Elem().Width
  3447  			case remains >= 2 && combine16bit:
  3448  				convType = types.Types[TUINT16]
  3449  				step = 2 / t.Elem().Width
  3450  			default:
  3451  				step = 1
  3452  			}
  3453  			if step == 1 {
  3454  				compare(
  3455  					nod(OINDEX, cmpl, nodintconst(int64(i))),
  3456  					nod(OINDEX, cmpr, nodintconst(int64(i))),
  3457  				)
  3458  				i++
  3459  				remains -= t.Elem().Width
  3460  			} else {
  3461  				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
  3462  				cmplw = conv(cmplw, convType)
  3463  				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
  3464  				cmprw = conv(cmprw, convType)
  3465  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3466  				// ssa will generate a single large load.
  3467  				for offset := int64(1); offset < step; offset++ {
  3468  					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
  3469  					lb = conv(lb, convType)
  3470  					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
  3471  					cmplw = nod(OOR, cmplw, lb)
  3472  					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
  3473  					rb = conv(rb, convType)
  3474  					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
  3475  					cmprw = nod(OOR, cmprw, rb)
  3476  				}
  3477  				compare(cmplw, cmprw)
  3478  				i += step
  3479  				remains -= step * t.Elem().Width
  3480  			}
  3481  		}
  3482  	}
  3483  	if expr == nil {
  3484  		expr = nodbool(n.Op == OEQ)
  3485  	}
  3486  	n = finishcompare(n, expr, init)
  3487  	return n
  3488  }
  3489  
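        // finishcompare typechecks and walks the replacement expression r,
        // converting it to n's type if necessary, and returns the node to use
        // in place of the comparison n.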
  3490  // The result of finishcompare MUST be assigned back to n, e.g.
  3491  // 	n.Left = finishcompare(n.Left, x, init)
  3492  func finishcompare(n, r *Node, init *Nodes) *Node {
  3493  	// Use nn here to avoid passing r to typecheck.
  3494  	nn := r
  3495  	nn = typecheck(nn, Erv)
  3496  	nn = walkexpr(nn, init)
  3497  	r = nn
  3498  	if r.Type != n.Type {
  3499  		r = nod(OCONVNOP, r, nil)
  3500  		r.Type = n.Type
  3501  		r.SetTypecheck(1)
  3502  		nn = r
  3503  	}
  3504  	return nn
  3505  }
  3506  
  3507  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3508  func (n *Node) isIntOrdering() bool {
  3509  	switch n.Op {
  3510  	case OLE, OLT, OGE, OGT:
  3511  	default:
  3512  		return false
  3513  	}
  3514  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3515  }
  3516  
  3517  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3518  // n must be an OANDAND or OOROR node.
  3519  // The result of walkinrange MUST be assigned back to n, e.g.
  3520  // 	n.Left = walkinrange(n.Left)
  3521  func walkinrange(n *Node, init *Nodes) *Node {
  3522  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3523  	// * a, b, and c have integer type
  3524  	// * b is side-effect-free
  3525  	// * opl and opr are each < or ≤
  3526  	// * OP is &&
  3527  	l := n.Left
  3528  	r := n.Right
  3529  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3530  		return n
  3531  	}
  3532  
  3533  	// Find b, if it exists, and rename appropriately.
  3534  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3535  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3536  	a, opl, b := l.Left, l.Op, l.Right
  3537  	x, opr, c := r.Left, r.Op, r.Right
  3538  	for i := 0; ; i++ {
  3539  		if samesafeexpr(b, x) {
  3540  			break
  3541  		}
  3542  		if i == 3 {
  3543  			// Tried all permutations and couldn't find an appropriate b == x.
  3544  			return n
  3545  		}
  3546  		if i&1 == 0 {
  3547  			a, opl, b = b, brrev(opl), a
  3548  		} else {
  3549  			x, opr, c = c, brrev(opr), x
  3550  		}
  3551  	}
  3552  
  3553  	// If n.Op is ||, apply de Morgan.
  3554  	// Negate the internal ops now; we'll negate the top level op at the end.
  3555  	// Henceforth assume &&.
  3556  	negateResult := n.Op == OOROR
  3557  	if negateResult {
  3558  		opl = brcom(opl)
  3559  		opr = brcom(opr)
  3560  	}
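        	// For example, x < 4 || x >= 10 is treated as !(4 <= x && x < 10)
        	// and ultimately becomes uint(x-4) >= 6.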
  3561  
  3562  	cmpdir := func(o Op) int {
  3563  		switch o {
  3564  		case OLE, OLT:
  3565  			return -1
  3566  		case OGE, OGT:
  3567  			return +1
  3568  		}
  3569  		Fatalf("walkinrange cmpdir %v", o)
  3570  		return 0
  3571  	}
  3572  	if cmpdir(opl) != cmpdir(opr) {
  3573  		// Not a range check; something like b < a && b < c.
  3574  		return n
  3575  	}
  3576  
  3577  	switch opl {
  3578  	case OGE, OGT:
  3579  		// We have something like a > b && b ≥ c.
  3580  		// Switch and reverse ops and rename constants,
  3581  		// to make it look like a ≤ b && b < c.
  3582  		a, c = c, a
  3583  		opl, opr = brrev(opr), brrev(opl)
  3584  	}
  3585  
  3586  	// We must ensure that c-a is non-negative.
  3587  	// For now, require a and c to be constants.
  3588  	// In the future, we could also support a == 0 and c == len/cap(...).
  3589  	// Unfortunately, by this point, most len/cap expressions have been
  3590  	// stored into temporary variables.
  3591  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3592  		return n
  3593  	}
  3594  
  3595  	if opl == OLT {
  3596  		// We have a < b && ...
  3597  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3598  		// If a is not the maximum constant for b's type,
  3599  		// we can increment a and switch to ≤.
  3600  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3601  			return n
  3602  		}
  3603  		a = nodintconst(a.Int64() + 1)
  3604  		opl = OLE
  3605  	}
  3606  
  3607  	bound := c.Int64() - a.Int64()
  3608  	if bound < 0 {
  3609  		// Bad news. Something like 5 <= x && x < 3.
  3610  		// Rare in practice, and we still need to generate side-effects,
  3611  		// so just leave it alone.
  3612  		return n
  3613  	}
  3614  
  3615  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3616  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3617  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3618  	// which is equivalent to uint(b-a) < uint(c-a).
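        	// For example, 4 <= x && x < 10 becomes uint(x-4) < 6.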
  3619  	ut := b.Type.ToUnsigned()
  3620  	lhs := conv(nod(OSUB, b, a), ut)
  3621  	rhs := nodintconst(bound)
  3622  	if negateResult {
  3623  		// Negate top level.
  3624  		opr = brcom(opr)
  3625  	}
  3626  	cmp := nod(opr, lhs, rhs)
  3627  	cmp.Pos = n.Pos
  3628  	cmp = addinit(cmp, l.Ninit.Slice())
  3629  	cmp = addinit(cmp, r.Ninit.Slice())
  3630  	// Typecheck the AST rooted at cmp...
  3631  	cmp = typecheck(cmp, Erv)
  3632  	// ...but then reset cmp's type to match n's type.
  3633  	cmp.Type = n.Type
  3634  	cmp = walkexpr(cmp, init)
  3635  	return cmp
  3636  }
  3637  
  3638  // bounded reports whether the integer n is known to be in the range [0, max).
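        // For example, x&15 is bounded by 16, x%8 is bounded by 8 for unsigned x,
        // and any uint8 value is bounded by 256.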
  3639  func bounded(n *Node, max int64) bool {
  3640  	if n.Type == nil || !n.Type.IsInteger() {
  3641  		return false
  3642  	}
  3643  
  3644  	sign := n.Type.IsSigned()
  3645  	bits := int32(8 * n.Type.Width)
  3646  
  3647  	if smallintconst(n) {
  3648  		v := n.Int64()
  3649  		return 0 <= v && v < max
  3650  	}
  3651  
  3652  	switch n.Op {
  3653  	case OAND:
  3654  		v := int64(-1)
  3655  		if smallintconst(n.Left) {
  3656  			v = n.Left.Int64()
  3657  		} else if smallintconst(n.Right) {
  3658  			v = n.Right.Int64()
  3659  		}
  3660  
  3661  		if 0 <= v && v < max {
  3662  			return true
  3663  		}
  3664  
  3665  	case OMOD:
  3666  		if !sign && smallintconst(n.Right) {
  3667  			v := n.Right.Int64()
  3668  			if 0 <= v && v <= max {
  3669  				return true
  3670  			}
  3671  		}
  3672  
  3673  	case ODIV:
  3674  		if !sign && smallintconst(n.Right) {
  3675  			v := n.Right.Int64()
  3676  			for bits > 0 && v >= 2 {
  3677  				bits--
  3678  				v >>= 1
  3679  			}
  3680  		}
  3681  
  3682  	case ORSH:
  3683  		if !sign && smallintconst(n.Right) {
  3684  			v := n.Right.Int64()
  3685  			if v > int64(bits) {
  3686  				return true
  3687  			}
  3688  			bits -= int32(v)
  3689  		}
  3690  	}
  3691  
  3692  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3693  		return true
  3694  	}
  3695  
  3696  	return false
  3697  }
  3698  
  3699  // usemethod checks interface method calls for uses of reflect.Type.Method or reflect.Type.MethodByName.
  3700  func usemethod(n *Node) {
  3701  	t := n.Left.Type
  3702  
  3703  	// Looking for either of:
  3704  	//	Method(int) reflect.Method
  3705  	//	MethodByName(string) (reflect.Method, bool)
  3706  	//
  3707  	// TODO(crawshaw): improve precision of match by working out
  3708  	//                 how to check the method name.
  3709  	if n := t.NumParams(); n != 1 {
  3710  		return
  3711  	}
  3712  	if n := t.NumResults(); n != 1 && n != 2 {
  3713  		return
  3714  	}
  3715  	p0 := t.Params().Field(0)
  3716  	res0 := t.Results().Field(0)
  3717  	var res1 *types.Field
  3718  	if t.NumResults() == 2 {
  3719  		res1 = t.Results().Field(1)
  3720  	}
  3721  
  3722  	if res1 == nil {
  3723  		if p0.Type.Etype != TINT {
  3724  			return
  3725  		}
  3726  	} else {
  3727  		if !p0.Type.IsString() {
  3728  			return
  3729  		}
  3730  		if !res1.Type.IsBoolean() {
  3731  			return
  3732  		}
  3733  	}
  3734  
  3735  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3736  	//       (including global variables such as numImports - was issue #19028).
  3737  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3738  		Curfn.Func.SetReflectMethod(true)
  3739  	}
  3740  }
  3741  
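        // usefield records in Curfn.Func.FieldTrack that the current function
        // accesses the struct field selected by n, provided field tracking
        // (objabi.Fieldtrack_enabled) is on and the field's note contains go:"track".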
  3742  func usefield(n *Node) {
  3743  	if objabi.Fieldtrack_enabled == 0 {
  3744  		return
  3745  	}
  3746  
  3747  	switch n.Op {
  3748  	default:
  3749  		Fatalf("usefield %v", n.Op)
  3750  
  3751  	case ODOT, ODOTPTR:
  3752  		break
  3753  	}
  3754  	if n.Sym == nil {
  3755  		// No field name. This ODOT/ODOTPTR node was built by the compiler for
  3756  		// access to runtime data structures. Ignore it.
  3757  		return
  3758  	}
  3759  
  3760  	t := n.Left.Type
  3761  	if t.IsPtr() {
  3762  		t = t.Elem()
  3763  	}
  3764  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3765  	if field == nil {
  3766  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3767  	}
  3768  	if !strings.Contains(field.Note, "go:\"track\"") {
  3769  		return
  3770  	}
  3771  
  3772  	outer := n.Left.Type
  3773  	if outer.IsPtr() {
  3774  		outer = outer.Elem()
  3775  	}
  3776  	if outer.Sym == nil {
  3777  		yyerror("tracked field must be in named struct type")
  3778  	}
  3779  	if !exportname(field.Sym.Name) {
  3780  		yyerror("tracked field must be exported (upper case)")
  3781  	}
  3782  
  3783  	sym := tracksym(outer, field)
  3784  	if Curfn.Func.FieldTrack == nil {
  3785  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3786  	}
  3787  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3788  }
  3789  
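        // candiscardlist reports whether every node in l can be discarded.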
  3790  func candiscardlist(l Nodes) bool {
  3791  	for _, n := range l.Slice() {
  3792  		if !candiscard(n) {
  3793  			return false
  3794  		}
  3795  	}
  3796  	return true
  3797  }
  3798  
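        // candiscard reports whether evaluating n can be skipped entirely:
        // it must have no side effects and must not be able to fail at run time
        // (for example, no possible division by zero or bad make size).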
  3799  func candiscard(n *Node) bool {
  3800  	if n == nil {
  3801  		return true
  3802  	}
  3803  
  3804  	switch n.Op {
  3805  	default:
  3806  		return false
  3807  
  3808  		// Discardable as long as the subpieces are.
  3809  	case ONAME,
  3810  		ONONAME,
  3811  		OTYPE,
  3812  		OPACK,
  3813  		OLITERAL,
  3814  		OADD,
  3815  		OSUB,
  3816  		OOR,
  3817  		OXOR,
  3818  		OADDSTR,
  3819  		OADDR,
  3820  		OANDAND,
  3821  		OARRAYBYTESTR,
  3822  		OARRAYRUNESTR,
  3823  		OSTRARRAYBYTE,
  3824  		OSTRARRAYRUNE,
  3825  		OCAP,
  3826  		OCMPIFACE,
  3827  		OCMPSTR,
  3828  		OCOMPLIT,
  3829  		OMAPLIT,
  3830  		OSTRUCTLIT,
  3831  		OARRAYLIT,
  3832  		OSLICELIT,
  3833  		OPTRLIT,
  3834  		OCONV,
  3835  		OCONVIFACE,
  3836  		OCONVNOP,
  3837  		ODOT,
  3838  		OEQ,
  3839  		ONE,
  3840  		OLT,
  3841  		OLE,
  3842  		OGT,
  3843  		OGE,
  3844  		OKEY,
  3845  		OSTRUCTKEY,
  3846  		OLEN,
  3847  		OMUL,
  3848  		OLSH,
  3849  		ORSH,
  3850  		OAND,
  3851  		OANDNOT,
  3852  		ONEW,
  3853  		ONOT,
  3854  		OCOM,
  3855  		OPLUS,
  3856  		OMINUS,
  3857  		OOROR,
  3858  		OPAREN,
  3859  		ORUNESTR,
  3860  		OREAL,
  3861  		OIMAG,
  3862  		OCOMPLEX:
  3863  		break
  3864  
  3865  		// Discardable as long as we know it's not division by zero.
  3866  	case ODIV, OMOD:
  3867  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3868  			break
  3869  		}
  3870  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3871  			break
  3872  		}
  3873  		return false
  3874  
  3875  		// Discardable as long as we know it won't fail because of a bad size.
  3876  	case OMAKECHAN, OMAKEMAP:
  3877  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3878  			break
  3879  		}
  3880  		return false
  3881  
  3882  		// Difficult to tell what sizes are okay.
  3883  	case OMAKESLICE:
  3884  		return false
  3885  	}
  3886  
  3887  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3888  		return false
  3889  	}
  3890  
  3891  	return true
  3892  }
  3893  
  3894  // rewrite
  3895  //	print(x, y, z)
  3896  // into
  3897  //	func(a1, a2, a3) {
  3898  //		print(a1, a2, a3)
  3899  //	}(x, y, z)
  3900  // and same for println.
  3901  
  3902  var walkprintfunc_prgen int
  3903  
  3904  // The result of walkprintfunc MUST be assigned back to n, e.g.
  3905  // 	n.Left = walkprintfunc(n.Left, init)
  3906  func walkprintfunc(n *Node, init *Nodes) *Node {
  3907  	if n.Ninit.Len() != 0 {
  3908  		walkstmtlist(n.Ninit.Slice())
  3909  		init.AppendNodes(&n.Ninit)
  3910  	}
  3911  
  3912  	t := nod(OTFUNC, nil, nil)
  3913  	num := 0
  3914  	var printargs []*Node
  3915  	var a *Node
  3916  	var buf string
  3917  	for _, n1 := range n.List.Slice() {
  3918  		buf = fmt.Sprintf("a%d", num)
  3919  		num++
  3920  		a = namedfield(buf, n1.Type)
  3921  		t.List.Append(a)
  3922  		printargs = append(printargs, a.Left)
  3923  	}
  3924  
  3925  	oldfn := Curfn
  3926  	Curfn = nil
  3927  
  3928  	walkprintfunc_prgen++
  3929  	sym := lookupN("print·%d", walkprintfunc_prgen)
  3930  	fn := dclfunc(sym, t)
  3931  
  3932  	a = nod(n.Op, nil, nil)
  3933  	a.List.Set(printargs)
  3934  	a = typecheck(a, Etop)
  3935  	a = walkstmt(a)
  3936  
  3937  	fn.Nbody.Set1(a)
  3938  
  3939  	funcbody()
  3940  
  3941  	fn = typecheck(fn, Etop)
  3942  	typecheckslice(fn.Nbody.Slice(), Etop)
  3943  	xtop = append(xtop, fn)
  3944  	Curfn = oldfn
  3945  
  3946  	a = nod(OCALL, nil, nil)
  3947  	a.Left = fn.Func.Nname
  3948  	a.List.Set(n.List.Slice())
  3949  	a = typecheck(a, Etop)
  3950  	a = walkexpr(a, init)
  3951  	return a
  3952  }
  3953  
  3954  // substArgTypes substitutes the given list of types for
  3955  // successive occurrences of the "any" placeholder in the
  3956  // type syntax expression n.Type.
  3957  // The result of substArgTypes MUST be assigned back to old, e.g.
  3958  // 	n.Left = substArgTypes(n.Left, t1, t2)
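        // For example, substituting t1 and t2 into a signature func(any, any) int
        // yields func(t1, t2) int.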
  3959  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3960  	n := *old // make shallow copy
  3961  
  3962  	for _, t := range types_ {
  3963  		dowidth(t)
  3964  	}
  3965  	n.Type = types.SubstAny(n.Type, &types_)
  3966  	if len(types_) > 0 {
  3967  		Fatalf("substArgTypes: too many argument types")
  3968  	}
  3969  	return &n
  3970  }