github.com/zebozhuang/go@v0.0.0-20200207033046-f8a98f6f5c5d/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"fmt"
    12  	"strings"
    13  )
    14  
    15  // The constant is known to runtime.
    16  const (
    17  	tmpstringbufsize = 32
    18  )
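         // (Roughly: for conversions that do not escape, walk passes the address of a
         // stack-allocated [tmpstringbufsize]byte temporary to runtime helpers such as
         // slicebytetostring and stringtoslicebyte below.)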
    19  
    20  func walk(fn *Node) {
    21  	Curfn = fn
    22  
    23  	if Debug['W'] != 0 {
    24  		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
    25  		dumplist(s, Curfn.Nbody)
    26  	}
    27  
    28  	lno := lineno
    29  
    30  	// Final typecheck for any unused variables.
    31  	for i, ln := range fn.Func.Dcl {
    32  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    33  			ln = typecheck(ln, Erv|Easgn)
    34  			fn.Func.Dcl[i] = ln
    35  		}
    36  	}
    37  
     38  	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    39  	for _, ln := range fn.Func.Dcl {
    40  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    41  			ln.Name.Defn.Left.Name.SetUsed(true)
    42  		}
    43  	}
    44  
    45  	for _, ln := range fn.Func.Dcl {
    46  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    47  			continue
    48  		}
    49  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    50  			if defn.Left.Name.Used() {
    51  				continue
    52  			}
    53  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    54  			defn.Left.Name.SetUsed(true) // suppress repeats
    55  		} else {
    56  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    57  		}
    58  	}
    59  
    60  	lineno = lno
    61  	if nerrors != 0 {
    62  		return
    63  	}
    64  	walkstmtlist(Curfn.Nbody.Slice())
    65  	if Debug['W'] != 0 {
    66  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    67  		dumplist(s, Curfn.Nbody)
    68  	}
    69  
    70  	zeroResults()
    71  	heapmoves()
    72  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    73  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    74  		dumplist(s, Curfn.Func.Enter)
    75  	}
    76  }
    77  
    78  func walkstmtlist(s []*Node) {
    79  	for i := range s {
    80  		s[i] = walkstmt(s[i])
    81  	}
    82  }
    83  
    84  func samelist(a, b []*Node) bool {
    85  	if len(a) != len(b) {
    86  		return false
    87  	}
    88  	for i, n := range a {
    89  		if n != b[i] {
    90  			return false
    91  		}
    92  	}
    93  	return true
    94  }
    95  
    96  func paramoutheap(fn *Node) bool {
    97  	for _, ln := range fn.Func.Dcl {
    98  		switch ln.Class() {
    99  		case PPARAMOUT:
   100  			if ln.isParamStackCopy() || ln.Addrtaken() {
   101  				return true
   102  			}
   103  
   104  		case PAUTO:
   105  			// stop early - parameters are over
   106  			return false
   107  		}
   108  	}
   109  
   110  	return false
   111  }
   112  
    113  // adjustargs adds "adjust" to all the argument locations for the call n.
   114  // n must be a defer or go node that has already been walked.
   115  func adjustargs(n *Node, adjust int) {
   116  	var arg *Node
   117  	var lhs *Node
   118  
   119  	callfunc := n.Left
   120  	for _, arg = range callfunc.List.Slice() {
   121  		if arg.Op != OAS {
   122  			Fatalf("call arg not assignment")
   123  		}
   124  		lhs = arg.Left
   125  		if lhs.Op == ONAME {
   126  			// This is a temporary introduced by reorder1.
   127  			// The real store to the stack appears later in the arg list.
   128  			continue
   129  		}
   130  
   131  		if lhs.Op != OINDREGSP {
   132  			Fatalf("call argument store does not use OINDREGSP")
   133  		}
   134  
   135  		// can't really check this in machine-indep code.
   136  		//if(lhs->val.u.reg != D_SP)
   137  		//      Fatalf("call arg assign not indreg(SP)")
   138  		lhs.Xoffset += int64(adjust)
   139  	}
   140  }
   141  
   142  // The result of walkstmt MUST be assigned back to n, e.g.
   143  // 	n.Left = walkstmt(n.Left)
   144  func walkstmt(n *Node) *Node {
   145  	if n == nil {
   146  		return n
   147  	}
   148  
   149  	setlineno(n)
   150  
   151  	walkstmtlist(n.Ninit.Slice())
   152  
   153  	switch n.Op {
   154  	default:
   155  		if n.Op == ONAME {
   156  			yyerror("%v is not a top level statement", n.Sym)
   157  		} else {
   158  			yyerror("%v is not a top level statement", n.Op)
   159  		}
   160  		Dump("nottop", n)
   161  
   162  	case OAS,
   163  		OASOP,
   164  		OAS2,
   165  		OAS2DOTTYPE,
   166  		OAS2RECV,
   167  		OAS2FUNC,
   168  		OAS2MAPR,
   169  		OCLOSE,
   170  		OCOPY,
   171  		OCALLMETH,
   172  		OCALLINTER,
   173  		OCALL,
   174  		OCALLFUNC,
   175  		ODELETE,
   176  		OSEND,
   177  		OPRINT,
   178  		OPRINTN,
   179  		OPANIC,
   180  		OEMPTY,
   181  		ORECOVER,
   182  		OGETG:
   183  		if n.Typecheck() == 0 {
   184  			Fatalf("missing typecheck: %+v", n)
   185  		}
   186  		wascopy := n.Op == OCOPY
   187  		init := n.Ninit
   188  		n.Ninit.Set(nil)
   189  		n = walkexpr(n, &init)
   190  		n = addinit(n, init.Slice())
   191  		if wascopy && n.Op == OCONVNOP {
   192  			n.Op = OEMPTY // don't leave plain values as statements.
   193  		}
   194  
   195  	// special case for a receive where we throw away
   196  	// the value received.
   197  	case ORECV:
   198  		if n.Typecheck() == 0 {
   199  			Fatalf("missing typecheck: %+v", n)
   200  		}
   201  		init := n.Ninit
   202  		n.Ninit.Set(nil)
   203  
   204  		n.Left = walkexpr(n.Left, &init)
   205  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   206  		n = walkexpr(n, &init)
   207  
   208  		n = addinit(n, init.Slice())
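         		// Roughly: a statement-level "<-c" has been rewritten to the call
         		// chanrecv1(c, nil), which discards the received value.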
   209  
   210  	case OBREAK,
   211  		OCONTINUE,
   212  		OFALL,
   213  		OGOTO,
   214  		OLABEL,
   215  		ODCLCONST,
   216  		ODCLTYPE,
   217  		OCHECKNIL,
   218  		OVARKILL,
   219  		OVARLIVE:
   220  		break
   221  
   222  	case ODCL:
   223  		v := n.Left
   224  		if v.Class() == PAUTOHEAP {
   225  			if compiling_runtime {
   226  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   227  			}
   228  			if prealloc[v] == nil {
   229  				prealloc[v] = callnew(v.Type)
   230  			}
   231  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   232  			nn.SetColas(true)
   233  			nn = typecheck(nn, Etop)
   234  			return walkstmt(nn)
   235  		}
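         		// Roughly: the branch above turns "var x T" for a heap-escaping x into
         		// an assignment of new(T) to x's Heapaddr, allocating its backing storage.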
   236  
   237  	case OBLOCK:
   238  		walkstmtlist(n.List.Slice())
   239  
   240  	case OXCASE:
   241  		yyerror("case statement out of place")
   242  		n.Op = OCASE
   243  		fallthrough
   244  
   245  	case OCASE:
   246  		n.Right = walkstmt(n.Right)
   247  
   248  	case ODEFER:
   249  		Curfn.Func.SetHasDefer(true)
   250  		switch n.Left.Op {
   251  		case OPRINT, OPRINTN:
   252  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   253  
   254  		case OCOPY:
   255  			n.Left = copyany(n.Left, &n.Ninit, true)
   256  
   257  		default:
   258  			n.Left = walkexpr(n.Left, &n.Ninit)
   259  		}
   260  
   261  		// make room for size & fn arguments.
   262  		adjustargs(n, 2*Widthptr)
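         		// (The runtime's deferproc, and newproc for go statements, take the
         		// argument size and the function value before the copied call arguments,
         		// hence the 2*Widthptr shift.)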
   263  
   264  	case OFOR, OFORUNTIL:
   265  		if n.Left != nil {
   266  			walkstmtlist(n.Left.Ninit.Slice())
   267  			init := n.Left.Ninit
   268  			n.Left.Ninit.Set(nil)
   269  			n.Left = walkexpr(n.Left, &init)
   270  			n.Left = addinit(n.Left, init.Slice())
   271  		}
   272  
   273  		n.Right = walkstmt(n.Right)
   274  		walkstmtlist(n.Nbody.Slice())
   275  
   276  	case OIF:
   277  		n.Left = walkexpr(n.Left, &n.Ninit)
   278  		walkstmtlist(n.Nbody.Slice())
   279  		walkstmtlist(n.Rlist.Slice())
   280  
   281  	case OPROC:
   282  		switch n.Left.Op {
   283  		case OPRINT, OPRINTN:
   284  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   285  
   286  		case OCOPY:
   287  			n.Left = copyany(n.Left, &n.Ninit, true)
   288  
   289  		default:
   290  			n.Left = walkexpr(n.Left, &n.Ninit)
   291  		}
   292  
   293  		// make room for size & fn arguments.
   294  		adjustargs(n, 2*Widthptr)
   295  
   296  	case ORETURN:
   297  		walkexprlist(n.List.Slice(), &n.Ninit)
   298  		if n.List.Len() == 0 {
   299  			break
   300  		}
   301  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   302  			// assign to the function out parameters,
   303  			// so that reorder3 can fix up conflicts
   304  			var rl []*Node
   305  
   306  			var cl Class
   307  			for _, ln := range Curfn.Func.Dcl {
   308  				cl = ln.Class()
   309  				if cl == PAUTO || cl == PAUTOHEAP {
   310  					break
   311  				}
   312  				if cl == PPARAMOUT {
   313  					if ln.isParamStackCopy() {
   314  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   315  					}
   316  					rl = append(rl, ln)
   317  				}
   318  			}
   319  
   320  			if got, want := n.List.Len(), len(rl); got != want {
   321  				// order should have rewritten multi-value function calls
   322  				// with explicit OAS2FUNC nodes.
   323  				Fatalf("expected %v return arguments, have %v", want, got)
   324  			}
   325  
   326  			if samelist(rl, n.List.Slice()) {
   327  				// special return in disguise
   328  				n.List.Set(nil)
   329  
   330  				break
   331  			}
   332  
   333  			// move function calls out, to make reorder3's job easier.
   334  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   335  
   336  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   337  			n.List.Set(reorder3(ll))
   338  			break
   339  		}
   340  
   341  		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
   342  		n.List.Set(ll)
   343  
   344  	case ORETJMP:
   345  		break
   346  
   347  	case OSELECT:
   348  		walkselect(n)
   349  
   350  	case OSWITCH:
   351  		walkswitch(n)
   352  
   353  	case ORANGE:
   354  		n = walkrange(n)
   355  
   356  	case OXFALL:
   357  		yyerror("fallthrough statement out of place")
   358  		n.Op = OFALL
   359  	}
   360  
   361  	if n.Op == ONAME {
   362  		Fatalf("walkstmt ended up with name: %+v", n)
   363  	}
   364  	return n
   365  }
   366  
   367  func isSmallMakeSlice(n *Node) bool {
   368  	if n.Op != OMAKESLICE {
   369  		return false
   370  	}
   371  	l := n.Left
   372  	r := n.Right
   373  	if r == nil {
   374  		r = l
   375  	}
   376  	t := n.Type
   377  
   378  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
   379  }
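         // For example, make([]int64, 0, 10) (an 80-byte backing array) qualifies,
         // while make([]byte, 1<<17) does not: cap times element size must stay
         // (roughly) under 1<<16 bytes.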
   380  
   381  // walk the whole tree of the body of an
   382  // expression or simple statement.
    383  // the types of expressions are calculated.
   384  // compile-time constants are evaluated.
   385  // complex side effects like statements are appended to init
   386  func walkexprlist(s []*Node, init *Nodes) {
   387  	for i := range s {
   388  		s[i] = walkexpr(s[i], init)
   389  	}
   390  }
   391  
   392  func walkexprlistsafe(s []*Node, init *Nodes) {
   393  	for i, n := range s {
   394  		s[i] = safeexpr(n, init)
   395  		s[i] = walkexpr(s[i], init)
   396  	}
   397  }
   398  
   399  func walkexprlistcheap(s []*Node, init *Nodes) {
   400  	for i, n := range s {
   401  		s[i] = cheapexpr(n, init)
   402  		s[i] = walkexpr(s[i], init)
   403  	}
   404  }
   405  
   406  // Build name of function for interface conversion.
   407  // Not all names are possible
   408  // (e.g., we'll never generate convE2E or convE2I or convI2E).
   409  func convFuncName(from, to *types.Type) string {
   410  	tkind := to.Tie()
   411  	switch from.Tie() {
   412  	case 'I':
   413  		switch tkind {
   414  		case 'I':
   415  			return "convI2I"
   416  		}
   417  	case 'T':
   418  		switch tkind {
   419  		case 'E':
   420  			switch {
   421  			case from.Size() == 2 && from.Align == 2:
   422  				return "convT2E16"
   423  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   424  				return "convT2E32"
   425  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   426  				return "convT2E64"
   427  			case from.IsString():
   428  				return "convT2Estring"
   429  			case from.IsSlice():
   430  				return "convT2Eslice"
   431  			case !types.Haspointers(from):
   432  				return "convT2Enoptr"
   433  			}
   434  			return "convT2E"
   435  		case 'I':
   436  			switch {
   437  			case from.Size() == 2 && from.Align == 2:
   438  				return "convT2I16"
   439  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   440  				return "convT2I32"
   441  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   442  				return "convT2I64"
   443  			case from.IsString():
   444  				return "convT2Istring"
   445  			case from.IsSlice():
   446  				return "convT2Islice"
   447  			case !types.Haspointers(from):
   448  				return "convT2Inoptr"
   449  			}
   450  			return "convT2I"
   451  		}
   452  	}
   453  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   454  	panic("unreachable")
   455  }
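         // For example, on a 64-bit target converting an int to interface{} selects
         // convT2E64, and converting a string to a non-empty interface selects
         // convT2Istring.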
   456  
   457  // The result of walkexpr MUST be assigned back to n, e.g.
   458  // 	n.Left = walkexpr(n.Left, init)
   459  func walkexpr(n *Node, init *Nodes) *Node {
   460  	if n == nil {
   461  		return n
   462  	}
   463  
   464  	// Eagerly checkwidth all expressions for the back end.
   465  	if n.Type != nil && !n.Type.WidthCalculated() {
   466  		switch n.Type.Etype {
   467  		case TBLANK, TNIL, TIDEAL:
   468  		default:
   469  			checkwidth(n.Type)
   470  		}
   471  	}
   472  
   473  	if init == &n.Ninit {
   474  		// not okay to use n->ninit when walking n,
   475  		// because we might replace n with some other node
   476  		// and would lose the init list.
   477  		Fatalf("walkexpr init == &n->ninit")
   478  	}
   479  
   480  	if n.Ninit.Len() != 0 {
   481  		walkstmtlist(n.Ninit.Slice())
   482  		init.AppendNodes(&n.Ninit)
   483  	}
   484  
   485  	lno := setlineno(n)
   486  
   487  	if Debug['w'] > 1 {
   488  		Dump("walk-before", n)
   489  	}
   490  
   491  	if n.Typecheck() != 1 {
   492  		Fatalf("missed typecheck: %+v", n)
   493  	}
   494  
   495  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   496  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   497  		nn = typecheck(nn, Erv)
   498  		nn = walkexpr(nn, init)
   499  		nn.Left.SetNonNil(true)
   500  		return nn
   501  	}
   502  
   503  opswitch:
   504  	switch n.Op {
   505  	default:
   506  		Dump("walk", n)
   507  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   508  
   509  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   510  
   511  	case OTYPE, ONAME, OLITERAL:
   512  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   513  		// Perhaps refactor to use Node.mayBeShared for these instead.
   514  		// If these return early, make sure to still call
   515  		// stringsym for constant strings.
   516  
   517  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   518  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   519  		n.Left = walkexpr(n.Left, init)
   520  
   521  	case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR:
   522  		n.Left = walkexpr(n.Left, init)
   523  		n.Right = walkexpr(n.Right, init)
   524  
   525  	case ODOT:
   526  		usefield(n)
   527  		n.Left = walkexpr(n.Left, init)
   528  
   529  	case ODOTTYPE, ODOTTYPE2:
   530  		n.Left = walkexpr(n.Left, init)
   531  		// Set up interface type addresses for back end.
   532  		n.Right = typename(n.Type)
   533  		if n.Op == ODOTTYPE {
   534  			n.Right.Right = typename(n.Left.Type)
   535  		}
   536  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   537  			n.List.Set1(itabname(n.Type, n.Left.Type))
   538  		}
   539  
   540  	case ODOTPTR:
   541  		usefield(n)
   542  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   543  			// No actual copy will be generated, so emit an explicit nil check.
   544  			n.Left = cheapexpr(n.Left, init)
   545  
   546  			checknil(n.Left, init)
   547  		}
   548  
   549  		n.Left = walkexpr(n.Left, init)
   550  
   551  	case OLEN, OCAP:
   552  		n.Left = walkexpr(n.Left, init)
   553  
   554  		// replace len(*[10]int) with 10.
   555  		// delayed until now to preserve side effects.
   556  		t := n.Left.Type
   557  
   558  		if t.IsPtr() {
   559  			t = t.Elem()
   560  		}
   561  		if t.IsArray() {
   562  			safeexpr(n.Left, init)
   563  			nodconst(n, n.Type, t.NumElem())
   564  			n.SetTypecheck(1)
   565  		}
   566  
   567  	case OLSH, ORSH:
   568  		n.Left = walkexpr(n.Left, init)
   569  		n.Right = walkexpr(n.Right, init)
   570  		t := n.Left.Type
   571  		n.SetBounded(bounded(n.Right, 8*t.Width))
   572  		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
   573  			Warn("shift bounds check elided")
   574  		}
   575  
   576  	case OCOMPLEX:
   577  		// Use results from call expression as arguments for complex.
   578  		if n.Left == nil && n.Right == nil {
   579  			n.Left = n.List.First()
   580  			n.Right = n.List.Second()
   581  		}
   582  		n.Left = walkexpr(n.Left, init)
   583  		n.Right = walkexpr(n.Right, init)
   584  
   585  	case OEQ, ONE:
   586  		n.Left = walkexpr(n.Left, init)
   587  		n.Right = walkexpr(n.Right, init)
   588  
   589  		// Disable safemode while compiling this code: the code we
   590  		// generate internally can refer to unsafe.Pointer.
   591  		// In this case it can happen if we need to generate an ==
   592  		// for a struct containing a reflect.Value, which itself has
   593  		// an unexported field of type unsafe.Pointer.
   594  		old_safemode := safemode
   595  		safemode = false
   596  		n = walkcompare(n, init)
   597  		safemode = old_safemode
   598  
   599  	case OANDAND, OOROR:
   600  		n.Left = walkexpr(n.Left, init)
   601  
   602  		// cannot put side effects from n.Right on init,
   603  		// because they cannot run before n.Left is checked.
   604  		// save elsewhere and store on the eventual n.Right.
   605  		var ll Nodes
   606  
   607  		n.Right = walkexpr(n.Right, &ll)
   608  		n.Right = addinit(n.Right, ll.Slice())
   609  		n = walkinrange(n, init)
   610  
   611  	case OPRINT, OPRINTN:
   612  		walkexprlist(n.List.Slice(), init)
   613  		n = walkprint(n, init)
   614  
   615  	case OPANIC:
   616  		n = mkcall("gopanic", nil, init, n.Left)
   617  
   618  	case ORECOVER:
   619  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   620  
   621  	case OCLOSUREVAR, OCFUNC:
   622  		n.SetAddable(true)
   623  
   624  	case OCALLINTER:
   625  		usemethod(n)
   626  		t := n.Left.Type
   627  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   628  			break
   629  		}
   630  		n.Left = walkexpr(n.Left, init)
   631  		walkexprlist(n.List.Slice(), init)
   632  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   633  		n.List.Set(reorder1(ll))
   634  
   635  	case OCALLFUNC:
   636  		if n.Left.Op == OCLOSURE {
   637  			// Transform direct call of a closure to call of a normal function.
   638  			// transformclosure already did all preparation work.
   639  
   640  			// Prepend captured variables to argument list.
   641  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   642  
   643  			n.Left.Func.Enter.Set(nil)
   644  
   645  			// Replace OCLOSURE with ONAME/PFUNC.
   646  			n.Left = n.Left.Func.Closure.Func.Nname
   647  
   648  			// Update type of OCALLFUNC node.
   649  			// Output arguments had not changed, but their offsets could.
   650  			if n.Left.Type.Results().NumFields() == 1 {
   651  				n.Type = n.Left.Type.Results().Field(0).Type
   652  			} else {
   653  				n.Type = n.Left.Type.Results()
   654  			}
   655  		}
   656  
   657  		t := n.Left.Type
   658  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   659  			break
   660  		}
   661  
   662  		n.Left = walkexpr(n.Left, init)
   663  		walkexprlist(n.List.Slice(), init)
   664  
   665  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   666  		n.List.Set(reorder1(ll))
   667  
   668  	case OCALLMETH:
   669  		t := n.Left.Type
   670  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   671  			break
   672  		}
   673  		n.Left = walkexpr(n.Left, init)
   674  		walkexprlist(n.List.Slice(), init)
   675  		ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
   676  		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   677  		ll = append(ll, lr...)
   678  		n.Left.Left = nil
   679  		updateHasCall(n.Left)
   680  		n.List.Set(reorder1(ll))
   681  
   682  	case OAS:
   683  		init.AppendNodes(&n.Ninit)
   684  
   685  		n.Left = walkexpr(n.Left, init)
   686  		n.Left = safeexpr(n.Left, init)
   687  
   688  		if oaslit(n, init) {
   689  			break
   690  		}
   691  
   692  		if n.Right == nil {
   693  			// TODO(austin): Check all "implicit zeroing"
   694  			break
   695  		}
   696  
   697  		if !instrumenting && iszero(n.Right) {
   698  			break
   699  		}
   700  
   701  		switch n.Right.Op {
   702  		default:
   703  			n.Right = walkexpr(n.Right, init)
   704  
   705  		case ORECV:
   706  			// x = <-c; n.Left is x, n.Right.Left is c.
   707  			// orderstmt made sure x is addressable.
   708  			n.Right.Left = walkexpr(n.Right.Left, init)
   709  
   710  			n1 := nod(OADDR, n.Left, nil)
   711  			r := n.Right.Left // the channel
   712  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   713  			n = walkexpr(n, init)
   714  			break opswitch
   715  
   716  		case OAPPEND:
   717  			// x = append(...)
   718  			r := n.Right
   719  			if r.Type.Elem().NotInHeap() {
   720  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   721  			}
   722  			if r.Isddd() {
   723  				r = appendslice(r, init) // also works for append(slice, string).
   724  			} else {
   725  				r = walkappend(r, init, n)
   726  			}
   727  			n.Right = r
   728  			if r.Op == OAPPEND {
   729  				// Left in place for back end.
   730  				// Do not add a new write barrier.
   731  				// Set up address of type for back end.
   732  				r.Left = typename(r.Type.Elem())
   733  				break opswitch
   734  			}
   735  			// Otherwise, lowered for race detector.
   736  			// Treat as ordinary assignment.
   737  		}
   738  
   739  		if n.Left != nil && n.Right != nil {
   740  			n = convas(n, init)
   741  		}
   742  
   743  	case OAS2:
   744  		init.AppendNodes(&n.Ninit)
   745  		walkexprlistsafe(n.List.Slice(), init)
   746  		walkexprlistsafe(n.Rlist.Slice(), init)
   747  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   748  		ll = reorder3(ll)
   749  		n = liststmt(ll)
   750  
   751  	// a,b,... = fn()
   752  	case OAS2FUNC:
   753  		init.AppendNodes(&n.Ninit)
   754  
   755  		r := n.Rlist.First()
   756  		walkexprlistsafe(n.List.Slice(), init)
   757  		r = walkexpr(r, init)
   758  
   759  		if isIntrinsicCall(r) {
   760  			n.Rlist.Set1(r)
   761  			break
   762  		}
   763  		init.Append(r)
   764  
   765  		ll := ascompatet(n.Op, n.List, r.Type)
   766  		n = liststmt(ll)
   767  
   768  	// x, y = <-c
   769  	// orderstmt made sure x is addressable.
   770  	case OAS2RECV:
   771  		init.AppendNodes(&n.Ninit)
   772  
   773  		r := n.Rlist.First()
   774  		walkexprlistsafe(n.List.Slice(), init)
   775  		r.Left = walkexpr(r.Left, init)
   776  		var n1 *Node
   777  		if isblank(n.List.First()) {
   778  			n1 = nodnil()
   779  		} else {
   780  			n1 = nod(OADDR, n.List.First(), nil)
   781  		}
   782  		n1.Etype = 1 // addr does not escape
   783  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   784  		ok := n.List.Second()
   785  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   786  		n = nod(OAS, ok, call)
   787  		n = typecheck(n, Etop)
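         		// Roughly: "x, ok = <-c" has become "ok = chanrecv2(c, &x)",
         		// with nil in place of &x when x is the blank identifier.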
   788  
   789  	// a,b = m[i]
   790  	case OAS2MAPR:
   791  		init.AppendNodes(&n.Ninit)
   792  
   793  		r := n.Rlist.First()
   794  		walkexprlistsafe(n.List.Slice(), init)
   795  		r.Left = walkexpr(r.Left, init)
   796  		r.Right = walkexpr(r.Right, init)
   797  		t := r.Left.Type
   798  
   799  		fast := mapfast(t)
   800  		var key *Node
   801  		if fast != mapslow {
   802  			// fast versions take key by value
   803  			key = r.Right
   804  		} else {
   805  			// standard version takes key by reference
   806  			// orderexpr made sure key is addressable.
   807  			key = nod(OADDR, r.Right, nil)
   808  		}
   809  
   810  		// from:
   811  		//   a,b = m[i]
   812  		// to:
   813  		//   var,b = mapaccess2*(t, m, i)
   814  		//   a = *var
   815  		a := n.List.First()
   816  
   817  		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
   818  			fn := mapfn(mapaccess2[fast], t)
   819  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   820  		} else {
   821  			fn := mapfn("mapaccess2_fat", t)
   822  			z := zeroaddr(w)
   823  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   824  		}
   825  
   826  		// mapaccess2* returns a typed bool, but due to spec changes,
   827  		// the boolean result of i.(T) is now untyped so we make it the
   828  		// same type as the variable on the lhs.
   829  		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
   830  			r.Type.Field(1).Type = ok.Type
   831  		}
   832  		n.Rlist.Set1(r)
   833  		n.Op = OAS2FUNC
   834  
   835  		// don't generate a = *var if a is _
   836  		if !isblank(a) {
   837  			var_ := temp(types.NewPtr(t.Val()))
   838  			var_.SetTypecheck(1)
   839  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   840  			n.List.SetFirst(var_)
   841  			n = walkexpr(n, init)
   842  			init.Append(n)
   843  			n = nod(OAS, a, nod(OIND, var_, nil))
   844  		}
   845  
   846  		n = typecheck(n, Etop)
   847  		n = walkexpr(n, init)
   848  
   849  	case ODELETE:
   850  		init.AppendNodes(&n.Ninit)
   851  		map_ := n.List.First()
   852  		key := n.List.Second()
   853  		map_ = walkexpr(map_, init)
   854  		key = walkexpr(key, init)
   855  
   856  		t := map_.Type
   857  		fast := mapfast(t)
   858  		if fast == mapslow {
   859  			// orderstmt made sure key is addressable.
   860  			key = nod(OADDR, key, nil)
   861  		}
   862  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
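         		// Roughly: "delete(m, k)" becomes "mapdelete(maptype, m, &k)";
         		// the fast variants take k by value instead.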
   863  
   864  	case OAS2DOTTYPE:
   865  		walkexprlistsafe(n.List.Slice(), init)
   866  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   867  
   868  	case OCONVIFACE:
   869  		n.Left = walkexpr(n.Left, init)
   870  
   871  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
   872  		if isdirectiface(n.Left.Type) {
   873  			var t *Node
   874  			if n.Type.IsEmptyInterface() {
   875  				t = typename(n.Left.Type)
   876  			} else {
   877  				t = itabname(n.Left.Type, n.Type)
   878  			}
   879  			l := nod(OEFACE, t, n.Left)
   880  			l.Type = n.Type
   881  			l.SetTypecheck(n.Typecheck())
   882  			n = l
   883  			break
   884  		}
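         		// (For pointer-shaped T the branch above builds the interface value
         		// directly as the pair {type or itab, value}, with no allocation or
         		// runtime call.)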
   885  
   886  		if staticbytes == nil {
   887  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   888  			staticbytes.SetClass(PEXTERN)
   889  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   890  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   891  			zerobase.SetClass(PEXTERN)
   892  			zerobase.Type = types.Types[TUINTPTR]
   893  		}
   894  
   895  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   896  		// by using an existing addressable value identical to n.Left
   897  		// or creating one on the stack.
   898  		var value *Node
   899  		switch {
   900  		case n.Left.Type.Size() == 0:
   901  			// n.Left is zero-sized. Use zerobase.
   902  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   903  			value = zerobase
   904  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   905  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   906  			n.Left = cheapexpr(n.Left, init)
   907  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   908  			value.SetBounded(true)
   909  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   910  			// n.Left is a readonly global; use it directly.
   911  			value = n.Left
   912  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   913  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   914  			value = temp(n.Left.Type)
   915  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   916  		}
   917  
   918  		if value != nil {
   919  			// Value is identical to n.Left.
   920  			// Construct the interface directly: {type/itab, &value}.
   921  			var t *Node
   922  			if n.Type.IsEmptyInterface() {
   923  				t = typename(n.Left.Type)
   924  			} else {
   925  				t = itabname(n.Left.Type, n.Type)
   926  			}
   927  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   928  			l.Type = n.Type
   929  			l.SetTypecheck(n.Typecheck())
   930  			n = l
   931  			break
   932  		}
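         		// (For example, the branch above converts a non-escaping bool b to
         		// {type or itab, &staticbytes[b]}, and a zero-sized value to use
         		// &zerobase, without allocating.)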
   933  
   934  		// Implement interface to empty interface conversion.
   935  		// tmp = i.itab
   936  		// if tmp != nil {
   937  		//    tmp = tmp.type
   938  		// }
   939  		// e = iface{tmp, i.data}
   940  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   941  			// Evaluate the input interface.
   942  			c := temp(n.Left.Type)
   943  			init.Append(nod(OAS, c, n.Left))
   944  
   945  			// Get the itab out of the interface.
   946  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   947  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   948  
   949  			// Get the type out of the itab.
   950  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   951  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   952  			init.Append(nif)
   953  
   954  			// Build the result.
   955  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   956  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   957  			e.SetTypecheck(1)
   958  			n = e
   959  			break
   960  		}
   961  
   962  		var ll []*Node
   963  		if n.Type.IsEmptyInterface() {
   964  			if !n.Left.Type.IsInterface() {
   965  				ll = append(ll, typename(n.Left.Type))
   966  			}
   967  		} else {
   968  			if n.Left.Type.IsInterface() {
   969  				ll = append(ll, typename(n.Type))
   970  			} else {
   971  				ll = append(ll, itabname(n.Left.Type, n.Type))
   972  			}
   973  		}
   974  
   975  		if n.Left.Type.IsInterface() {
   976  			ll = append(ll, n.Left)
   977  		} else {
   978  			// regular types are passed by reference to avoid C vararg calls
   979  			// orderexpr arranged for n.Left to be a temporary for all
   980  			// the conversions it could see. comparison of an interface
   981  			// with a non-interface, especially in a switch on interface value
   982  			// with non-interface cases, is not visible to orderstmt, so we
   983  			// have to fall back on allocating a temp here.
   984  			if islvalue(n.Left) {
   985  				ll = append(ll, nod(OADDR, n.Left, nil))
   986  			} else {
   987  				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
   988  			}
   989  			dowidth(n.Left.Type)
   990  		}
   991  
   992  		fn := syslook(convFuncName(n.Left.Type, n.Type))
   993  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   994  		dowidth(fn.Type)
   995  		n = nod(OCALL, fn, nil)
   996  		n.List.Set(ll)
   997  		n = typecheck(n, Erv)
   998  		n = walkexpr(n, init)
   999  
  1000  	case OCONV, OCONVNOP:
  1001  		if thearch.LinkArch.Family == sys.ARM || thearch.LinkArch.Family == sys.MIPS {
  1002  			if n.Left.Type.IsFloat() {
  1003  				if n.Type.Etype == TINT64 {
  1004  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1005  					break
  1006  				}
  1007  
  1008  				if n.Type.Etype == TUINT64 {
  1009  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1010  					break
  1011  				}
  1012  			}
  1013  
  1014  			if n.Type.IsFloat() {
  1015  				if n.Left.Type.Etype == TINT64 {
  1016  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1017  					break
  1018  				}
  1019  
  1020  				if n.Left.Type.Etype == TUINT64 {
  1021  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1022  					break
  1023  				}
  1024  			}
  1025  		}
  1026  
  1027  		if thearch.LinkArch.Family == sys.I386 {
  1028  			if n.Left.Type.IsFloat() {
  1029  				if n.Type.Etype == TINT64 {
  1030  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1031  					break
  1032  				}
  1033  
  1034  				if n.Type.Etype == TUINT64 {
  1035  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1036  					break
  1037  				}
  1038  				if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
  1039  					n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1040  					break
  1041  				}
  1042  			}
  1043  			if n.Type.IsFloat() {
  1044  				if n.Left.Type.Etype == TINT64 {
  1045  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1046  					break
  1047  				}
  1048  
  1049  				if n.Left.Type.Etype == TUINT64 {
  1050  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1051  					break
  1052  				}
  1053  				if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
  1054  					n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
  1055  					break
  1056  				}
  1057  			}
  1058  		}
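         		// (On these 32-bit targets the cases above turn "int64(f)" for a float f
         		// into float64toint64(float64(f)) and "float64(i)" for an int64 i into
         		// int64tofloat64(i); the unsigned variants are analogous.)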
  1059  
  1060  		n.Left = walkexpr(n.Left, init)
  1061  
  1062  	case OANDNOT:
  1063  		n.Left = walkexpr(n.Left, init)
  1064  		n.Op = OAND
  1065  		n.Right = nod(OCOM, n.Right, nil)
  1066  		n.Right = typecheck(n.Right, Erv)
  1067  		n.Right = walkexpr(n.Right, init)
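         		// That is, "x &^ y" is rewritten to "x & ^y".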
  1068  
  1069  	case ODIV, OMOD:
  1070  		n.Left = walkexpr(n.Left, init)
  1071  		n.Right = walkexpr(n.Right, init)
  1072  
  1073  		// rewrite complex div into function call.
  1074  		et := n.Left.Type.Etype
  1075  
  1076  		if isComplex[et] && n.Op == ODIV {
  1077  			t := n.Type
  1078  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1079  			n = conv(n, t)
  1080  			break
  1081  		}
  1082  
  1083  		// Nothing to do for float divisions.
  1084  		if isFloat[et] {
  1085  			break
  1086  		}
  1087  
  1088  		// rewrite 64-bit div and mod on 32-bit architectures.
  1089  		// TODO: Remove this code once we can introduce
  1090  		// runtime calls late in SSA processing.
  1091  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1092  			if n.Right.Op == OLITERAL {
  1093  				// Leave div/mod by constant powers of 2.
  1094  				// The SSA backend will handle those.
  1095  				switch et {
  1096  				case TINT64:
  1097  					c := n.Right.Int64()
  1098  					if c < 0 {
  1099  						c = -c
  1100  					}
  1101  					if c != 0 && c&(c-1) == 0 {
  1102  						break opswitch
  1103  					}
  1104  				case TUINT64:
  1105  					c := uint64(n.Right.Int64())
  1106  					if c != 0 && c&(c-1) == 0 {
  1107  						break opswitch
  1108  					}
  1109  				}
  1110  			}
  1111  			var fn string
  1112  			if et == TINT64 {
  1113  				fn = "int64"
  1114  			} else {
  1115  				fn = "uint64"
  1116  			}
  1117  			if n.Op == ODIV {
  1118  				fn += "div"
  1119  			} else {
  1120  				fn += "mod"
  1121  			}
  1122  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1123  		}
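         		// Roughly: on 32-bit targets "a / b" with int64 operands becomes
         		// int64div(a, b) (likewise int64mod, uint64div, uint64mod), except for
         		// constant power-of-two divisors, which are left for the SSA backend.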
  1124  
  1125  	case OINDEX:
  1126  		n.Left = walkexpr(n.Left, init)
  1127  
  1128  		// save the original node for bounds checking elision.
   1129  		// If it was an ODIV/OMOD, walk might rewrite it.
  1130  		r := n.Right
  1131  
  1132  		n.Right = walkexpr(n.Right, init)
  1133  
  1134  		// if range of type cannot exceed static array bound,
  1135  		// disable bounds check.
  1136  		if n.Bounded() {
  1137  			break
  1138  		}
  1139  		t := n.Left.Type
  1140  		if t != nil && t.IsPtr() {
  1141  			t = t.Elem()
  1142  		}
  1143  		if t.IsArray() {
  1144  			n.SetBounded(bounded(r, t.NumElem()))
  1145  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1146  				Warn("index bounds check elided")
  1147  			}
  1148  			if smallintconst(n.Right) && !n.Bounded() {
  1149  				yyerror("index out of bounds")
  1150  			}
  1151  		} else if Isconst(n.Left, CTSTR) {
  1152  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1153  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1154  				Warn("index bounds check elided")
  1155  			}
  1156  			if smallintconst(n.Right) && !n.Bounded() {
  1157  				yyerror("index out of bounds")
  1158  			}
  1159  		}
  1160  
  1161  		if Isconst(n.Right, CTINT) {
  1162  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1163  				yyerror("index out of bounds")
  1164  			}
  1165  		}
  1166  
  1167  	case OINDEXMAP:
  1168  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
  1169  		n.Left = walkexpr(n.Left, init)
  1170  		n.Right = walkexpr(n.Right, init)
  1171  		map_ := n.Left
  1172  		key := n.Right
  1173  		t := map_.Type
  1174  		if n.Etype == 1 {
  1175  			// This m[k] expression is on the left-hand side of an assignment.
  1176  			fast := mapfast(t)
  1177  			if fast == mapslow {
  1178  				// standard version takes key by reference.
  1179  				// orderexpr made sure key is addressable.
  1180  				key = nod(OADDR, key, nil)
  1181  			}
  1182  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1183  		} else {
  1184  			// m[k] is not the target of an assignment.
  1185  			fast := mapfast(t)
  1186  			if fast == mapslow {
  1187  				// standard version takes key by reference.
  1188  				// orderexpr made sure key is addressable.
  1189  				key = nod(OADDR, key, nil)
  1190  			}
  1191  
  1192  			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
  1193  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
  1194  			} else {
  1195  				z := zeroaddr(w)
  1196  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
  1197  			}
  1198  		}
  1199  		n.Type = types.NewPtr(t.Val())
  1200  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1201  		n = nod(OIND, n, nil)
  1202  		n.Type = t.Val()
  1203  		n.SetTypecheck(1)
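         		// Roughly: reading "m[k]" becomes "*mapaccess1(maptype, m, &k)" and
         		// assigning "m[k] = v" uses "*mapassign(maptype, m, &k)"; the fast
         		// variants take k by value, and values wider than 1024 bytes go through
         		// mapaccess1_fat with the address of a shared zero value.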
  1204  
  1205  	case ORECV:
  1206  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1207  
  1208  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1209  		n.Left = walkexpr(n.Left, init)
  1210  		low, high, max := n.SliceBounds()
  1211  		low = walkexpr(low, init)
  1212  		if low != nil && iszero(low) {
  1213  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1214  			low = nil
  1215  		}
  1216  		high = walkexpr(high, init)
  1217  		max = walkexpr(max, init)
  1218  		n.SetSliceBounds(low, high, max)
  1219  		if n.Op.IsSlice3() {
  1220  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1221  				// Reduce x[i:j:cap(x)] to x[i:j].
  1222  				if n.Op == OSLICE3 {
  1223  					n.Op = OSLICE
  1224  				} else {
  1225  					n.Op = OSLICEARR
  1226  				}
  1227  				n = reduceSlice(n)
  1228  			}
  1229  		} else {
  1230  			n = reduceSlice(n)
  1231  		}
  1232  
  1233  	case ONEW:
  1234  		if n.Esc == EscNone {
  1235  			if n.Type.Elem().Width >= 1<<16 {
  1236  				Fatalf("large ONEW with EscNone: %v", n)
  1237  			}
  1238  			r := temp(n.Type.Elem())
  1239  			r = nod(OAS, r, nil) // zero temp
  1240  			r = typecheck(r, Etop)
  1241  			init.Append(r)
  1242  			r = nod(OADDR, r.Left, nil)
  1243  			r = typecheck(r, Erv)
  1244  			n = r
  1245  		} else {
  1246  			n = callnew(n.Type.Elem())
  1247  		}
  1248  
  1249  	case OCMPSTR:
  1250  		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
  1251  		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  1252  			// TODO(marvin): Fix Node.EType type union.
  1253  			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  1254  			r = typecheck(r, Erv)
  1255  			r = walkexpr(r, init)
  1256  			r.Type = n.Type
  1257  			n = r
  1258  			break
  1259  		}
  1260  
  1261  		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
  1262  		var cs, ncs *Node // const string, non-const string
  1263  		switch {
  1264  		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  1265  			// ignore; will be constant evaluated
  1266  		case Isconst(n.Left, CTSTR):
  1267  			cs = n.Left
  1268  			ncs = n.Right
  1269  		case Isconst(n.Right, CTSTR):
  1270  			cs = n.Right
  1271  			ncs = n.Left
  1272  		}
  1273  		if cs != nil {
  1274  			cmp := Op(n.Etype)
  1275  			// maxRewriteLen was chosen empirically.
  1276  			// It is the value that minimizes cmd/go file size
  1277  			// across most architectures.
  1278  			// See the commit description for CL 26758 for details.
  1279  			maxRewriteLen := 6
   1280  			// Some architectures can load an unaligned byte sequence as one word.
  1281  			// So we can cover longer strings with the same amount of code.
  1282  			canCombineLoads := false
  1283  			combine64bit := false
  1284  			// TODO: does this improve performance on any other architectures?
  1285  			switch thearch.LinkArch.Family {
  1286  			case sys.AMD64:
   1287  				// Larger compares require longer instructions, so keep this reasonably low.
  1288  				// Data from CL 26758 shows that longer strings are rare.
  1289  				// If we really want we can do 16 byte SSE comparisons in the future.
  1290  				maxRewriteLen = 16
  1291  				canCombineLoads = true
  1292  				combine64bit = true
  1293  			case sys.I386:
  1294  				maxRewriteLen = 8
  1295  				canCombineLoads = true
  1296  			}
  1297  			var and Op
  1298  			switch cmp {
  1299  			case OEQ:
  1300  				and = OANDAND
  1301  			case ONE:
  1302  				and = OOROR
  1303  			default:
  1304  				// Don't do byte-wise comparisons for <, <=, etc.
  1305  				// They're fairly complicated.
  1306  				// Length-only checks are ok, though.
  1307  				maxRewriteLen = 0
  1308  			}
  1309  			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  1310  				if len(s) > 0 {
  1311  					ncs = safeexpr(ncs, init)
  1312  				}
  1313  				// TODO(marvin): Fix Node.EType type union.
  1314  				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  1315  				remains := len(s)
  1316  				for i := 0; remains > 0; {
  1317  					if remains == 1 || !canCombineLoads {
  1318  						cb := nodintconst(int64(s[i]))
  1319  						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  1320  						r = nod(and, r, nod(cmp, ncb, cb))
  1321  						remains--
  1322  						i++
  1323  						continue
  1324  					}
  1325  					var step int
  1326  					var convType *types.Type
  1327  					switch {
  1328  					case remains >= 8 && combine64bit:
  1329  						convType = types.Types[TINT64]
  1330  						step = 8
  1331  					case remains >= 4:
  1332  						convType = types.Types[TUINT32]
  1333  						step = 4
  1334  					case remains >= 2:
  1335  						convType = types.Types[TUINT16]
  1336  						step = 2
  1337  					}
  1338  					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  1339  					ncsubstr = conv(ncsubstr, convType)
  1340  					csubstr := int64(s[i])
  1341  					// Calculate large constant from bytes as sequence of shifts and ors.
  1342  					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  1343  					// ssa will combine this into a single large load.
  1344  					for offset := 1; offset < step; offset++ {
  1345  						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  1346  						b = conv(b, convType)
  1347  						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  1348  						ncsubstr = nod(OOR, ncsubstr, b)
  1349  						csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset)
  1350  					}
  1351  					csubstrPart := nodintconst(csubstr)
   1352  					// Compare "step" bytes at once
  1353  					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  1354  					remains -= step
  1355  					i += step
  1356  				}
  1357  				r = typecheck(r, Erv)
  1358  				r = walkexpr(r, init)
  1359  				r.Type = n.Type
  1360  				n = r
  1361  				break
  1362  			}
  1363  		}
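         		// (The rewrite above turns s == "ab" into
         		// len(s) == 2 && s[0] == 'a' && s[1] == 'b', combining adjacent bytes
         		// into wider loads on amd64 and 386.)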
  1364  
  1365  		var r *Node
  1366  		// TODO(marvin): Fix Node.EType type union.
  1367  		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
  1368  			// prepare for rewrite below
  1369  			n.Left = cheapexpr(n.Left, init)
  1370  			n.Right = cheapexpr(n.Right, init)
  1371  
  1372  			r = mkcall("eqstring", types.Types[TBOOL], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1373  
  1374  			// quick check of len before full compare for == or !=
  1375  			// eqstring assumes that the lengths are equal
  1376  			// TODO(marvin): Fix Node.EType type union.
  1377  			if Op(n.Etype) == OEQ {
  1378  				// len(left) == len(right) && eqstring(left, right)
  1379  				r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1380  			} else {
  1381  				// len(left) != len(right) || !eqstring(left, right)
  1382  				r = nod(ONOT, r, nil)
  1383  				r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1384  			}
  1385  
  1386  			r = typecheck(r, Erv)
  1387  			r = walkexpr(r, nil)
  1388  		} else {
  1389  			// sys_cmpstring(s1, s2) :: 0
  1390  			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1391  			// TODO(marvin): Fix Node.EType type union.
  1392  			r = nod(Op(n.Etype), r, nodintconst(0))
  1393  		}
  1394  
  1395  		r = typecheck(r, Erv)
  1396  		if !n.Type.IsBoolean() {
  1397  			Fatalf("cmp %v", n.Type)
  1398  		}
  1399  		r.Type = n.Type
  1400  		n = r
  1401  
  1402  	case OADDSTR:
  1403  		n = addstr(n, init)
  1404  
  1405  	case OAPPEND:
  1406  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1407  		Fatalf("append outside assignment")
  1408  
  1409  	case OCOPY:
  1410  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1411  
  1412  		// cannot use chanfn - closechan takes any, not chan any
  1413  	case OCLOSE:
  1414  		fn := syslook("closechan")
  1415  
  1416  		fn = substArgTypes(fn, n.Left.Type)
  1417  		n = mkcall1(fn, nil, init, n.Left)
  1418  
  1419  	case OMAKECHAN:
  1420  		n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]))
  1421  
  1422  	case OMAKEMAP:
  1423  		t := n.Type
  1424  
  1425  		a := nodnil() // hmap buffer
  1426  		r := nodnil() // bucket buffer
  1427  		if n.Esc == EscNone {
  1428  			// Allocate hmap buffer on stack.
  1429  			var_ := temp(hmap(t))
  1430  
  1431  			a = nod(OAS, var_, nil) // zero temp
  1432  			a = typecheck(a, Etop)
  1433  			init.Append(a)
  1434  			a = nod(OADDR, var_, nil)
  1435  
  1436  			// Allocate one bucket on stack.
  1437  			// Maximum key/value size is 128 bytes, larger objects
  1438  			// are stored with an indirection. So max bucket size is 2048+eps.
  1439  			var_ = temp(mapbucket(t))
  1440  
  1441  			r = nod(OAS, var_, nil) // zero temp
  1442  			r = typecheck(r, Etop)
  1443  			init.Append(r)
  1444  			r = nod(OADDR, var_, nil)
  1445  		}
  1446  
  1447  		fn := syslook("makemap")
  1448  		fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
  1449  		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]), a, r)
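         		// Roughly: make(map[K]V, hint) becomes makemap(maptype, int64(hint), h, b),
         		// where h and b are nil unless the map does not escape, in which case they
         		// point to a stack-allocated hmap and a single stack-allocated bucket.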
  1450  
  1451  	case OMAKESLICE:
  1452  		l := n.Left
  1453  		r := n.Right
  1454  		if r == nil {
  1455  			r = safeexpr(l, init)
  1456  			l = r
  1457  		}
  1458  		t := n.Type
  1459  		if n.Esc == EscNone {
  1460  			if !isSmallMakeSlice(n) {
  1461  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1462  			}
  1463  			// var arr [r]T
  1464  			// n = arr[:l]
  1465  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1466  			var_ := temp(t)
  1467  			a := nod(OAS, var_, nil) // zero temp
  1468  			a = typecheck(a, Etop)
  1469  			init.Append(a)
  1470  			r := nod(OSLICE, var_, nil) // arr[:l]
  1471  			r.SetSliceBounds(nil, l, nil)
  1472  			r = conv(r, n.Type) // in case n.Type is named.
  1473  			r = typecheck(r, Erv)
  1474  			r = walkexpr(r, init)
  1475  			n = r
  1476  		} else {
  1477  			// n escapes; set up a call to makeslice.
  1478  			// When len and cap can fit into int, use makeslice instead of
  1479  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1480  
  1481  			if t.Elem().NotInHeap() {
  1482  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1483  			}
  1484  
  1485  			len, cap := l, r
  1486  
  1487  			fnname := "makeslice64"
  1488  			argtype := types.Types[TINT64]
  1489  
  1490  			// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
  1491  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1492  			// will be handled by the negative range checks in makeslice during runtime.
  1493  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1494  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1495  				fnname = "makeslice"
  1496  				argtype = types.Types[TINT]
  1497  			}
  1498  
  1499  			fn := syslook(fnname)
  1500  			fn = substArgTypes(fn, t.Elem()) // any-1
  1501  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1502  		}
  1503  
  1504  	case ORUNESTR:
  1505  		a := nodnil()
  1506  		if n.Esc == EscNone {
  1507  			t := types.NewArray(types.Types[TUINT8], 4)
  1508  			var_ := temp(t)
  1509  			a = nod(OADDR, var_, nil)
  1510  		}
  1511  
  1512  		// intstring(*[4]byte, rune)
  1513  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1514  
  1515  	case OARRAYBYTESTR:
  1516  		a := nodnil()
  1517  		if n.Esc == EscNone {
  1518  			// Create temporary buffer for string on stack.
  1519  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1520  
  1521  			a = nod(OADDR, temp(t), nil)
  1522  		}
  1523  
  1524  		// slicebytetostring(*[32]byte, []byte) string;
  1525  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1526  
  1527  		// slicebytetostringtmp([]byte) string;
  1528  	case OARRAYBYTESTRTMP:
  1529  		n.Left = walkexpr(n.Left, init)
  1530  
  1531  		if !instrumenting {
  1532  			// Let the backend handle OARRAYBYTESTRTMP directly
  1533  			// to avoid a function call to slicebytetostringtmp.
  1534  			break
  1535  		}
  1536  
  1537  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1538  
  1539  		// slicerunetostring(*[32]byte, []rune) string;
  1540  	case OARRAYRUNESTR:
  1541  		a := nodnil()
  1542  
  1543  		if n.Esc == EscNone {
  1544  			// Create temporary buffer for string on stack.
  1545  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1546  
  1547  			a = nod(OADDR, temp(t), nil)
  1548  		}
  1549  
  1550  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1551  
   1552  		// stringtoslicebyte(*[32]byte, string) []byte;
  1553  	case OSTRARRAYBYTE:
  1554  		a := nodnil()
  1555  
  1556  		if n.Esc == EscNone {
  1557  			// Create temporary buffer for slice on stack.
  1558  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1559  
  1560  			a = nod(OADDR, temp(t), nil)
  1561  		}
  1562  
  1563  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1564  
  1565  	case OSTRARRAYBYTETMP:
  1566  		// []byte(string) conversion that creates a slice
  1567  		// referring to the actual string bytes.
  1568  		// This conversion is handled later by the backend and
  1569  		// is only for use by internal compiler optimizations
  1570  		// that know that the slice won't be mutated.
  1571  		// The only such case today is:
  1572  		// for i, c := range []byte(string)
  1573  		n.Left = walkexpr(n.Left, init)
  1574  
  1575  		// stringtoslicerune(*[32]rune, string) []rune
  1576  	case OSTRARRAYRUNE:
  1577  		a := nodnil()
  1578  
  1579  		if n.Esc == EscNone {
  1580  			// Create temporary buffer for slice on stack.
  1581  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1582  
  1583  			a = nod(OADDR, temp(t), nil)
  1584  		}
  1585  
  1586  		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
  1587  
  1588  		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  1589  	case OCMPIFACE:
  1590  		if !eqtype(n.Left.Type, n.Right.Type) {
  1591  			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  1592  		}
  1593  		var fn *Node
  1594  		if n.Left.Type.IsEmptyInterface() {
  1595  			fn = syslook("efaceeq")
  1596  		} else {
  1597  			fn = syslook("ifaceeq")
  1598  		}
  1599  
  1600  		n.Right = cheapexpr(n.Right, init)
  1601  		n.Left = cheapexpr(n.Left, init)
  1602  		lt := nod(OITAB, n.Left, nil)
  1603  		rt := nod(OITAB, n.Right, nil)
  1604  		ld := nod(OIDATA, n.Left, nil)
  1605  		rd := nod(OIDATA, n.Right, nil)
  1606  		ld.Type = types.Types[TUNSAFEPTR]
  1607  		rd.Type = types.Types[TUNSAFEPTR]
  1608  		ld.SetTypecheck(1)
  1609  		rd.SetTypecheck(1)
  1610  		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  1611  
  1612  		// Check itable/type before full compare.
  1613  		// Note: short-circuited because order matters.
  1614  		// TODO(marvin): Fix Node.EType type union.
  1615  		var cmp *Node
  1616  		if Op(n.Etype) == OEQ {
  1617  			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  1618  		} else {
  1619  			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  1620  		}
  1621  		cmp = typecheck(cmp, Erv)
  1622  		cmp = walkexpr(cmp, init)
  1623  		cmp.Type = n.Type
  1624  		n = cmp
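         		// Roughly: for ==, "i1 == i2" becomes
         		// "itab(i1) == itab(i2) && ifaceeq(itab(i1), data(i1), data(i2))"
         		// (efaceeq and the type word for empty interfaces); != is the negation.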
  1625  
  1626  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1627  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1628  			// n can be directly represented in the read-only data section.
  1629  			// Make direct reference to the static data. See issue 12841.
  1630  			vstat := staticname(n.Type)
  1631  			vstat.Name.SetReadonly(true)
  1632  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1633  			n = vstat
  1634  			n = typecheck(n, Erv)
  1635  			break
  1636  		}
  1637  		var_ := temp(n.Type)
  1638  		anylit(n, var_, init)
  1639  		n = var_
  1640  
  1641  	case OSEND:
  1642  		n1 := n.Right
  1643  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1644  		n1 = walkexpr(n1, init)
  1645  		n1 = nod(OADDR, n1, nil)
  1646  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
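         		// Roughly: "c <- v" becomes "chansend1(c, &v)", after converting v to
         		// the channel's element type; the value is passed by address.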
  1647  
  1648  	case OCLOSURE:
  1649  		n = walkclosure(n, init)
  1650  
  1651  	case OCALLPART:
  1652  		n = walkpartialcall(n, init)
  1653  	}
  1654  
  1655  	// Expressions that are constant at run time but not
  1656  	// considered const by the language spec are not turned into
  1657  	// constants until walk. For example, if n is y%1 == 0, the
  1658  	// walk of y%1 may have replaced it by 0.
  1659  	// Check whether n with its updated args is itself now a constant.
  1660  	t := n.Type
  1661  	evconst(n)
  1662  	if n.Type != t {
  1663  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1664  	}
  1665  	if n.Op == OLITERAL {
  1666  		n = typecheck(n, Erv)
  1667  		// Emit string symbol now to avoid emitting
  1668  		// any concurrently during the backend.
  1669  		if s, ok := n.Val().U.(string); ok {
  1670  			_ = stringsym(s)
  1671  		}
  1672  	}
  1673  
  1674  	updateHasCall(n)
  1675  
  1676  	if Debug['w'] != 0 && n != nil {
  1677  		Dump("walk", n)
  1678  	}
  1679  
  1680  	lineno = lno
  1681  	return n
  1682  }
  1683  
  1684  // TODO(josharian): combine this with its caller and simplify
  1685  func reduceSlice(n *Node) *Node {
  1686  	low, high, max := n.SliceBounds()
  1687  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1688  		// Reduce x[i:len(x)] to x[i:].
  1689  		high = nil
  1690  	}
  1691  	n.SetSliceBounds(low, high, max)
  1692  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1693  		// Reduce x[:] to x.
  1694  		if Debug_slice > 0 {
  1695  			Warn("slice: omit slice operation")
  1696  		}
  1697  		return n.Left
  1698  	}
  1699  	return n
  1700  }
  1701  
  1702  func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node {
  1703  	// convas will turn map assigns into function calls,
  1704  	// making it impossible for reorder3 to work.
  1705  	n := nod(OAS, l, r)
  1706  
  1707  	if l.Op == OINDEXMAP {
  1708  		return n
  1709  	}
  1710  
  1711  	return convas(n, init)
  1712  }
  1713  
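         // Illustrative sketch (added note, not from the original source):
         // for a multiple assignment such as
         //	a, b = x, y
         // ascompatee produces one OAS node per pair, roughly
         //	a = x
         //	b = y
         // leaving reorder3 to insert copies where the pairs interfere, and
         // dropping self-assignments like "x = x" on return paths.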
  1714  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1715  	// check assign expression list to
   1716  	// an expression list. called in
  1717  	//	expr-list = expr-list
  1718  
  1719  	// ensure order of evaluation for function calls
  1720  	for i := range nl {
  1721  		nl[i] = safeexpr(nl[i], init)
  1722  	}
  1723  	for i1 := range nr {
  1724  		nr[i1] = safeexpr(nr[i1], init)
  1725  	}
  1726  
  1727  	var nn []*Node
  1728  	i := 0
  1729  	for ; i < len(nl); i++ {
  1730  		if i >= len(nr) {
  1731  			break
  1732  		}
  1733  		// Do not generate 'x = x' during return. See issue 4014.
  1734  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1735  			continue
  1736  		}
  1737  		nn = append(nn, ascompatee1(op, nl[i], nr[i], init))
  1738  	}
  1739  
  1740  	// cannot happen: caller checked that lists had same length
  1741  	if i < len(nl) || i < len(nr) {
  1742  		var nln, nrn Nodes
  1743  		nln.Set(nl)
  1744  		nrn.Set(nr)
  1745  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1746  	}
  1747  	return nn
  1748  }
  1749  
   1750  // fncall reports whether assigning an rvalue of type rt to the
   1751  // lvalue l requires a function call: either to evaluate or store
   1752  // to l (for example, a map assignment or a write needing a write
   1753  // barrier), or to convert between the types.
  1754  func fncall(l *Node, rt *types.Type) bool {
  1755  	if l.HasCall() || l.Op == OINDEXMAP {
  1756  		return true
  1757  	}
  1758  	if needwritebarrier(l) {
  1759  		return true
  1760  	}
  1761  	if eqtype(l.Type, rt) {
  1762  		return false
  1763  	}
  1764  	return true
  1765  }
  1766  
  1767  // check assign type list to
   1768  // an expression list. called in
  1769  //	expr-list = func()
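         // Illustrative sketch (added note, not from the original source):
         // for
         //	x, m[k] = f()
         // the plain destination x is assigned directly from f's result slot,
         // while the map assignment, which itself requires a call, is staged
         // through a temporary and performed only after all results have been
         // read, roughly:
         //	x = ret0; tmp = ret1; m[k] = tmp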
  1770  func ascompatet(op Op, nl Nodes, nr *types.Type) []*Node {
  1771  	if nl.Len() != nr.NumFields() {
  1772  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1773  	}
  1774  
  1775  	var nn, mm Nodes
  1776  	for i, l := range nl.Slice() {
  1777  		if isblank(l) {
  1778  			continue
  1779  		}
  1780  		r := nr.Field(i)
  1781  
   1782  		// Any lvalue that causes a function call must be
   1783  		// deferred until all the return values have been
   1784  		// pulled from the output arguments.
  1785  		if fncall(l, r.Type) {
  1786  			tmp := temp(r.Type)
  1787  			tmp = typecheck(tmp, Erv)
  1788  			a := nod(OAS, l, tmp)
  1789  			a = convas(a, &mm)
  1790  			mm.Append(a)
  1791  			l = tmp
  1792  		}
  1793  
  1794  		a := nod(OAS, l, nodarg(r, 0))
  1795  		a = convas(a, &nn)
  1796  		updateHasCall(a)
  1797  		if a.HasCall() {
  1798  			Dump("ascompatet ucount", a)
  1799  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1800  		}
  1801  
  1802  		nn.Append(a)
  1803  	}
  1804  	return append(nn.Slice(), mm.Slice()...)
  1805  }
  1806  
  1807  // nodarg returns a Node for the function argument denoted by t,
   1808  // which is either the entire function argument or result struct (t is a struct *types.Type)
  1809  // or a specific argument (t is a *types.Field within a struct *types.Type).
  1810  //
  1811  // If fp is 0, the node is for use by a caller invoking the given
  1812  // function, preparing the arguments before the call
  1813  // or retrieving the results after the call.
  1814  // In this case, the node will correspond to an outgoing argument
  1815  // slot like 8(SP).
  1816  //
  1817  // If fp is 1, the node is for use by the function itself
  1818  // (the callee), to retrieve its arguments or write its results.
  1819  // In this case the node will be an ONAME with an appropriate
  1820  // type and offset.
  1821  func nodarg(t interface{}, fp int) *Node {
  1822  	var n *Node
  1823  
  1824  	var funarg types.Funarg
  1825  	switch t := t.(type) {
  1826  	default:
  1827  		Fatalf("bad nodarg %T(%v)", t, t)
  1828  
  1829  	case *types.Type:
  1830  		// Entire argument struct, not just one arg
  1831  		if !t.IsFuncArgStruct() {
  1832  			Fatalf("nodarg: bad type %v", t)
  1833  		}
  1834  		funarg = t.StructType().Funarg
  1835  
  1836  		// Build fake variable name for whole arg struct.
  1837  		n = newname(lookup(".args"))
  1838  		n.Type = t
  1839  		first := t.Field(0)
  1840  		if first == nil {
  1841  			Fatalf("nodarg: bad struct")
  1842  		}
  1843  		if first.Offset == BADWIDTH {
  1844  			Fatalf("nodarg: offset not computed for %v", t)
  1845  		}
  1846  		n.Xoffset = first.Offset
  1847  
  1848  	case *types.Field:
  1849  		funarg = t.Funarg
  1850  		if fp == 1 {
  1851  			// NOTE(rsc): This should be using t.Nname directly,
  1852  			// except in the case where t.Nname.Sym is the blank symbol and
  1853  			// so the assignment would be discarded during code generation.
   1854  			// In that case we need to make a new node, and there is no harm
   1855  			// to optimization passes in doing so. But otherwise we should
  1856  			// definitely be using the actual declaration and not a newly built node.
  1857  			// The extra Fatalf checks here are verifying that this is the case,
  1858  			// without changing the actual logic (at time of writing, it's getting
  1859  			// toward time for the Go 1.7 beta).
  1860  			// At some quieter time (assuming we've never seen these Fatalfs happen)
  1861  			// we could change this code to use "expect" directly.
  1862  			expect := asNode(t.Nname)
  1863  			if expect.isParamHeapCopy() {
  1864  				expect = expect.Name.Param.Stackcopy
  1865  			}
  1866  
  1867  			for _, n := range Curfn.Func.Dcl {
  1868  				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
  1869  					if n != expect {
  1870  						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
  1871  					}
  1872  					return n
  1873  				}
  1874  			}
  1875  
  1876  			if !expect.Sym.IsBlank() {
  1877  				Fatalf("nodarg: did not find node in dcl list: %v", expect)
  1878  			}
  1879  		}
  1880  
  1881  		// Build fake name for individual variable.
  1882  		// This is safe because if there was a real declared name
  1883  		// we'd have used it above.
  1884  		n = newname(lookup("__"))
  1885  		n.Type = t.Type
  1886  		if t.Offset == BADWIDTH {
  1887  			Fatalf("nodarg: offset not computed for %v", t)
  1888  		}
  1889  		n.Xoffset = t.Offset
  1890  		n.Orig = asNode(t.Nname)
  1891  	}
  1892  
  1893  	// Rewrite argument named _ to __,
  1894  	// or else the assignment to _ will be
  1895  	// discarded during code generation.
  1896  	if isblank(n) {
  1897  		n.Sym = lookup("__")
  1898  	}
  1899  
  1900  	switch fp {
  1901  	default:
  1902  		Fatalf("bad fp")
  1903  
  1904  	case 0: // preparing arguments for call
  1905  		n.Op = OINDREGSP
  1906  		n.Xoffset += Ctxt.FixedFrameSize()
  1907  
  1908  	case 1: // reading arguments inside call
  1909  		n.SetClass(PPARAM)
  1910  		if funarg == types.FunargResults {
  1911  			n.SetClass(PPARAMOUT)
  1912  		}
  1913  	}
  1914  
  1915  	n.SetTypecheck(1)
  1916  	n.SetAddrtaken(true) // keep optimizers at bay
  1917  	return n
  1918  }
  1919  
  1920  // package all the arguments that match a ... T parameter into a []T.
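         // Illustrative sketch (added note, not from the original source):
         // assuming a variadic function
         //	func f(xs ...int)
         // a call f(1, 2, 3) has its trailing arguments packed into a
         // composite literal equivalent to []int{1, 2, 3}, while f() with no
         // variadic arguments passes a nil []int.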
  1921  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1922  	esc := uint16(EscUnknown)
  1923  	if ddd != nil {
  1924  		esc = ddd.Esc
  1925  	}
  1926  
  1927  	if len(args) == 0 {
  1928  		n := nodnil()
  1929  		n.Type = typ
  1930  		return n
  1931  	}
  1932  
  1933  	n := nod(OCOMPLIT, nil, typenod(typ))
  1934  	if ddd != nil && prealloc[ddd] != nil {
  1935  		prealloc[n] = prealloc[ddd] // temporary to use
  1936  	}
  1937  	n.List.Set(args)
  1938  	n.Esc = esc
  1939  	n = typecheck(n, Erv)
  1940  	if n.Type == nil {
  1941  		Fatalf("mkdotargslice: typecheck failed")
  1942  	}
  1943  	n = walkexpr(n, init)
  1944  	return n
  1945  }
  1946  
  1947  // check assign expression list to
  1948  // a type list. called in
  1949  //	return expr-list
  1950  //	func(expr-list)
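         // Illustrative sketch (added note, not from the original source):
         // for a call f(g()) where g returns multiple values whose types do
         // not match f's parameters exactly, the results are first copied
         // into temporaries, roughly
         //	t1, t2 = g()
         // and each temporary is then assigned to the corresponding
         // parameter slot produced by nodarg.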
  1951  func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
  1952  	var nn []*Node
  1953  
  1954  	// f(g()) where g has multiple return values
  1955  	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
  1956  		// optimization - can do block copy
  1957  		if eqtypenoname(rhs[0].Type, lhs) {
  1958  			nl := nodarg(lhs, fp)
  1959  			nr := nod(OCONVNOP, rhs[0], nil)
  1960  			nr.Type = nl.Type
  1961  			nn = []*Node{convas(nod(OAS, nl, nr), init)}
  1962  			goto ret
  1963  		}
  1964  
  1965  		// conversions involved.
  1966  		// copy into temporaries.
  1967  		var tmps []*Node
  1968  		for _, nr := range rhs[0].Type.FieldSlice() {
  1969  			tmps = append(tmps, temp(nr.Type))
  1970  		}
  1971  
  1972  		a := nod(OAS2, nil, nil)
  1973  		a.List.Set(tmps)
  1974  		a.Rlist.Set(rhs)
  1975  		a = typecheck(a, Etop)
  1976  		a = walkstmt(a)
  1977  		init.Append(a)
  1978  
  1979  		rhs = tmps
  1980  	}
  1981  
  1982  	// For each parameter (LHS), assign its corresponding argument (RHS).
  1983  	// If there's a ... parameter (which is only valid as the final
  1984  	// parameter) and this is not a ... call expression,
  1985  	// then assign the remaining arguments as a slice.
  1986  	for i, nl := range lhs.FieldSlice() {
  1987  		var nr *Node
  1988  		if nl.Isddd() && !isddd {
  1989  			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
  1990  		} else {
  1991  			nr = rhs[i]
  1992  		}
  1993  
  1994  		a := nod(OAS, nodarg(nl, fp), nr)
  1995  		a = convas(a, init)
  1996  		nn = append(nn, a)
  1997  	}
  1998  
  1999  ret:
  2000  	for _, n := range nn {
  2001  		n.SetTypecheck(1)
  2002  	}
  2003  	return nn
  2004  }
  2005  
  2006  // generate code for print
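         // Illustrative sketch (added note, not from the original source):
         //	println("x=", x)
         // with x an int becomes, roughly,
         //	printlock()
         //	printstring("x="); printsp(); printint(int64(x))
         //	printnl()
         //	printunlock()
         // while print (as opposed to println) emits neither the separating
         // spaces nor the trailing newline.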
  2007  func walkprint(nn *Node, init *Nodes) *Node {
  2008  	var r *Node
  2009  	var n *Node
  2010  	var on *Node
  2011  	var t *types.Type
  2012  	var et types.EType
  2013  
  2014  	op := nn.Op
  2015  	all := nn.List
  2016  	var calls []*Node
  2017  	notfirst := false
  2018  
  2019  	// Hoist all the argument evaluation up before the lock.
  2020  	walkexprlistcheap(all.Slice(), init)
  2021  
  2022  	calls = append(calls, mkcall("printlock", nil, init))
  2023  	for i1, n1 := range all.Slice() {
  2024  		if notfirst {
  2025  			calls = append(calls, mkcall("printsp", nil, init))
  2026  		}
  2027  
  2028  		notfirst = op == OPRINTN
  2029  
  2030  		n = n1
  2031  		if n.Op == OLITERAL {
  2032  			switch n.Val().Ctype() {
  2033  			case CTRUNE:
  2034  				n = defaultlit(n, types.Runetype)
  2035  
  2036  			case CTINT:
  2037  				n = defaultlit(n, types.Types[TINT64])
  2038  
  2039  			case CTFLT:
  2040  				n = defaultlit(n, types.Types[TFLOAT64])
  2041  			}
  2042  		}
  2043  
  2044  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  2045  			n = defaultlit(n, types.Types[TINT64])
  2046  		}
  2047  		n = defaultlit(n, nil)
  2048  		all.SetIndex(i1, n)
  2049  		if n.Type == nil || n.Type.Etype == TFORW {
  2050  			continue
  2051  		}
  2052  
  2053  		t = n.Type
  2054  		et = n.Type.Etype
  2055  		if n.Type.IsInterface() {
  2056  			if n.Type.IsEmptyInterface() {
  2057  				on = syslook("printeface")
  2058  			} else {
  2059  				on = syslook("printiface")
  2060  			}
  2061  			on = substArgTypes(on, n.Type) // any-1
  2062  		} else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR {
  2063  			on = syslook("printpointer")
  2064  			on = substArgTypes(on, n.Type) // any-1
  2065  		} else if n.Type.IsSlice() {
  2066  			on = syslook("printslice")
  2067  			on = substArgTypes(on, n.Type) // any-1
  2068  		} else if isInt[et] {
  2069  			if et == TUINT64 {
  2070  				if isRuntimePkg(t.Sym.Pkg) && t.Sym.Name == "hex" {
  2071  					on = syslook("printhex")
  2072  				} else {
  2073  					on = syslook("printuint")
  2074  				}
  2075  			} else {
  2076  				on = syslook("printint")
  2077  			}
  2078  		} else if isFloat[et] {
  2079  			on = syslook("printfloat")
  2080  		} else if isComplex[et] {
  2081  			on = syslook("printcomplex")
  2082  		} else if et == TBOOL {
  2083  			on = syslook("printbool")
  2084  		} else if et == TSTRING {
  2085  			on = syslook("printstring")
  2086  		} else {
  2087  			badtype(OPRINT, n.Type, nil)
  2088  			continue
  2089  		}
  2090  
  2091  		t = on.Type.Params().Field(0).Type
  2092  
  2093  		if !eqtype(t, n.Type) {
  2094  			n = nod(OCONV, n, nil)
  2095  			n.Type = t
  2096  		}
  2097  
  2098  		r = nod(OCALL, on, nil)
  2099  		r.List.Append(n)
  2100  		calls = append(calls, r)
  2101  	}
  2102  
  2103  	if op == OPRINTN {
  2104  		calls = append(calls, mkcall("printnl", nil, nil))
  2105  	}
  2106  
  2107  	calls = append(calls, mkcall("printunlock", nil, init))
  2108  
  2109  	typecheckslice(calls, Etop)
  2110  	walkexprlist(calls, init)
  2111  
  2112  	r = nod(OEMPTY, nil, nil)
  2113  	r = typecheck(r, Etop)
  2114  	r = walkexpr(r, init)
  2115  	r.Ninit.Set(calls)
  2116  	return r
  2117  }
  2118  
  2119  func callnew(t *types.Type) *Node {
  2120  	if t.NotInHeap() {
  2121  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  2122  	}
  2123  	dowidth(t)
  2124  	fn := syslook("newobject")
  2125  	fn = substArgTypes(fn, t)
  2126  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  2127  	v.SetNonNil(true)
  2128  	return v
  2129  }
  2130  
  2131  func iscallret(n *Node) bool {
  2132  	n = outervalue(n)
  2133  	return n.Op == OINDREGSP
  2134  }
  2135  
  2136  func isstack(n *Node) bool {
  2137  	n = outervalue(n)
  2138  
  2139  	// If n is *autotmp and autotmp = &foo, replace n with foo.
  2140  	// We introduce such temps when initializing struct literals.
  2141  	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
  2142  		defn := n.Left.Name.Defn
  2143  		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
  2144  			n = defn.Right.Left
  2145  		}
  2146  	}
  2147  
  2148  	switch n.Op {
  2149  	case OINDREGSP:
  2150  		return true
  2151  
  2152  	case ONAME:
  2153  		switch n.Class() {
  2154  		case PAUTO, PPARAM, PPARAMOUT:
  2155  			return true
  2156  		}
  2157  	}
  2158  
  2159  	return false
  2160  }
  2161  
  2162  // isReflectHeaderDataField reports whether l is an expression p.Data
  2163  // where p has type reflect.SliceHeader or reflect.StringHeader.
  2164  func isReflectHeaderDataField(l *Node) bool {
  2165  	if l.Type != types.Types[TUINTPTR] {
  2166  		return false
  2167  	}
  2168  
  2169  	var tsym *types.Sym
  2170  	switch l.Op {
  2171  	case ODOT:
  2172  		tsym = l.Left.Type.Sym
  2173  	case ODOTPTR:
  2174  		tsym = l.Left.Type.Elem().Sym
  2175  	default:
  2176  		return false
  2177  	}
  2178  
  2179  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  2180  		return false
  2181  	}
  2182  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  2183  }
  2184  
  2185  // Do we need a write barrier for assigning to l?
  2186  func needwritebarrier(l *Node) bool {
  2187  	if !use_writebarrier {
  2188  		return false
  2189  	}
  2190  
  2191  	if l == nil || isblank(l) {
  2192  		return false
  2193  	}
  2194  
  2195  	// No write barrier for write to stack.
  2196  	if isstack(l) {
  2197  		return false
  2198  	}
  2199  
  2200  	// Package unsafe's documentation says storing pointers into
  2201  	// reflect.SliceHeader and reflect.StringHeader's Data fields
  2202  	// is valid, even though they have type uintptr (#19168).
  2203  	if isReflectHeaderDataField(l) {
  2204  		return true
  2205  	}
  2206  
  2207  	// No write barrier for write of non-pointers.
  2208  	dowidth(l.Type)
  2209  	if !types.Haspointers(l.Type) {
  2210  		return false
  2211  	}
  2212  
  2213  	// No write barrier if this is a pointer to a go:notinheap
  2214  	// type, since the write barrier's inheap(ptr) check will fail.
  2215  	if l.Type.IsPtr() && l.Type.Elem().NotInHeap() {
  2216  		return false
  2217  	}
  2218  
  2219  	// TODO: We can eliminate write barriers if we know *both* the
  2220  	// current and new content of the slot must already be shaded.
  2221  	// We know a pointer is shaded if it's nil, or points to
  2222  	// static data, a global (variable or function), or the stack.
  2223  	// The nil optimization could be particularly useful for
  2224  	// writes to just-allocated objects. Unfortunately, knowing
  2225  	// the "current" value of the slot requires flow analysis.
  2226  
  2227  	// Otherwise, be conservative and use write barrier.
  2228  	return true
  2229  }
  2230  
  2231  func convas(n *Node, init *Nodes) *Node {
  2232  	if n.Op != OAS {
  2233  		Fatalf("convas: not OAS %v", n.Op)
  2234  	}
  2235  
  2236  	n.SetTypecheck(1)
  2237  
  2238  	var lt *types.Type
  2239  	var rt *types.Type
  2240  	if n.Left == nil || n.Right == nil {
  2241  		goto out
  2242  	}
  2243  
  2244  	lt = n.Left.Type
  2245  	rt = n.Right.Type
  2246  	if lt == nil || rt == nil {
  2247  		goto out
  2248  	}
  2249  
  2250  	if isblank(n.Left) {
  2251  		n.Right = defaultlit(n.Right, nil)
  2252  		goto out
  2253  	}
  2254  
  2255  	if !eqtype(lt, rt) {
  2256  		n.Right = assignconv(n.Right, lt, "assignment")
  2257  		n.Right = walkexpr(n.Right, init)
  2258  	}
  2259  	dowidth(n.Right.Type)
  2260  
  2261  out:
  2262  	updateHasCall(n)
  2263  	return n
  2264  }
  2265  
  2266  // from ascompat[te]
  2267  // evaluating actual function arguments.
  2268  //	f(a,b)
   2269  // If there is exactly one function call, it is evaluated first;
   2270  // otherwise all but the last call are saved to temporary variables
   2271  // before the remaining argument assignments are emitted.
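         // Illustrative sketch (added note, not from the original source):
         // given the argument assignments
         //	a1 = g(); a2 = h(); a3 = x
         // reorder1 returns, roughly,
         //	t := g(); a2 = h(); a1 = t; a3 = x
         // so that every call is evaluated before the saved temporaries and
         // non-call values are stored into their argument slots.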
  2272  func reorder1(all []*Node) []*Node {
  2273  	c := 0 // function calls
  2274  	t := 0 // total parameters
  2275  
  2276  	for _, n := range all {
  2277  		t++
  2278  		updateHasCall(n)
  2279  		if n.HasCall() {
  2280  			c++
  2281  		}
  2282  	}
  2283  
  2284  	if c == 0 || t == 1 {
  2285  		return all
  2286  	}
  2287  
  2288  	var g []*Node // fncalls assigned to tempnames
  2289  	var f *Node   // last fncall assigned to stack
  2290  	var r []*Node // non fncalls and tempnames assigned to stack
  2291  	d := 0
  2292  	var a *Node
  2293  	for _, n := range all {
  2294  		if !n.HasCall() {
  2295  			r = append(r, n)
  2296  			continue
  2297  		}
  2298  
  2299  		d++
  2300  		if d == c {
  2301  			f = n
  2302  			continue
  2303  		}
  2304  
  2305  		// make assignment of fncall to tempname
  2306  		a = temp(n.Right.Type)
  2307  
  2308  		a = nod(OAS, a, n.Right)
  2309  		g = append(g, a)
  2310  
  2311  		// put normal arg assignment on list
  2312  		// with fncall replaced by tempname
  2313  		n.Right = a.Left
  2314  
  2315  		r = append(r, n)
  2316  	}
  2317  
  2318  	if f != nil {
  2319  		g = append(g, f)
  2320  	}
  2321  	return append(g, r...)
  2322  }
  2323  
  2324  // from ascompat[ee]
  2325  //	a,b = c,d
   2326  // simultaneous assignment. There cannot be a later use
   2327  // of an earlier lvalue, so copies are made where needed.
  2328  //
  2329  // function calls have been removed.
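         // Illustrative sketch (added note, not from the original source):
         // in an assignment like
         //	i, a[i] = 1, 2
         // the index expression on the left would be clobbered by the first
         // assignment, so reorder3 emits an early copy, roughly
         //	t := i; i = 1; a[t] = 2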
  2330  func reorder3(all []*Node) []*Node {
  2331  	var l *Node
  2332  
  2333  	// If a needed expression may be affected by an
  2334  	// earlier assignment, make an early copy of that
  2335  	// expression and use the copy instead.
  2336  	var early []*Node
  2337  
  2338  	var mapinit Nodes
  2339  	for i, n := range all {
  2340  		l = n.Left
  2341  
  2342  		// Save subexpressions needed on left side.
  2343  		// Drill through non-dereferences.
  2344  		for {
  2345  			if l.Op == ODOT || l.Op == OPAREN {
  2346  				l = l.Left
  2347  				continue
  2348  			}
  2349  
  2350  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2351  				l.Right = reorder3save(l.Right, all, i, &early)
  2352  				l = l.Left
  2353  				continue
  2354  			}
  2355  
  2356  			break
  2357  		}
  2358  
  2359  		switch l.Op {
  2360  		default:
  2361  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2362  
  2363  		case ONAME:
  2364  			break
  2365  
  2366  		case OINDEX, OINDEXMAP:
  2367  			l.Left = reorder3save(l.Left, all, i, &early)
  2368  			l.Right = reorder3save(l.Right, all, i, &early)
  2369  			if l.Op == OINDEXMAP {
  2370  				all[i] = convas(all[i], &mapinit)
  2371  			}
  2372  
  2373  		case OIND, ODOTPTR:
  2374  			l.Left = reorder3save(l.Left, all, i, &early)
  2375  		}
  2376  
  2377  		// Save expression on right side.
  2378  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2379  	}
  2380  
  2381  	early = append(mapinit.Slice(), early...)
  2382  	return append(early, all...)
  2383  }
  2384  
   2385  // If the evaluation of n would be affected by the
   2386  // assignments in all up to but not including the ith assignment,
   2387  // copy it into a temporary (appending the copy to *early) and
   2388  // return that temporary in place of n.
  2389  // The result of reorder3save MUST be assigned back to n, e.g.
  2390  // 	n.Left = reorder3save(n.Left, all, i, early)
  2391  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2392  	if !aliased(n, all, i) {
  2393  		return n
  2394  	}
  2395  
  2396  	q := temp(n.Type)
  2397  	q = nod(OAS, q, n)
  2398  	q = typecheck(q, Etop)
  2399  	*early = append(*early, q)
  2400  	return q.Left
  2401  }
  2402  
  2403  // what's the outer value that a write to n affects?
  2404  // outer value means containing struct or array.
  2405  func outervalue(n *Node) *Node {
  2406  	for {
  2407  		if n.Op == OXDOT {
  2408  			Fatalf("OXDOT in walk")
  2409  		}
  2410  		if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
  2411  			n = n.Left
  2412  			continue
  2413  		}
  2414  
  2415  		if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() {
  2416  			n = n.Left
  2417  			continue
  2418  		}
  2419  
  2420  		break
  2421  	}
  2422  
  2423  	return n
  2424  }
  2425  
  2426  // Is it possible that the computation of n might be
   2427  // affected by writes in all up to but not including the ith element?
  2428  func aliased(n *Node, all []*Node, i int) bool {
  2429  	if n == nil {
  2430  		return false
  2431  	}
  2432  
  2433  	// Treat all fields of a struct as referring to the whole struct.
  2434  	// We could do better but we would have to keep track of the fields.
  2435  	for n.Op == ODOT {
  2436  		n = n.Left
  2437  	}
  2438  
  2439  	// Look for obvious aliasing: a variable being assigned
  2440  	// during the all list and appearing in n.
  2441  	// Also record whether there are any writes to main memory.
  2442  	// Also record whether there are any writes to variables
  2443  	// whose addresses have been taken.
  2444  	memwrite := 0
  2445  
  2446  	varwrite := 0
  2447  	var a *Node
  2448  	for _, an := range all[:i] {
  2449  		a = outervalue(an.Left)
  2450  
  2451  		for a.Op == ODOT {
  2452  			a = a.Left
  2453  		}
  2454  
  2455  		if a.Op != ONAME {
  2456  			memwrite = 1
  2457  			continue
  2458  		}
  2459  
  2460  		switch n.Class() {
  2461  		default:
  2462  			varwrite = 1
  2463  			continue
  2464  
  2465  		case PAUTO, PPARAM, PPARAMOUT:
  2466  			if n.Addrtaken() {
  2467  				varwrite = 1
  2468  				continue
  2469  			}
  2470  
  2471  			if vmatch2(a, n) {
  2472  				// Direct hit.
  2473  				return true
  2474  			}
  2475  		}
  2476  	}
  2477  
  2478  	// The variables being written do not appear in n.
  2479  	// However, n might refer to computed addresses
  2480  	// that are being written.
  2481  
  2482  	// If no computed addresses are affected by the writes, no aliasing.
  2483  	if memwrite == 0 && varwrite == 0 {
  2484  		return false
  2485  	}
  2486  
  2487  	// If n does not refer to computed addresses
  2488  	// (that is, if n only refers to variables whose addresses
  2489  	// have not been taken), no aliasing.
  2490  	if varexpr(n) {
  2491  		return false
  2492  	}
  2493  
  2494  	// Otherwise, both the writes and n refer to computed memory addresses.
  2495  	// Assume that they might conflict.
  2496  	return true
  2497  }
  2498  
  2499  // does the evaluation of n only refer to variables
  2500  // whose addresses have not been taken?
  2501  // (and no other memory)
  2502  func varexpr(n *Node) bool {
  2503  	if n == nil {
  2504  		return true
  2505  	}
  2506  
  2507  	switch n.Op {
  2508  	case OLITERAL:
  2509  		return true
  2510  
  2511  	case ONAME:
  2512  		switch n.Class() {
  2513  		case PAUTO, PPARAM, PPARAMOUT:
  2514  			if !n.Addrtaken() {
  2515  				return true
  2516  			}
  2517  		}
  2518  
  2519  		return false
  2520  
  2521  	case OADD,
  2522  		OSUB,
  2523  		OOR,
  2524  		OXOR,
  2525  		OMUL,
  2526  		ODIV,
  2527  		OMOD,
  2528  		OLSH,
  2529  		ORSH,
  2530  		OAND,
  2531  		OANDNOT,
  2532  		OPLUS,
  2533  		OMINUS,
  2534  		OCOM,
  2535  		OPAREN,
  2536  		OANDAND,
  2537  		OOROR,
  2538  		OCONV,
  2539  		OCONVNOP,
  2540  		OCONVIFACE,
  2541  		ODOTTYPE:
  2542  		return varexpr(n.Left) && varexpr(n.Right)
  2543  
  2544  	case ODOT: // but not ODOTPTR
  2545  		// Should have been handled in aliased.
  2546  		Fatalf("varexpr unexpected ODOT")
  2547  	}
  2548  
  2549  	// Be conservative.
  2550  	return false
  2551  }
  2552  
  2553  // is the name l mentioned in r?
  2554  func vmatch2(l *Node, r *Node) bool {
  2555  	if r == nil {
  2556  		return false
  2557  	}
  2558  	switch r.Op {
  2559  	// match each right given left
  2560  	case ONAME:
  2561  		return l == r
  2562  
  2563  	case OLITERAL:
  2564  		return false
  2565  	}
  2566  
  2567  	if vmatch2(l, r.Left) {
  2568  		return true
  2569  	}
  2570  	if vmatch2(l, r.Right) {
  2571  		return true
  2572  	}
  2573  	for _, n := range r.List.Slice() {
  2574  		if vmatch2(l, n) {
  2575  			return true
  2576  		}
  2577  	}
  2578  	return false
  2579  }
  2580  
  2581  // is any name mentioned in l also mentioned in r?
  2582  // called by sinit.go
  2583  func vmatch1(l *Node, r *Node) bool {
  2584  	// isolate all left sides
  2585  	if l == nil || r == nil {
  2586  		return false
  2587  	}
  2588  	switch l.Op {
  2589  	case ONAME:
  2590  		switch l.Class() {
  2591  		case PPARAM, PAUTO:
  2592  			break
  2593  
  2594  		default:
  2595  			// assignment to non-stack variable must be
  2596  			// delayed if right has function calls.
  2597  			if r.HasCall() {
  2598  				return true
  2599  			}
  2600  		}
  2601  
  2602  		return vmatch2(l, r)
  2603  
  2604  	case OLITERAL:
  2605  		return false
  2606  	}
  2607  
  2608  	if vmatch1(l.Left, r) {
  2609  		return true
  2610  	}
  2611  	if vmatch1(l.Right, r) {
  2612  		return true
  2613  	}
  2614  	for _, n := range l.List.Slice() {
  2615  		if vmatch1(n, r) {
  2616  			return true
  2617  		}
  2618  	}
  2619  	return false
  2620  }
  2621  
  2622  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2623  // and to copy non-result parameters' values from the stack.
  2624  func paramstoheap(params *types.Type) []*Node {
  2625  	var nn []*Node
  2626  	for _, t := range params.Fields().Slice() {
  2627  		v := asNode(t.Nname)
  2628  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2629  			v = nil
  2630  		}
  2631  		if v == nil {
  2632  			continue
  2633  		}
  2634  
  2635  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2636  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2637  			if stackcopy.Class() == PPARAM {
  2638  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2639  			}
  2640  		}
  2641  	}
  2642  
  2643  	return nn
  2644  }
  2645  
  2646  // zeroResults zeros the return values at the start of the function.
  2647  // We need to do this very early in the function.  Defer might stop a
  2648  // panic and show the return values as they exist at the time of
  2649  // panic.  For precise stacks, the garbage collector assumes results
  2650  // are always live, so we need to zero them before any allocations,
  2651  // even allocations to move params/results to the heap.
  2652  // The generated code is added to Curfn's Enter list.
  2653  func zeroResults() {
  2654  	lno := lineno
  2655  	lineno = Curfn.Pos
  2656  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2657  		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
  2658  			// The local which points to the return value is the
  2659  			// thing that needs zeroing. This is already handled
  2660  			// by a Needzero annotation in plive.go:livenessepilogue.
  2661  			continue
  2662  		}
  2663  		// Zero the stack location containing f.
  2664  		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
  2665  	}
  2666  	lineno = lno
  2667  }
  2668  
  2669  // returnsfromheap returns code to copy values for heap-escaped parameters
  2670  // back to the stack.
  2671  func returnsfromheap(params *types.Type) []*Node {
  2672  	var nn []*Node
  2673  	for _, t := range params.Fields().Slice() {
  2674  		v := asNode(t.Nname)
  2675  		if v == nil {
  2676  			continue
  2677  		}
  2678  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2679  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2680  		}
  2681  	}
  2682  
  2683  	return nn
  2684  }
  2685  
  2686  // heapmoves generates code to handle migrating heap-escaped parameters
  2687  // between the stack and the heap. The generated code is added to Curfn's
  2688  // Enter and Exit lists.
  2689  func heapmoves() {
  2690  	lno := lineno
  2691  	lineno = Curfn.Pos
  2692  	nn := paramstoheap(Curfn.Type.Recvs())
  2693  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2694  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2695  	Curfn.Func.Enter.Append(nn...)
  2696  	lineno = Curfn.Func.Endlineno
  2697  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2698  	lineno = lno
  2699  }
  2700  
  2701  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2702  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2703  		Fatalf("mkcall %v %v", fn, fn.Type)
  2704  	}
  2705  
  2706  	n := fn.Type.Params().NumFields()
  2707  
  2708  	r := nod(OCALL, fn, nil)
  2709  	r.List.Set(va[:n])
  2710  	if fn.Type.Results().NumFields() > 0 {
  2711  		r = typecheck(r, Erv|Efnstruct)
  2712  	} else {
  2713  		r = typecheck(r, Etop)
  2714  	}
  2715  	r = walkexpr(r, init)
  2716  	r.Type = t
  2717  	return r
  2718  }
  2719  
  2720  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2721  	return vmkcall(syslook(name), t, init, args)
  2722  }
  2723  
  2724  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2725  	return vmkcall(fn, t, init, args)
  2726  }
  2727  
  2728  func conv(n *Node, t *types.Type) *Node {
  2729  	if eqtype(n.Type, t) {
  2730  		return n
  2731  	}
  2732  	n = nod(OCONV, n, nil)
  2733  	n.Type = t
  2734  	n = typecheck(n, Erv)
  2735  	return n
  2736  }
  2737  
  2738  // byteindex converts n, which is byte-sized, to a uint8.
  2739  // We cannot use conv, because we allow converting bool to uint8 here,
  2740  // which is forbidden in user code.
  2741  func byteindex(n *Node) *Node {
  2742  	if eqtype(n.Type, types.Types[TUINT8]) {
  2743  		return n
  2744  	}
  2745  	n = nod(OCONV, n, nil)
  2746  	n.Type = types.Types[TUINT8]
  2747  	n.SetTypecheck(1)
  2748  	return n
  2749  }
  2750  
  2751  func chanfn(name string, n int, t *types.Type) *Node {
  2752  	if !t.IsChan() {
  2753  		Fatalf("chanfn %v", t)
  2754  	}
  2755  	fn := syslook(name)
  2756  	switch n {
  2757  	default:
  2758  		Fatalf("chanfn %d", n)
  2759  	case 1:
  2760  		fn = substArgTypes(fn, t.Elem())
  2761  	case 2:
  2762  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2763  	}
  2764  	return fn
  2765  }
  2766  
  2767  func mapfn(name string, t *types.Type) *Node {
  2768  	if !t.IsMap() {
  2769  		Fatalf("mapfn %v", t)
  2770  	}
  2771  	fn := syslook(name)
  2772  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
  2773  	return fn
  2774  }
  2775  
  2776  func mapfndel(name string, t *types.Type) *Node {
  2777  	if !t.IsMap() {
  2778  		Fatalf("mapfn %v", t)
  2779  	}
  2780  	fn := syslook(name)
  2781  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
  2782  	return fn
  2783  }
  2784  
  2785  const (
  2786  	mapslow = iota
  2787  	mapfast32
  2788  	mapfast32ptr
  2789  	mapfast64
  2790  	mapfast64ptr
  2791  	mapfaststr
  2792  	nmapfast
  2793  )
  2794  
  2795  type mapnames [nmapfast]string
  2796  
  2797  func mkmapnames(base string, ptr string) mapnames {
  2798  	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2799  }
  2800  
  2801  var mapaccess1 mapnames = mkmapnames("mapaccess1", "")
  2802  var mapaccess2 mapnames = mkmapnames("mapaccess2", "")
  2803  var mapassign mapnames = mkmapnames("mapassign", "ptr")
  2804  var mapdelete mapnames = mkmapnames("mapdelete", "")
  2805  
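         // Illustrative note (added comment, not from the original source):
         // mkmapnames expands a base name into the specialized runtime entry
         // points, e.g. mapassign yields mapassign, mapassign_fast32,
         // mapassign_fast32ptr, mapassign_fast64, mapassign_fast64ptr and
         // mapassign_faststr; mapfast then picks the index into that table
         // from the key's algorithm type and the value size.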
  2806  func mapfast(t *types.Type) int {
  2807  	// Check ../../runtime/hashmap.go:maxValueSize before changing.
  2808  	if t.Val().Width > 128 {
  2809  		return mapslow
  2810  	}
  2811  	switch algtype(t.Key()) {
  2812  	case AMEM32:
  2813  		if !t.Key().HasPointer() {
  2814  			return mapfast32
  2815  		}
  2816  		if Widthptr == 4 {
  2817  			return mapfast32ptr
  2818  		}
  2819  		Fatalf("small pointer %v", t.Key())
  2820  	case AMEM64:
  2821  		if !t.Key().HasPointer() {
  2822  			return mapfast64
  2823  		}
  2824  		if Widthptr == 8 {
  2825  			return mapfast64ptr
  2826  		}
  2827  		// Two-word object, at least one of which is a pointer.
  2828  		// Use the slow path.
  2829  	case ASTRING:
  2830  		return mapfaststr
  2831  	}
  2832  	return mapslow
  2833  }
  2834  
  2835  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2836  	fn := syslook(name)
  2837  	fn = substArgTypes(fn, l, r)
  2838  	return fn
  2839  }
  2840  
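         // Illustrative sketch (added note, not from the original source):
         // a concatenation such as
         //	s := a + b + c
         // becomes, roughly,
         //	s = concatstring3(buf, a, b, c)
         // where buf is either nil or the address of a temporary [32]byte
         // stack buffer when the result does not escape; six or more operands
         // go through concatstrings(buf, []string{...}) instead.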
  2841  func addstr(n *Node, init *Nodes) *Node {
  2842  	// orderexpr rewrote OADDSTR to have a list of strings.
  2843  	c := n.List.Len()
  2844  
  2845  	if c < 2 {
  2846  		Fatalf("addstr count %d too small", c)
  2847  	}
  2848  
  2849  	buf := nodnil()
  2850  	if n.Esc == EscNone {
  2851  		sz := int64(0)
  2852  		for _, n1 := range n.List.Slice() {
  2853  			if n1.Op == OLITERAL {
  2854  				sz += int64(len(n1.Val().U.(string)))
  2855  			}
  2856  		}
  2857  
  2858  		// Don't allocate the buffer if the result won't fit.
  2859  		if sz < tmpstringbufsize {
  2860  			// Create temporary buffer for result string on stack.
  2861  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2862  
  2863  			buf = nod(OADDR, temp(t), nil)
  2864  		}
  2865  	}
  2866  
  2867  	// build list of string arguments
  2868  	args := []*Node{buf}
  2869  	for _, n2 := range n.List.Slice() {
  2870  		args = append(args, conv(n2, types.Types[TSTRING]))
  2871  	}
  2872  
  2873  	var fn string
  2874  	if c <= 5 {
  2875  		// small numbers of strings use direct runtime helpers.
  2876  		// note: orderexpr knows this cutoff too.
  2877  		fn = fmt.Sprintf("concatstring%d", c)
  2878  	} else {
  2879  		// large numbers of strings are passed to the runtime as a slice.
  2880  		fn = "concatstrings"
  2881  
  2882  		t := types.NewSlice(types.Types[TSTRING])
  2883  		slice := nod(OCOMPLIT, nil, typenod(t))
  2884  		if prealloc[n] != nil {
  2885  			prealloc[slice] = prealloc[n]
  2886  		}
  2887  		slice.List.Set(args[1:]) // skip buf arg
  2888  		args = []*Node{buf, slice}
  2889  		slice.Esc = EscNone
  2890  	}
  2891  
  2892  	cat := syslook(fn)
  2893  	r := nod(OCALL, cat, nil)
  2894  	r.List.Set(args)
  2895  	r = typecheck(r, Erv)
  2896  	r = walkexpr(r, init)
  2897  	r.Type = n.Type
  2898  
  2899  	return r
  2900  }
  2901  
  2902  // expand append(l1, l2...) to
  2903  //   init {
  2904  //     s := l1
  2905  //     n := len(s) + len(l2)
  2906  //     // Compare as uint so growslice can panic on overflow.
  2907  //     if uint(n) > uint(cap(s)) {
  2908  //       s = growslice(s, n)
  2909  //     }
  2910  //     s = s[:n]
  2911  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2912  //   }
  2913  //   s
  2914  //
  2915  // l2 is allowed to be a string.
  2916  func appendslice(n *Node, init *Nodes) *Node {
  2917  	walkexprlistsafe(n.List.Slice(), init)
  2918  
  2919  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2920  	// and n are name or literal, but those may index the slice we're
  2921  	// modifying here. Fix explicitly.
  2922  	ls := n.List.Slice()
  2923  	for i1, n1 := range ls {
  2924  		ls[i1] = cheapexpr(n1, init)
  2925  	}
  2926  
  2927  	l1 := n.List.First()
  2928  	l2 := n.List.Second()
  2929  
  2930  	var l []*Node
  2931  
  2932  	// var s []T
  2933  	s := temp(l1.Type)
  2934  	l = append(l, nod(OAS, s, l1)) // s = l1
  2935  
  2936  	// n := len(s) + len(l2)
  2937  	nn := temp(types.Types[TINT])
  2938  	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2939  
  2940  	// if uint(n) > uint(cap(s))
  2941  	nif := nod(OIF, nil, nil)
  2942  	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
  2943  	nif.Left.Left.Type = types.Types[TUINT]
  2944  	nif.Left.Right.Type = types.Types[TUINT]
  2945  
  2946  	// instantiate growslice(Type*, []any, int) []any
  2947  	fn := syslook("growslice")
  2948  	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  2949  
  2950  	// s = growslice(T, s, n)
  2951  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
  2952  	l = append(l, nif)
  2953  
  2954  	// s = s[:n]
  2955  	nt := nod(OSLICE, s, nil)
  2956  	nt.SetSliceBounds(nil, nn, nil)
  2957  	nt.Etype = 1
  2958  	l = append(l, nod(OAS, s, nt))
  2959  
  2960  	if types.Haspointers(l1.Type.Elem()) {
  2961  		// copy(s[len(l1):], l2)
  2962  		nptr1 := nod(OSLICE, s, nil)
  2963  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2964  		nptr1.Etype = 1
  2965  		nptr2 := l2
  2966  		fn := syslook("typedslicecopy")
  2967  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2968  		var ln Nodes
  2969  		ln.Set(l)
  2970  		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
  2971  		l = append(ln.Slice(), nt)
  2972  	} else if instrumenting && !compiling_runtime {
  2973  		// rely on runtime to instrument copy.
  2974  		// copy(s[len(l1):], l2)
  2975  		nptr1 := nod(OSLICE, s, nil)
  2976  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2977  		nptr1.Etype = 1
  2978  		nptr2 := l2
  2979  		var fn *Node
  2980  		if l2.Type.IsString() {
  2981  			fn = syslook("slicestringcopy")
  2982  		} else {
  2983  			fn = syslook("slicecopy")
  2984  		}
  2985  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2986  		var ln Nodes
  2987  		ln.Set(l)
  2988  		nt := mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
  2989  		l = append(ln.Slice(), nt)
  2990  	} else {
  2991  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2992  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  2993  		nptr1.SetBounded(true)
  2994  
  2995  		nptr1 = nod(OADDR, nptr1, nil)
  2996  
  2997  		nptr2 := nod(OSPTR, l2, nil)
  2998  
  2999  		fn := syslook("memmove")
  3000  		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3001  
  3002  		var ln Nodes
  3003  		ln.Set(l)
  3004  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
  3005  
  3006  		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
  3007  		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
  3008  		l = append(ln.Slice(), nt)
  3009  	}
  3010  
  3011  	typecheckslice(l, Etop)
  3012  	walkstmtlist(l)
  3013  	init.Append(l...)
  3014  	return s
  3015  }
  3016  
  3017  // Rewrite append(src, x, y, z) so that any side effects in
  3018  // x, y, z (including runtime panics) are evaluated in
  3019  // initialization statements before the append.
  3020  // For normal code generation, stop there and leave the
  3021  // rest to cgen_append.
  3022  //
  3023  // For race detector, expand append(src, a [, b]* ) to
  3024  //
  3025  //   init {
  3026  //     s := src
  3027  //     const argc = len(args) - 1
  3028  //     if cap(s) - len(s) < argc {
  3029  //	    s = growslice(s, len(s)+argc)
  3030  //     }
  3031  //     n := len(s)
  3032  //     s = s[:n+argc]
  3033  //     s[n] = a
  3034  //     s[n+1] = b
  3035  //     ...
  3036  //   }
  3037  //   s
  3038  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  3039  	if !samesafeexpr(dst, n.List.First()) {
  3040  		n.List.SetFirst(safeexpr(n.List.First(), init))
  3041  		n.List.SetFirst(walkexpr(n.List.First(), init))
  3042  	}
  3043  	walkexprlistsafe(n.List.Slice()[1:], init)
  3044  
  3045  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  3046  	// and n are name or literal, but those may index the slice we're
  3047  	// modifying here. Fix explicitly.
  3048  	// Using cheapexpr also makes sure that the evaluation
  3049  	// of all arguments (and especially any panics) happen
  3050  	// before we begin to modify the slice in a visible way.
  3051  	ls := n.List.Slice()[1:]
  3052  	for i, n := range ls {
  3053  		ls[i] = cheapexpr(n, init)
  3054  	}
  3055  
  3056  	nsrc := n.List.First()
  3057  
  3058  	argc := n.List.Len() - 1
  3059  	if argc < 1 {
  3060  		return nsrc
  3061  	}
  3062  
  3063  	// General case, with no function calls left as arguments.
  3064  	// Leave for gen, except that instrumentation requires old form.
  3065  	if !instrumenting || compiling_runtime {
  3066  		return n
  3067  	}
  3068  
  3069  	var l []*Node
  3070  
  3071  	ns := temp(nsrc.Type)
  3072  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  3073  
  3074  	na := nodintconst(int64(argc)) // const argc
  3075  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  3076  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  3077  
  3078  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  3079  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  3080  
  3081  	nx.Nbody.Set1(nod(OAS, ns,
  3082  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  3083  			nod(OADD, nod(OLEN, ns, nil), na))))
  3084  
  3085  	l = append(l, nx)
  3086  
  3087  	nn := temp(types.Types[TINT])
  3088  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  3089  
  3090  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  3091  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  3092  	nx.Etype = 1
  3093  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  3094  
  3095  	ls = n.List.Slice()[1:]
  3096  	for i, n := range ls {
  3097  		nx = nod(OINDEX, ns, nn) // s[n] ...
  3098  		nx.SetBounded(true)
  3099  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  3100  		if i+1 < len(ls) {
  3101  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  3102  		}
  3103  	}
  3104  
  3105  	typecheckslice(l, Etop)
  3106  	walkstmtlist(l)
  3107  	init.Append(l...)
  3108  	return ns
  3109  }
  3110  
  3111  // Lower copy(a, b) to a memmove call or a runtime call.
  3112  //
  3113  // init {
  3114  //   n := len(a)
  3115  //   if n > len(b) { n = len(b) }
  3116  //   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
  3117  // }
  3118  // n;
  3119  //
  3120  // Also works if b is a string.
  3121  //
  3122  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  3123  	if types.Haspointers(n.Left.Type.Elem()) {
  3124  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  3125  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  3126  	}
  3127  
  3128  	if runtimecall {
  3129  		var fn *Node
  3130  		if n.Right.Type.IsString() {
  3131  			fn = syslook("slicestringcopy")
  3132  		} else {
  3133  			fn = syslook("slicecopy")
  3134  		}
  3135  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3136  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  3137  	}
  3138  
  3139  	n.Left = walkexpr(n.Left, init)
  3140  	n.Right = walkexpr(n.Right, init)
  3141  	nl := temp(n.Left.Type)
  3142  	nr := temp(n.Right.Type)
  3143  	var l []*Node
  3144  	l = append(l, nod(OAS, nl, n.Left))
  3145  	l = append(l, nod(OAS, nr, n.Right))
  3146  
  3147  	nfrm := nod(OSPTR, nr, nil)
  3148  	nto := nod(OSPTR, nl, nil)
  3149  
  3150  	nlen := temp(types.Types[TINT])
  3151  
  3152  	// n = len(to)
  3153  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3154  
  3155  	// if n > len(frm) { n = len(frm) }
  3156  	nif := nod(OIF, nil, nil)
  3157  
  3158  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3159  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3160  	l = append(l, nif)
  3161  
  3162  	// Call memmove.
  3163  	fn := syslook("memmove")
  3164  
  3165  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3166  	nwid := temp(types.Types[TUINTPTR])
  3167  	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
  3168  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3169  	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
  3170  
  3171  	typecheckslice(l, Etop)
  3172  	walkstmtlist(l)
  3173  	init.Append(l...)
  3174  	return nlen
  3175  }
  3176  
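         // Illustrative note (added comment, not from the original source):
         // eqfor returns the function used to compare two values of type t:
         // plain memory types use runtime.memequal (the caller appends a size
         // argument, signalled via needsize), while types containing special
         // fields use the compiler-generated .eq function, which takes two
         // pointers and returns a bool.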
  3177  func eqfor(t *types.Type, needsize *int) *Node {
  3178  	// Should only arrive here with large memory or
  3179  	// a struct/array containing a non-memory field/element.
  3180  	// Small memory is handled inline, and single non-memory
  3181  	// is handled during type check (OCMPSTR etc).
  3182  	switch a, _ := algtype1(t); a {
  3183  	case AMEM:
  3184  		n := syslook("memequal")
  3185  		n = substArgTypes(n, t, t)
  3186  		*needsize = 1
  3187  		return n
  3188  	case ASPECIAL:
  3189  		sym := typesymprefix(".eq", t)
  3190  		n := newname(sym)
  3191  		n.SetClass(PFUNC)
  3192  		ntype := nod(OTFUNC, nil, nil)
  3193  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3194  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3195  		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
  3196  		ntype = typecheck(ntype, Etype)
  3197  		n.Type = ntype.Type
  3198  		*needsize = 0
  3199  		return n
  3200  	}
  3201  	Fatalf("eqfor %v", t)
  3202  	return nil
  3203  }
  3204  
  3205  // The result of walkcompare MUST be assigned back to n, e.g.
  3206  // 	n.Left = walkcompare(n.Left, init)
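         // Illustrative sketch (added note, not from the original source):
         // when a small struct is compared with ==, the comparison is
         // expanded field by field, so for a struct with fields X and Y
         //	p == q
         // becomes, roughly,
         //	p.X == q.X && p.Y == q.Y
         // Small arrays are handled similarly, combining adjacent narrow
         // elements into wider integer loads where the target allows
         // unaligned access; larger types call the generated .eq function.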
  3207  func walkcompare(n *Node, init *Nodes) *Node {
  3208  	// Given interface value l and concrete value r, rewrite
  3209  	//   l == r
  3210  	// into types-equal && data-equal.
  3211  	// This is efficient, avoids allocations, and avoids runtime calls.
  3212  	var l, r *Node
  3213  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3214  		l = n.Left
  3215  		r = n.Right
  3216  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3217  		l = n.Right
  3218  		r = n.Left
  3219  	}
  3220  
  3221  	if l != nil {
  3222  		// Handle both == and !=.
  3223  		eq := n.Op
  3224  		var andor Op
  3225  		if eq == OEQ {
  3226  			andor = OANDAND
  3227  		} else {
  3228  			andor = OOROR
  3229  		}
  3230  		// Check for types equal.
  3231  		// For empty interface, this is:
  3232  		//   l.tab == type(r)
  3233  		// For non-empty interface, this is:
  3234  		//   l.tab != nil && l.tab._type == type(r)
  3235  		var eqtype *Node
  3236  		tab := nod(OITAB, l, nil)
  3237  		rtyp := typename(r.Type)
  3238  		if l.Type.IsEmptyInterface() {
  3239  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3240  			tab.SetTypecheck(1)
  3241  			eqtype = nod(eq, tab, rtyp)
  3242  		} else {
  3243  			nonnil := nod(brcom(eq), nodnil(), tab)
  3244  			match := nod(eq, itabType(tab), rtyp)
  3245  			eqtype = nod(andor, nonnil, match)
  3246  		}
  3247  		// Check for data equal.
  3248  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3249  		// Put it all together.
  3250  		expr := nod(andor, eqtype, eqdata)
  3251  		n = finishcompare(n, expr, init)
  3252  		return n
  3253  	}
  3254  
  3255  	// Must be comparison of array or struct.
  3256  	// Otherwise back end handles it.
  3257  	// While we're here, decide whether to
  3258  	// inline or call an eq alg.
  3259  	t := n.Left.Type
  3260  	var inline bool
  3261  
  3262  	maxcmpsize := int64(4)
  3263  	unalignedLoad := false
  3264  	switch thearch.LinkArch.Family {
  3265  	case sys.AMD64, sys.ARM64, sys.S390X:
   3266  		// Keep this low enough to generate less code than a function call.
  3267  		maxcmpsize = 16
  3268  		unalignedLoad = true
  3269  	case sys.I386:
  3270  		maxcmpsize = 8
  3271  		unalignedLoad = true
  3272  	}
  3273  
  3274  	switch t.Etype {
  3275  	default:
  3276  		return n
  3277  	case TARRAY:
  3278  		// We can compare several elements at once with 2/4/8 byte integer compares
  3279  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3280  	case TSTRUCT:
  3281  		inline = t.NumFields() <= 4
  3282  	}
  3283  
  3284  	cmpl := n.Left
  3285  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3286  		cmpl = cmpl.Left
  3287  	}
  3288  	cmpr := n.Right
  3289  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3290  		cmpr = cmpr.Left
  3291  	}
  3292  
   3293  	// We chose not to inline; call the equality function directly.
  3294  	if !inline {
  3295  		if isvaluelit(cmpl) {
  3296  			var_ := temp(cmpl.Type)
  3297  			anylit(cmpl, var_, init)
  3298  			cmpl = var_
  3299  		}
  3300  		if isvaluelit(cmpr) {
  3301  			var_ := temp(cmpr.Type)
  3302  			anylit(cmpr, var_, init)
  3303  			cmpr = var_
  3304  		}
  3305  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3306  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3307  		}
  3308  
  3309  		// eq algs take pointers
  3310  		pl := temp(types.NewPtr(t))
  3311  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3312  		al.Right.Etype = 1 // addr does not escape
  3313  		al = typecheck(al, Etop)
  3314  		init.Append(al)
  3315  
  3316  		pr := temp(types.NewPtr(t))
  3317  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3318  		ar.Right.Etype = 1 // addr does not escape
  3319  		ar = typecheck(ar, Etop)
  3320  		init.Append(ar)
  3321  
  3322  		var needsize int
  3323  		call := nod(OCALL, eqfor(t, &needsize), nil)
  3324  		call.List.Append(pl)
  3325  		call.List.Append(pr)
  3326  		if needsize != 0 {
  3327  			call.List.Append(nodintconst(t.Width))
  3328  		}
  3329  		res := call
  3330  		if n.Op != OEQ {
  3331  			res = nod(ONOT, res, nil)
  3332  		}
  3333  		n = finishcompare(n, res, init)
  3334  		return n
  3335  	}
  3336  
  3337  	// inline: build boolean expression comparing element by element
  3338  	andor := OANDAND
  3339  	if n.Op == ONE {
  3340  		andor = OOROR
  3341  	}
  3342  	var expr *Node
  3343  	compare := func(el, er *Node) {
  3344  		a := nod(n.Op, el, er)
  3345  		if expr == nil {
  3346  			expr = a
  3347  		} else {
  3348  			expr = nod(andor, expr, a)
  3349  		}
  3350  	}
  3351  	cmpl = safeexpr(cmpl, init)
  3352  	cmpr = safeexpr(cmpr, init)
  3353  	if t.IsStruct() {
  3354  		for _, f := range t.Fields().Slice() {
  3355  			sym := f.Sym
  3356  			if sym.IsBlank() {
  3357  				continue
  3358  			}
  3359  			compare(
  3360  				nodSym(OXDOT, cmpl, sym),
  3361  				nodSym(OXDOT, cmpr, sym),
  3362  			)
  3363  		}
  3364  	} else {
  3365  		step := int64(1)
  3366  		remains := t.NumElem() * t.Elem().Width
  3367  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3368  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3369  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3370  		for i := int64(0); remains > 0; {
  3371  			var convType *types.Type
  3372  			switch {
  3373  			case remains >= 8 && combine64bit:
  3374  				convType = types.Types[TINT64]
  3375  				step = 8 / t.Elem().Width
  3376  			case remains >= 4 && combine32bit:
  3377  				convType = types.Types[TUINT32]
  3378  				step = 4 / t.Elem().Width
  3379  			case remains >= 2 && combine16bit:
  3380  				convType = types.Types[TUINT16]
  3381  				step = 2 / t.Elem().Width
  3382  			default:
  3383  				step = 1
  3384  			}
  3385  			if step == 1 {
  3386  				compare(
  3387  					nod(OINDEX, cmpl, nodintconst(int64(i))),
  3388  					nod(OINDEX, cmpr, nodintconst(int64(i))),
  3389  				)
  3390  				i++
  3391  				remains -= t.Elem().Width
  3392  			} else {
  3393  				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
  3394  				cmplw = conv(cmplw, convType)
  3395  				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
  3396  				cmprw = conv(cmprw, convType)
  3397  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3398  				// ssa will generate a single large load.
  3399  				for offset := int64(1); offset < step; offset++ {
  3400  					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
  3401  					lb = conv(lb, convType)
  3402  					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
  3403  					cmplw = nod(OOR, cmplw, lb)
  3404  					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
  3405  					rb = conv(rb, convType)
  3406  					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
  3407  					cmprw = nod(OOR, cmprw, rb)
  3408  				}
  3409  				compare(cmplw, cmprw)
  3410  				i += step
  3411  				remains -= step * t.Elem().Width
  3412  			}
  3413  		}
  3414  	}
  3415  	if expr == nil {
  3416  		expr = nodbool(n.Op == OEQ)
  3417  	}
  3418  	n = finishcompare(n, expr, init)
  3419  	return n
  3420  }
  3421  
  3422  // The result of finishcompare MUST be assigned back to n, e.g.
   3423  // 	n.Left = finishcompare(n.Left, r, init)
  3424  func finishcompare(n, r *Node, init *Nodes) *Node {
  3425  	// Use nn here to avoid passing r to typecheck.
  3426  	nn := r
  3427  	nn = typecheck(nn, Erv)
  3428  	nn = walkexpr(nn, init)
  3429  	r = nn
  3430  	if r.Type != n.Type {
  3431  		r = nod(OCONVNOP, r, nil)
  3432  		r.Type = n.Type
  3433  		r.SetTypecheck(1)
  3434  		nn = r
  3435  	}
  3436  	return nn
  3437  }
  3438  
  3439  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3440  func (n *Node) isIntOrdering() bool {
  3441  	switch n.Op {
  3442  	case OLE, OLT, OGE, OGT:
  3443  	default:
  3444  		return false
  3445  	}
  3446  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3447  }
  3448  
  3449  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3450  // n must be an OANDAND or OOROR node.
  3451  // The result of walkinrange MUST be assigned back to n, e.g.
  3452  // 	n.Left = walkinrange(n.Left)
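        // For example (a sketch): with constant bounds, a check such as
        //	4 <= x && x < 10
        // is rewritten to the single unsigned comparison
        //	uint(x-4) < 6
        // (using the unsigned version of x's type).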
  3453  func walkinrange(n *Node, init *Nodes) *Node {
  3454  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3455  	// * a, b, and c have integer type
  3456  	// * b is side-effect-free
  3457  	// * opl and opr are each < or ≤
  3458  	// * OP is &&
  3459  	l := n.Left
  3460  	r := n.Right
  3461  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3462  		return n
  3463  	}
  3464  
  3465  	// Find b, if it exists, and rename appropriately.
  3466  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3467  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3468  	a, opl, b := l.Left, l.Op, l.Right
  3469  	x, opr, c := r.Left, r.Op, r.Right
  3470  	for i := 0; ; i++ {
  3471  		if samesafeexpr(b, x) {
  3472  			break
  3473  		}
  3474  		if i == 3 {
  3475  			// Tried all permutations and couldn't find an appropriate b == x.
  3476  			return n
  3477  		}
  3478  		if i&1 == 0 {
  3479  			a, opl, b = b, brrev(opl), a
  3480  		} else {
  3481  			x, opr, c = c, brrev(opr), x
  3482  		}
  3483  	}
  3484  
  3485  	// If n.Op is ||, apply de Morgan.
  3486  	// Negate the internal ops now; we'll negate the top level op at the end.
  3487  	// Henceforth assume &&.
  3488  	negateResult := n.Op == OOROR
  3489  	if negateResult {
  3490  		opl = brcom(opl)
  3491  		opr = brcom(opr)
  3492  	}
  3493  
  3494  	cmpdir := func(o Op) int {
  3495  		switch o {
  3496  		case OLE, OLT:
  3497  			return -1
  3498  		case OGE, OGT:
  3499  			return +1
  3500  		}
  3501  		Fatalf("walkinrange cmpdir %v", o)
  3502  		return 0
  3503  	}
  3504  	if cmpdir(opl) != cmpdir(opr) {
  3505  		// Not a range check; something like b < a && b < c.
  3506  		return n
  3507  	}
  3508  
  3509  	switch opl {
  3510  	case OGE, OGT:
  3511  		// We have something like a > b && b ≥ c.
  3512  		// Switch and reverse ops and rename constants,
  3513  		// to make it look like a ≤ b && b < c.
  3514  		a, c = c, a
  3515  		opl, opr = brrev(opr), brrev(opl)
  3516  	}
  3517  
  3518  	// We must ensure that c-a is non-negative.
  3519  	// For now, require a and c to be constants.
  3520  	// In the future, we could also support a == 0 and c == len/cap(...).
  3521  	// Unfortunately, by this point, most len/cap expressions have been
  3522  	// stored into temporary variables.
  3523  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3524  		return n
  3525  	}
  3526  
  3527  	if opl == OLT {
  3528  		// We have a < b && ...
  3529  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3530  		// If a is not the maximum constant for b's type,
  3531  		// we can increment a and switch to ≤.
  3532  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3533  			return n
  3534  		}
  3535  		a = nodintconst(a.Int64() + 1)
  3536  		opl = OLE
  3537  	}
  3538  
  3539  	bound := c.Int64() - a.Int64()
  3540  	if bound < 0 {
  3541  		// Bad news. Something like 5 <= x && x < 3.
  3542  		// Rare in practice, and we still need to generate side-effects,
  3543  		// so just leave it alone.
  3544  		return n
  3545  	}
  3546  
  3547  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3548  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3549  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3550  	// which is equivalent to uint(b-a) < uint(c-a).
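        	// (If b < a, then b-a is negative and its unsigned reinterpretation is
        	// no smaller than c-a, so the single unsigned comparison still fails,
        	// which is the desired result.)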
  3551  	ut := b.Type.ToUnsigned()
  3552  	lhs := conv(nod(OSUB, b, a), ut)
  3553  	rhs := nodintconst(bound)
  3554  	if negateResult {
  3555  		// Negate top level.
  3556  		opr = brcom(opr)
  3557  	}
  3558  	cmp := nod(opr, lhs, rhs)
  3559  	cmp.Pos = n.Pos
  3560  	cmp = addinit(cmp, l.Ninit.Slice())
  3561  	cmp = addinit(cmp, r.Ninit.Slice())
  3562  	// Typecheck the AST rooted at cmp...
  3563  	cmp = typecheck(cmp, Erv)
  3564  	// ...but then reset cmp's type to match n's type.
  3565  	cmp.Type = n.Type
  3566  	cmp = walkexpr(cmp, init)
  3567  	return cmp
  3568  }
  3569  
  3570  // bounded reports whether integer n is known to be in the range [0, max).
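        // For example (given the rules below): x&7 is bounded by 8, since the
        // constant mask limits the result; for unsigned x, x%10 is bounded by 10;
        // and for a uint8 value x, x>>4 fits in 4 bits and so is bounded by 16.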
  3571  func bounded(n *Node, max int64) bool {
  3572  	if n.Type == nil || !n.Type.IsInteger() {
  3573  		return false
  3574  	}
  3575  
  3576  	sign := n.Type.IsSigned()
  3577  	bits := int32(8 * n.Type.Width)
  3578  
  3579  	if smallintconst(n) {
  3580  		v := n.Int64()
  3581  		return 0 <= v && v < max
  3582  	}
  3583  
  3584  	switch n.Op {
  3585  	case OAND:
  3586  		v := int64(-1)
  3587  		if smallintconst(n.Left) {
  3588  			v = n.Left.Int64()
  3589  		} else if smallintconst(n.Right) {
  3590  			v = n.Right.Int64()
  3591  		}
  3592  
  3593  		if 0 <= v && v < max {
  3594  			return true
  3595  		}
  3596  
  3597  	case OMOD:
  3598  		if !sign && smallintconst(n.Right) {
  3599  			v := n.Right.Int64()
  3600  			if 0 <= v && v <= max {
  3601  				return true
  3602  			}
  3603  		}
  3604  
  3605  	case ODIV:
  3606  		if !sign && smallintconst(n.Right) {
  3607  			v := n.Right.Int64()
  3608  			for bits > 0 && v >= 2 {
  3609  				bits--
  3610  				v >>= 1
  3611  			}
  3612  		}
  3613  
  3614  	case ORSH:
  3615  		if !sign && smallintconst(n.Right) {
  3616  			v := n.Right.Int64()
  3617  			if v > int64(bits) {
  3618  				return true
  3619  			}
  3620  			bits -= int32(v)
  3621  		}
  3622  	}
  3623  
  3624  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3625  		return true
  3626  	}
  3627  
  3628  	return false
  3629  }
  3630  
  3631  // usemethod checks interface method calls for uses of reflect.Type.Method and MethodByName.
  3632  func usemethod(n *Node) {
  3633  	t := n.Left.Type
  3634  
  3635  	// Looking for either of:
  3636  	//	Method(int) reflect.Method
  3637  	//	MethodByName(string) (reflect.Method, bool)
  3638  	//
  3639  	// TODO(crawshaw): improve precision of match by working out
  3640  	//                 how to check the method name.
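        	// For example, both t.Method(0) and t.MethodByName("Close") on a
        	// reflect.Type have these shapes; a match below sets the ReflectMethod
        	// flag on the enclosing function.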
  3641  	if n := t.Params().NumFields(); n != 1 {
  3642  		return
  3643  	}
  3644  	if n := t.Results().NumFields(); n != 1 && n != 2 {
  3645  		return
  3646  	}
  3647  	p0 := t.Params().Field(0)
  3648  	res0 := t.Results().Field(0)
  3649  	var res1 *types.Field
  3650  	if t.Results().NumFields() == 2 {
  3651  		res1 = t.Results().Field(1)
  3652  	}
  3653  
  3654  	if res1 == nil {
  3655  		if p0.Type.Etype != TINT {
  3656  			return
  3657  		}
  3658  	} else {
  3659  		if !p0.Type.IsString() {
  3660  			return
  3661  		}
  3662  		if !res1.Type.IsBoolean() {
  3663  			return
  3664  		}
  3665  	}
  3666  
  3667  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3668  	//       (including global variables such as numImports - was issue #19028).
  3669  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3670  		Curfn.Func.SetReflectMethod(true)
  3671  	}
  3672  }
  3673  
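        // usefield records the use of a tracked struct field in Curfn.Func.FieldTrack.
        // It is a no-op unless the fieldtrack experiment is enabled. A field is
        // tracked when its tag contains go:"track", e.g. (a sketch)
        //	type T struct {
        //		F int `go:"track"`
        //	}
        // and it must be an exported field of a named struct type.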
  3674  func usefield(n *Node) {
  3675  	if objabi.Fieldtrack_enabled == 0 {
  3676  		return
  3677  	}
  3678  
  3679  	switch n.Op {
  3680  	default:
  3681  		Fatalf("usefield %v", n.Op)
  3682  
  3683  	case ODOT, ODOTPTR:
  3684  		break
  3685  	}
  3686  	if n.Sym == nil {
  3687  		// No field name.  This DOTPTR was built by the compiler for access
  3688  		// to runtime data structures.  Ignore.
  3689  		return
  3690  	}
  3691  
  3692  	t := n.Left.Type
  3693  	if t.IsPtr() {
  3694  		t = t.Elem()
  3695  	}
  3696  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3697  	if field == nil {
  3698  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3699  	}
  3700  	if !strings.Contains(field.Note, "go:\"track\"") {
  3701  		return
  3702  	}
  3703  
  3704  	outer := n.Left.Type
  3705  	if outer.IsPtr() {
  3706  		outer = outer.Elem()
  3707  	}
  3708  	if outer.Sym == nil {
  3709  		yyerror("tracked field must be in named struct type")
  3710  	}
  3711  	if !exportname(field.Sym.Name) {
  3712  		yyerror("tracked field must be exported (upper case)")
  3713  	}
  3714  
  3715  	sym := tracksym(outer, field)
  3716  	if Curfn.Func.FieldTrack == nil {
  3717  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3718  	}
  3719  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3720  }
  3721  
  3722  func candiscardlist(l Nodes) bool {
  3723  	for _, n := range l.Slice() {
  3724  		if !candiscard(n) {
  3725  			return false
  3726  		}
  3727  	}
  3728  	return true
  3729  }
  3730  
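        // candiscard reports whether the expression n can be discarded without
        // evaluating it: n and all of its subexpressions are free of side effects
        // and cannot fail at runtime (no calls, assignments, channel or map
        // operations, possible division by zero, or make calls with potentially
        // invalid sizes).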
  3731  func candiscard(n *Node) bool {
  3732  	if n == nil {
  3733  		return true
  3734  	}
  3735  
  3736  	switch n.Op {
  3737  	default:
  3738  		return false
  3739  
  3740  		// Discardable as long as the subpieces are.
  3741  	case ONAME,
  3742  		ONONAME,
  3743  		OTYPE,
  3744  		OPACK,
  3745  		OLITERAL,
  3746  		OADD,
  3747  		OSUB,
  3748  		OOR,
  3749  		OXOR,
  3750  		OADDSTR,
  3751  		OADDR,
  3752  		OANDAND,
  3753  		OARRAYBYTESTR,
  3754  		OARRAYRUNESTR,
  3755  		OSTRARRAYBYTE,
  3756  		OSTRARRAYRUNE,
  3757  		OCAP,
  3758  		OCMPIFACE,
  3759  		OCMPSTR,
  3760  		OCOMPLIT,
  3761  		OMAPLIT,
  3762  		OSTRUCTLIT,
  3763  		OARRAYLIT,
  3764  		OSLICELIT,
  3765  		OPTRLIT,
  3766  		OCONV,
  3767  		OCONVIFACE,
  3768  		OCONVNOP,
  3769  		ODOT,
  3770  		OEQ,
  3771  		ONE,
  3772  		OLT,
  3773  		OLE,
  3774  		OGT,
  3775  		OGE,
  3776  		OKEY,
  3777  		OSTRUCTKEY,
  3778  		OLEN,
  3779  		OMUL,
  3780  		OLSH,
  3781  		ORSH,
  3782  		OAND,
  3783  		OANDNOT,
  3784  		ONEW,
  3785  		ONOT,
  3786  		OCOM,
  3787  		OPLUS,
  3788  		OMINUS,
  3789  		OOROR,
  3790  		OPAREN,
  3791  		ORUNESTR,
  3792  		OREAL,
  3793  		OIMAG,
  3794  		OCOMPLEX:
  3795  		break
  3796  
  3797  		// Discardable as long as we know it's not division by zero.
  3798  	case ODIV, OMOD:
  3799  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3800  			break
  3801  		}
  3802  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3803  			break
  3804  		}
  3805  		return false
  3806  
  3807  		// Discardable as long as we know it won't fail because of a bad size.
  3808  	case OMAKECHAN, OMAKEMAP:
  3809  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3810  			break
  3811  		}
  3812  		return false
  3813  
  3814  		// Difficult to tell what sizes are okay.
  3815  	case OMAKESLICE:
  3816  		return false
  3817  	}
  3818  
  3819  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3820  		return false
  3821  	}
  3822  
  3823  	return true
  3824  }
  3825  
  3826  // rewrite
  3827  //	print(x, y, z)
  3828  // into
  3829  //	func(a1, a2, a3) {
  3830  //		print(a1, a2, a3)
  3831  //	}(x, y, z)
  3832  // and same for println.
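        // This is used when print or println is the call in a go or defer
        // statement, which needs an ordinary function to invoke; the wrapper's
        // arguments are still evaluated at the go/defer site.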
  3833  
  3834  var walkprintfunc_prgen int
  3835  
  3836  // The result of walkprintfunc MUST be assigned back to n, e.g.
  3837  // 	n.Left = walkprintfunc(n.Left, init)
  3838  func walkprintfunc(n *Node, init *Nodes) *Node {
  3839  	if n.Ninit.Len() != 0 {
  3840  		walkstmtlist(n.Ninit.Slice())
  3841  		init.AppendNodes(&n.Ninit)
  3842  	}
  3843  
  3844  	t := nod(OTFUNC, nil, nil)
  3845  	num := 0
  3846  	var printargs []*Node
  3847  	var a *Node
  3848  	var buf string
  3849  	for _, n1 := range n.List.Slice() {
  3850  		buf = fmt.Sprintf("a%d", num)
  3851  		num++
  3852  		a = namedfield(buf, n1.Type)
  3853  		t.List.Append(a)
  3854  		printargs = append(printargs, a.Left)
  3855  	}
  3856  
  3857  	oldfn := Curfn
  3858  	Curfn = nil
  3859  
  3860  	walkprintfunc_prgen++
  3861  	sym := lookupN("print·%d", walkprintfunc_prgen)
  3862  	fn := dclfunc(sym, t)
  3863  
  3864  	a = nod(n.Op, nil, nil)
  3865  	a.List.Set(printargs)
  3866  	a = typecheck(a, Etop)
  3867  	a = walkstmt(a)
  3868  
  3869  	fn.Nbody.Set1(a)
  3870  
  3871  	funcbody(fn)
  3872  
  3873  	fn = typecheck(fn, Etop)
  3874  	typecheckslice(fn.Nbody.Slice(), Etop)
  3875  	xtop = append(xtop, fn)
  3876  	Curfn = oldfn
  3877  
  3878  	a = nod(OCALL, nil, nil)
  3879  	a.Left = fn.Func.Nname
  3880  	a.List.Set(n.List.Slice())
  3881  	a = typecheck(a, Etop)
  3882  	a = walkexpr(a, init)
  3883  	return a
  3884  }
  3885  
  3886  // substArgTypes substitutes the given list of types for
  3887  // successive occurrences of the "any" placeholder in the
  3888  // type syntax expression n.Type.
  3889  // The result of substArgTypes MUST be assigned back to old, e.g.
  3890  // 	n.Left = substArgTypes(n.Left, t1, t2)
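        // For example (a sketch, not an actual runtime declaration): for a node n
        // of type func(p *any, n int) []any, substArgTypes(n, t, t) produces a copy
        // whose type is func(p *T, n int) []T for T = t. Passing more types than
        // there are "any" placeholders is a fatal error.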
  3891  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3892  	n := *old // make shallow copy
  3893  
  3894  	for _, t := range types_ {
  3895  		dowidth(t)
  3896  	}
  3897  	n.Type = types.SubstAny(n.Type, &types_)
  3898  	if len(types_) > 0 {
  3899  		Fatalf("substArgTypes: too many argument types")
  3900  	}
  3901  	return &n
  3902  }