github.com/karrick/go@v0.0.0-20170817181416-d5b0ec858b37/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"fmt"
    12  	"strings"
    13  )
    14  
    15  // The constant is known to the runtime.
    16  const (
    17  	tmpstringbufsize = 32
    18  )
    19  
    20  func walk(fn *Node) {
    21  	Curfn = fn
    22  
    23  	if Debug['W'] != 0 {
    24  		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
    25  		dumplist(s, Curfn.Nbody)
    26  	}
    27  
    28  	lno := lineno
    29  
    30  	// Final typecheck for any unused variables.
    31  	for i, ln := range fn.Func.Dcl {
    32  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    33  			ln = typecheck(ln, Erv|Easgn)
    34  			fn.Func.Dcl[i] = ln
    35  		}
    36  	}
    37  
    38  	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    39  	for _, ln := range fn.Func.Dcl {
    40  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    41  			ln.Name.Defn.Left.Name.SetUsed(true)
    42  		}
    43  	}
    44  
    45  	for _, ln := range fn.Func.Dcl {
    46  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    47  			continue
    48  		}
    49  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    50  			if defn.Left.Name.Used() {
    51  				continue
    52  			}
    53  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    54  			defn.Left.Name.SetUsed(true) // suppress repeats
    55  		} else {
    56  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    57  		}
    58  	}
    59  
    60  	lineno = lno
    61  	if nerrors != 0 {
    62  		return
    63  	}
    64  	walkstmtlist(Curfn.Nbody.Slice())
    65  	if Debug['W'] != 0 {
    66  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    67  		dumplist(s, Curfn.Nbody)
    68  	}
    69  
    70  	zeroResults()
    71  	heapmoves()
    72  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    73  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    74  		dumplist(s, Curfn.Func.Enter)
    75  	}
    76  }
    77  
    78  func walkstmtlist(s []*Node) {
    79  	for i := range s {
    80  		s[i] = walkstmt(s[i])
    81  	}
    82  }
    83  
    84  func samelist(a, b []*Node) bool {
    85  	if len(a) != len(b) {
    86  		return false
    87  	}
    88  	for i, n := range a {
    89  		if n != b[i] {
    90  			return false
    91  		}
    92  	}
    93  	return true
    94  }
    95  
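        // paramoutheap reports whether fn has a result parameter that has
        // been moved to the heap or has had its address taken; in that case
        // the ORETURN case in walkstmt assigns results to the output
        // parameters explicitly.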
    96  func paramoutheap(fn *Node) bool {
    97  	for _, ln := range fn.Func.Dcl {
    98  		switch ln.Class() {
    99  		case PPARAMOUT:
   100  			if ln.isParamStackCopy() || ln.Addrtaken() {
   101  				return true
   102  			}
   103  
   104  		case PAUTO:
   105  			// stop early - parameters are over
   106  			return false
   107  		}
   108  	}
   109  
   110  	return false
   111  }
   112  
   113  // adjustargs adds "adjust" to all the argument locations for the call n.
   114  // n must be a defer or go node that has already been walked.
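        // For example, the 2*Widthptr adjustment in the ODEFER and OPROC
        // cases below shifts every OINDREGSP argument store past the size
        // and fn words that the runtime's deferproc/newproc expect at the
        // start of the argument block.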
   115  func adjustargs(n *Node, adjust int) {
   116  	var arg *Node
   117  	var lhs *Node
   118  
   119  	callfunc := n.Left
   120  	for _, arg = range callfunc.List.Slice() {
   121  		if arg.Op != OAS {
   122  			Fatalf("call arg not assignment")
   123  		}
   124  		lhs = arg.Left
   125  		if lhs.Op == ONAME {
   126  			// This is a temporary introduced by reorder1.
   127  			// The real store to the stack appears later in the arg list.
   128  			continue
   129  		}
   130  
   131  		if lhs.Op != OINDREGSP {
   132  			Fatalf("call argument store does not use OINDREGSP")
   133  		}
   134  
   135  		// can't really check this in machine-indep code.
   136  		//if(lhs->val.u.reg != D_SP)
   137  		//      Fatalf("call arg assign not indreg(SP)")
   138  		lhs.Xoffset += int64(adjust)
   139  	}
   140  }
   141  
   142  // The result of walkstmt MUST be assigned back to n, e.g.
   143  // 	n.Left = walkstmt(n.Left)
   144  func walkstmt(n *Node) *Node {
   145  	if n == nil {
   146  		return n
   147  	}
   148  
   149  	setlineno(n)
   150  
   151  	walkstmtlist(n.Ninit.Slice())
   152  
   153  	switch n.Op {
   154  	default:
   155  		if n.Op == ONAME {
   156  			yyerror("%v is not a top level statement", n.Sym)
   157  		} else {
   158  			yyerror("%v is not a top level statement", n.Op)
   159  		}
   160  		Dump("nottop", n)
   161  
   162  	case OAS,
   163  		OASOP,
   164  		OAS2,
   165  		OAS2DOTTYPE,
   166  		OAS2RECV,
   167  		OAS2FUNC,
   168  		OAS2MAPR,
   169  		OCLOSE,
   170  		OCOPY,
   171  		OCALLMETH,
   172  		OCALLINTER,
   173  		OCALL,
   174  		OCALLFUNC,
   175  		ODELETE,
   176  		OSEND,
   177  		OPRINT,
   178  		OPRINTN,
   179  		OPANIC,
   180  		OEMPTY,
   181  		ORECOVER,
   182  		OGETG:
   183  		if n.Typecheck() == 0 {
   184  			Fatalf("missing typecheck: %+v", n)
   185  		}
   186  		wascopy := n.Op == OCOPY
   187  		init := n.Ninit
   188  		n.Ninit.Set(nil)
   189  		n = walkexpr(n, &init)
   190  		n = addinit(n, init.Slice())
   191  		if wascopy && n.Op == OCONVNOP {
   192  			n.Op = OEMPTY // don't leave plain values as statements.
   193  		}
   194  
   195  	// special case for a receive where we throw away
   196  	// the value received.
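        	// For example, a bare "<-c" statement becomes chanrecv1(c, nil),
        	// which discards the received value.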
   197  	case ORECV:
   198  		if n.Typecheck() == 0 {
   199  			Fatalf("missing typecheck: %+v", n)
   200  		}
   201  		init := n.Ninit
   202  		n.Ninit.Set(nil)
   203  
   204  		n.Left = walkexpr(n.Left, &init)
   205  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   206  		n = walkexpr(n, &init)
   207  
   208  		n = addinit(n, init.Slice())
   209  
   210  	case OBREAK,
   211  		OCONTINUE,
   212  		OFALL,
   213  		OGOTO,
   214  		OLABEL,
   215  		ODCLCONST,
   216  		ODCLTYPE,
   217  		OCHECKNIL,
   218  		OVARKILL,
   219  		OVARLIVE:
   220  		break
   221  
   222  	case ODCL:
   223  		v := n.Left
   224  		if v.Class() == PAUTOHEAP {
   225  			if compiling_runtime {
   226  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   227  			}
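        			// Rewrite the declaration of a heap-moved local into an
        			// allocation: the variable's Heapaddr is assigned new(T).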
   228  			if prealloc[v] == nil {
   229  				prealloc[v] = callnew(v.Type)
   230  			}
   231  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   232  			nn.SetColas(true)
   233  			nn = typecheck(nn, Etop)
   234  			return walkstmt(nn)
   235  		}
   236  
   237  	case OBLOCK:
   238  		walkstmtlist(n.List.Slice())
   239  
   240  	case OXCASE:
   241  		yyerror("case statement out of place")
   242  		n.Op = OCASE
   243  		fallthrough
   244  
   245  	case OCASE:
   246  		n.Right = walkstmt(n.Right)
   247  
   248  	case ODEFER:
   249  		Curfn.Func.SetHasDefer(true)
   250  		switch n.Left.Op {
   251  		case OPRINT, OPRINTN:
   252  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   253  
   254  		case OCOPY:
   255  			n.Left = copyany(n.Left, &n.Ninit, true)
   256  
   257  		default:
   258  			n.Left = walkexpr(n.Left, &n.Ninit)
   259  		}
   260  
   261  		// make room for size & fn arguments.
   262  		adjustargs(n, 2*Widthptr)
   263  
   264  	case OFOR, OFORUNTIL:
   265  		if n.Left != nil {
   266  			walkstmtlist(n.Left.Ninit.Slice())
   267  			init := n.Left.Ninit
   268  			n.Left.Ninit.Set(nil)
   269  			n.Left = walkexpr(n.Left, &init)
   270  			n.Left = addinit(n.Left, init.Slice())
   271  		}
   272  
   273  		n.Right = walkstmt(n.Right)
   274  		walkstmtlist(n.Nbody.Slice())
   275  
   276  	case OIF:
   277  		n.Left = walkexpr(n.Left, &n.Ninit)
   278  		walkstmtlist(n.Nbody.Slice())
   279  		walkstmtlist(n.Rlist.Slice())
   280  
   281  	case OPROC:
   282  		switch n.Left.Op {
   283  		case OPRINT, OPRINTN:
   284  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   285  
   286  		case OCOPY:
   287  			n.Left = copyany(n.Left, &n.Ninit, true)
   288  
   289  		default:
   290  			n.Left = walkexpr(n.Left, &n.Ninit)
   291  		}
   292  
   293  		// make room for size & fn arguments.
   294  		adjustargs(n, 2*Widthptr)
   295  
   296  	case ORETURN:
   297  		walkexprlist(n.List.Slice(), &n.Ninit)
   298  		if n.List.Len() == 0 {
   299  			break
   300  		}
   301  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   302  			// assign to the function out parameters,
   303  			// so that reorder3 can fix up conflicts
   304  			var rl []*Node
   305  
   306  			var cl Class
   307  			for _, ln := range Curfn.Func.Dcl {
   308  				cl = ln.Class()
   309  				if cl == PAUTO || cl == PAUTOHEAP {
   310  					break
   311  				}
   312  				if cl == PPARAMOUT {
   313  					if ln.isParamStackCopy() {
   314  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   315  					}
   316  					rl = append(rl, ln)
   317  				}
   318  			}
   319  
   320  			if got, want := n.List.Len(), len(rl); got != want {
   321  				// order should have rewritten multi-value function calls
   322  				// with explicit OAS2FUNC nodes.
   323  				Fatalf("expected %v return arguments, have %v", want, got)
   324  			}
   325  
   326  			if samelist(rl, n.List.Slice()) {
   327  				// special return in disguise
   328  				n.List.Set(nil)
   329  
   330  				break
   331  			}
   332  
   333  			// move function calls out, to make reorder3's job easier.
   334  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   335  
   336  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   337  			n.List.Set(reorder3(ll))
   338  			break
   339  		}
   340  
   341  		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
   342  		n.List.Set(ll)
   343  
   344  	case ORETJMP:
   345  		break
   346  
   347  	case OSELECT:
   348  		walkselect(n)
   349  
   350  	case OSWITCH:
   351  		walkswitch(n)
   352  
   353  	case ORANGE:
   354  		n = walkrange(n)
   355  
   356  	case OXFALL:
   357  		yyerror("fallthrough statement out of place")
   358  		n.Op = OFALL
   359  	}
   360  
   361  	if n.Op == ONAME {
   362  		Fatalf("walkstmt ended up with name: %+v", n)
   363  	}
   364  	return n
   365  }
   366  
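        // isSmallMakeSlice reports whether n is a make([]T, len, cap) call
        // whose constant length and capacity keep the backing array smaller
        // than 1<<16 bytes; only such slices are candidates for stack
        // allocation in the OMAKESLICE case of walkexpr.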
   367  func isSmallMakeSlice(n *Node) bool {
   368  	if n.Op != OMAKESLICE {
   369  		return false
   370  	}
   371  	l := n.Left
   372  	r := n.Right
   373  	if r == nil {
   374  		r = l
   375  	}
   376  	t := n.Type
   377  
   378  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
   379  }
   380  
   381  // walk the whole tree of the body of an
   382  // expression or simple statement.
   383  // the types of expressions are calculated.
   384  // compile-time constants are evaluated.
   385  // complex side effects like statements are appended to init
   386  func walkexprlist(s []*Node, init *Nodes) {
   387  	for i := range s {
   388  		s[i] = walkexpr(s[i], init)
   389  	}
   390  }
   391  
   392  func walkexprlistsafe(s []*Node, init *Nodes) {
   393  	for i, n := range s {
   394  		s[i] = safeexpr(n, init)
   395  		s[i] = walkexpr(s[i], init)
   396  	}
   397  }
   398  
   399  func walkexprlistcheap(s []*Node, init *Nodes) {
   400  	for i, n := range s {
   401  		s[i] = cheapexpr(n, init)
   402  		s[i] = walkexpr(s[i], init)
   403  	}
   404  }
   405  
   406  // Build name of function for interface conversion.
   407  // Not all names are possible
   408  // (e.g., we'll never generate convE2E or convE2I or convI2E).
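        // For example, converting a string to an empty interface uses
        // convT2Estring, while converting it to a non-empty interface
        // uses convT2Istring.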
   409  func convFuncName(from, to *types.Type) string {
   410  	tkind := to.Tie()
   411  	switch from.Tie() {
   412  	case 'I':
   413  		switch tkind {
   414  		case 'I':
   415  			return "convI2I"
   416  		}
   417  	case 'T':
   418  		switch tkind {
   419  		case 'E':
   420  			switch {
   421  			case from.Size() == 2 && from.Align == 2:
   422  				return "convT2E16"
   423  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   424  				return "convT2E32"
   425  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   426  				return "convT2E64"
   427  			case from.IsString():
   428  				return "convT2Estring"
   429  			case from.IsSlice():
   430  				return "convT2Eslice"
   431  			case !types.Haspointers(from):
   432  				return "convT2Enoptr"
   433  			}
   434  			return "convT2E"
   435  		case 'I':
   436  			switch {
   437  			case from.Size() == 2 && from.Align == 2:
   438  				return "convT2I16"
   439  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   440  				return "convT2I32"
   441  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   442  				return "convT2I64"
   443  			case from.IsString():
   444  				return "convT2Istring"
   445  			case from.IsSlice():
   446  				return "convT2Islice"
   447  			case !types.Haspointers(from):
   448  				return "convT2Inoptr"
   449  			}
   450  			return "convT2I"
   451  		}
   452  	}
   453  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   454  	panic("unreachable")
   455  }
   456  
   457  // The result of walkexpr MUST be assigned back to n, e.g.
   458  // 	n.Left = walkexpr(n.Left, init)
   459  func walkexpr(n *Node, init *Nodes) *Node {
   460  	if n == nil {
   461  		return n
   462  	}
   463  
   464  	// Eagerly checkwidth all expressions for the back end.
   465  	if n.Type != nil && !n.Type.WidthCalculated() {
   466  		switch n.Type.Etype {
   467  		case TBLANK, TNIL, TIDEAL:
   468  		default:
   469  			checkwidth(n.Type)
   470  		}
   471  	}
   472  
   473  	if init == &n.Ninit {
   474  		// not okay to use n->ninit when walking n,
   475  		// because we might replace n with some other node
   476  		// and would lose the init list.
   477  		Fatalf("walkexpr init == &n->ninit")
   478  	}
   479  
   480  	if n.Ninit.Len() != 0 {
   481  		walkstmtlist(n.Ninit.Slice())
   482  		init.AppendNodes(&n.Ninit)
   483  	}
   484  
   485  	lno := setlineno(n)
   486  
   487  	if Debug['w'] > 1 {
   488  		Dump("walk-before", n)
   489  	}
   490  
   491  	if n.Typecheck() != 1 {
   492  		Fatalf("missed typecheck: %+v", n)
   493  	}
   494  
   495  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   496  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   497  		nn = typecheck(nn, Erv)
   498  		nn = walkexpr(nn, init)
   499  		nn.Left.SetNonNil(true)
   500  		return nn
   501  	}
   502  
   503  opswitch:
   504  	switch n.Op {
   505  	default:
   506  		Dump("walk", n)
   507  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   508  
   509  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   510  
   511  	case OTYPE, ONAME, OLITERAL:
   512  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   513  		// Perhaps refactor to use Node.mayBeShared for these instead.
   514  		// If these return early, make sure to still call
   515  		// stringsym for constant strings.
   516  
   517  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   518  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   519  		n.Left = walkexpr(n.Left, init)
   520  
   521  	case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR:
   522  		n.Left = walkexpr(n.Left, init)
   523  		n.Right = walkexpr(n.Right, init)
   524  
   525  	case ODOT:
   526  		usefield(n)
   527  		n.Left = walkexpr(n.Left, init)
   528  
   529  	case ODOTTYPE, ODOTTYPE2:
   530  		n.Left = walkexpr(n.Left, init)
   531  		// Set up interface type addresses for back end.
   532  		n.Right = typename(n.Type)
   533  		if n.Op == ODOTTYPE {
   534  			n.Right.Right = typename(n.Left.Type)
   535  		}
   536  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   537  			n.List.Set1(itabname(n.Type, n.Left.Type))
   538  		}
   539  
   540  	case ODOTPTR:
   541  		usefield(n)
   542  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   543  			// No actual copy will be generated, so emit an explicit nil check.
   544  			n.Left = cheapexpr(n.Left, init)
   545  
   546  			checknil(n.Left, init)
   547  		}
   548  
   549  		n.Left = walkexpr(n.Left, init)
   550  
   551  	case OLEN, OCAP:
   552  		n.Left = walkexpr(n.Left, init)
   553  
   554  		// replace len(*[10]int) with 10.
   555  		// delayed until now to preserve side effects.
   556  		t := n.Left.Type
   557  
   558  		if t.IsPtr() {
   559  			t = t.Elem()
   560  		}
   561  		if t.IsArray() {
   562  			safeexpr(n.Left, init)
   563  			nodconst(n, n.Type, t.NumElem())
   564  			n.SetTypecheck(1)
   565  		}
   566  
   567  	case OLSH, ORSH:
   568  		n.Left = walkexpr(n.Left, init)
   569  		n.Right = walkexpr(n.Right, init)
   570  		t := n.Left.Type
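        		// The shift is bounded if its count is provably less than the
        		// width of the left operand in bits, letting the back end elide
        		// the bounds check on the shift amount.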
   571  		n.SetBounded(bounded(n.Right, 8*t.Width))
   572  		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
   573  			Warn("shift bounds check elided")
   574  		}
   575  
   576  	case OCOMPLEX:
   577  		// Use results from call expression as arguments for complex.
   578  		if n.Left == nil && n.Right == nil {
   579  			n.Left = n.List.First()
   580  			n.Right = n.List.Second()
   581  		}
   582  		n.Left = walkexpr(n.Left, init)
   583  		n.Right = walkexpr(n.Right, init)
   584  
   585  	case OEQ, ONE:
   586  		n.Left = walkexpr(n.Left, init)
   587  		n.Right = walkexpr(n.Right, init)
   588  
   589  		// Disable safemode while compiling this code: the code we
   590  		// generate internally can refer to unsafe.Pointer.
   591  		// In this case it can happen if we need to generate an ==
   592  		// for a struct containing a reflect.Value, which itself has
   593  		// an unexported field of type unsafe.Pointer.
   594  		old_safemode := safemode
   595  		safemode = false
   596  		n = walkcompare(n, init)
   597  		safemode = old_safemode
   598  
   599  	case OANDAND, OOROR:
   600  		n.Left = walkexpr(n.Left, init)
   601  
   602  		// cannot put side effects from n.Right on init,
   603  		// because they cannot run before n.Left is checked.
   604  		// save elsewhere and store on the eventual n.Right.
   605  		var ll Nodes
   606  
   607  		n.Right = walkexpr(n.Right, &ll)
   608  		n.Right = addinit(n.Right, ll.Slice())
   609  		n = walkinrange(n, init)
   610  
   611  	case OPRINT, OPRINTN:
   612  		walkexprlist(n.List.Slice(), init)
   613  		n = walkprint(n, init)
   614  
   615  	case OPANIC:
   616  		n = mkcall("gopanic", nil, init, n.Left)
   617  
   618  	case ORECOVER:
   619  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   620  
   621  	case OCLOSUREVAR, OCFUNC:
   622  		n.SetAddable(true)
   623  
   624  	case OCALLINTER:
   625  		usemethod(n)
   626  		t := n.Left.Type
   627  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   628  			break
   629  		}
   630  		n.Left = walkexpr(n.Left, init)
   631  		walkexprlist(n.List.Slice(), init)
   632  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   633  		n.List.Set(reorder1(ll))
   634  
   635  	case OCALLFUNC:
   636  		if n.Left.Op == OCLOSURE {
   637  			// Transform direct call of a closure to call of a normal function.
   638  			// transformclosure already did all preparation work.
   639  
   640  			// Prepend captured variables to argument list.
   641  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   642  
   643  			n.Left.Func.Enter.Set(nil)
   644  
   645  			// Replace OCLOSURE with ONAME/PFUNC.
   646  			n.Left = n.Left.Func.Closure.Func.Nname
   647  
   648  			// Update type of OCALLFUNC node.
   649  			// Output arguments have not changed, but their offsets could have.
   650  			if n.Left.Type.Results().NumFields() == 1 {
   651  				n.Type = n.Left.Type.Results().Field(0).Type
   652  			} else {
   653  				n.Type = n.Left.Type.Results()
   654  			}
   655  		}
   656  
   657  		t := n.Left.Type
   658  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   659  			break
   660  		}
   661  
   662  		n.Left = walkexpr(n.Left, init)
   663  		walkexprlist(n.List.Slice(), init)
   664  
   665  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   666  		n.List.Set(reorder1(ll))
   667  
   668  	case OCALLMETH:
   669  		t := n.Left.Type
   670  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   671  			break
   672  		}
   673  		n.Left = walkexpr(n.Left, init)
   674  		walkexprlist(n.List.Slice(), init)
   675  		ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
   676  		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   677  		ll = append(ll, lr...)
   678  		n.Left.Left = nil
   679  		updateHasCall(n.Left)
   680  		n.List.Set(reorder1(ll))
   681  
   682  	case OAS:
   683  		init.AppendNodes(&n.Ninit)
   684  
   685  		n.Left = walkexpr(n.Left, init)
   686  		n.Left = safeexpr(n.Left, init)
   687  
   688  		if oaslit(n, init) {
   689  			break
   690  		}
   691  
   692  		if n.Right == nil {
   693  			// TODO(austin): Check all "implicit zeroing"
   694  			break
   695  		}
   696  
   697  		if !instrumenting && iszero(n.Right) {
   698  			break
   699  		}
   700  
   701  		switch n.Right.Op {
   702  		default:
   703  			n.Right = walkexpr(n.Right, init)
   704  
   705  		case ORECV:
   706  			// x = <-c; n.Left is x, n.Right.Left is c.
   707  			// orderstmt made sure x is addressable.
   708  			n.Right.Left = walkexpr(n.Right.Left, init)
   709  
   710  			n1 := nod(OADDR, n.Left, nil)
   711  			r := n.Right.Left // the channel
   712  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   713  			n = walkexpr(n, init)
   714  			break opswitch
   715  
   716  		case OAPPEND:
   717  			// x = append(...)
   718  			r := n.Right
   719  			if r.Type.Elem().NotInHeap() {
   720  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   721  			}
   722  			if r.Isddd() {
   723  				r = appendslice(r, init) // also works for append(slice, string).
   724  			} else {
   725  				r = walkappend(r, init, n)
   726  			}
   727  			n.Right = r
   728  			if r.Op == OAPPEND {
   729  				// Left in place for back end.
   730  				// Do not add a new write barrier.
   731  				// Set up address of type for back end.
   732  				r.Left = typename(r.Type.Elem())
   733  				break opswitch
   734  			}
   735  			// Otherwise, lowered for race detector.
   736  			// Treat as ordinary assignment.
   737  		}
   738  
   739  		if n.Left != nil && n.Right != nil {
   740  			n = convas(n, init)
   741  		}
   742  
   743  	case OAS2:
   744  		init.AppendNodes(&n.Ninit)
   745  		walkexprlistsafe(n.List.Slice(), init)
   746  		walkexprlistsafe(n.Rlist.Slice(), init)
   747  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   748  		ll = reorder3(ll)
   749  		n = liststmt(ll)
   750  
   751  	// a,b,... = fn()
   752  	case OAS2FUNC:
   753  		init.AppendNodes(&n.Ninit)
   754  
   755  		r := n.Rlist.First()
   756  		walkexprlistsafe(n.List.Slice(), init)
   757  		r = walkexpr(r, init)
   758  
   759  		if isIntrinsicCall(r) {
   760  			n.Rlist.Set1(r)
   761  			break
   762  		}
   763  		init.Append(r)
   764  
   765  		ll := ascompatet(n.List, r.Type)
   766  		n = liststmt(ll)
   767  
   768  	// x, y = <-c
   769  	// orderstmt made sure x is addressable.
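        	// The assignment becomes ok = chanrecv2(c, &x), with nil passed
        	// instead of &x when x is the blank identifier.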
   770  	case OAS2RECV:
   771  		init.AppendNodes(&n.Ninit)
   772  
   773  		r := n.Rlist.First()
   774  		walkexprlistsafe(n.List.Slice(), init)
   775  		r.Left = walkexpr(r.Left, init)
   776  		var n1 *Node
   777  		if isblank(n.List.First()) {
   778  			n1 = nodnil()
   779  		} else {
   780  			n1 = nod(OADDR, n.List.First(), nil)
   781  		}
   782  		n1.Etype = 1 // addr does not escape
   783  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   784  		ok := n.List.Second()
   785  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   786  		n = nod(OAS, ok, call)
   787  		n = typecheck(n, Etop)
   788  
   789  	// a,b = m[i]
   790  	case OAS2MAPR:
   791  		init.AppendNodes(&n.Ninit)
   792  
   793  		r := n.Rlist.First()
   794  		walkexprlistsafe(n.List.Slice(), init)
   795  		r.Left = walkexpr(r.Left, init)
   796  		r.Right = walkexpr(r.Right, init)
   797  		t := r.Left.Type
   798  
   799  		fast := mapfast(t)
   800  		var key *Node
   801  		if fast != mapslow {
   802  			// fast versions take key by value
   803  			key = r.Right
   804  		} else {
   805  			// standard version takes key by reference
   806  			// orderexpr made sure key is addressable.
   807  			key = nod(OADDR, r.Right, nil)
   808  		}
   809  
   810  		// from:
   811  		//   a,b = m[i]
   812  		// to:
   813  		//   var,b = mapaccess2*(t, m, i)
   814  		//   a = *var
   815  		a := n.List.First()
   816  
   817  		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
   818  			fn := mapfn(mapaccess2[fast], t)
   819  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   820  		} else {
   821  			fn := mapfn("mapaccess2_fat", t)
   822  			z := zeroaddr(w)
   823  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   824  		}
   825  
   826  		// mapaccess2* returns a typed bool, but due to spec changes,
   827  		// the boolean result of i.(T) is now untyped so we make it the
   828  		// same type as the variable on the lhs.
   829  		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
   830  			r.Type.Field(1).Type = ok.Type
   831  		}
   832  		n.Rlist.Set1(r)
   833  		n.Op = OAS2FUNC
   834  
   835  		// don't generate a = *var if a is _
   836  		if !isblank(a) {
   837  			var_ := temp(types.NewPtr(t.Val()))
   838  			var_.SetTypecheck(1)
   839  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   840  			n.List.SetFirst(var_)
   841  			n = walkexpr(n, init)
   842  			init.Append(n)
   843  			n = nod(OAS, a, nod(OIND, var_, nil))
   844  		}
   845  
   846  		n = typecheck(n, Etop)
   847  		n = walkexpr(n, init)
   848  
   849  	case ODELETE:
   850  		init.AppendNodes(&n.Ninit)
   851  		map_ := n.List.First()
   852  		key := n.List.Second()
   853  		map_ = walkexpr(map_, init)
   854  		key = walkexpr(key, init)
   855  
   856  		t := map_.Type
   857  		fast := mapfast(t)
   858  		if fast == mapslow {
   859  			// orderstmt made sure key is addressable.
   860  			key = nod(OADDR, key, nil)
   861  		}
   862  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   863  
   864  	case OAS2DOTTYPE:
   865  		walkexprlistsafe(n.List.Slice(), init)
   866  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   867  
   868  	case OCONVIFACE:
   869  		n.Left = walkexpr(n.Left, init)
   870  
   871  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
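        		// For example, converting a *T to interface{} just pairs the
        		// type descriptor (or itab) with the pointer value; no runtime
        		// call or allocation is needed.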
   872  		if isdirectiface(n.Left.Type) {
   873  			var t *Node
   874  			if n.Type.IsEmptyInterface() {
   875  				t = typename(n.Left.Type)
   876  			} else {
   877  				t = itabname(n.Left.Type, n.Type)
   878  			}
   879  			l := nod(OEFACE, t, n.Left)
   880  			l.Type = n.Type
   881  			l.SetTypecheck(n.Typecheck())
   882  			n = l
   883  			break
   884  		}
   885  
   886  		if staticbytes == nil {
   887  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   888  			staticbytes.SetClass(PEXTERN)
   889  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   890  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   891  			zerobase.SetClass(PEXTERN)
   892  			zerobase.Type = types.Types[TUINTPTR]
   893  		}
   894  
   895  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   896  		// by using an existing addressable value identical to n.Left
   897  		// or creating one on the stack.
   898  		var value *Node
   899  		switch {
   900  		case n.Left.Type.Size() == 0:
   901  			// n.Left is zero-sized. Use zerobase.
   902  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   903  			value = zerobase
   904  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   905  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   906  			n.Left = cheapexpr(n.Left, init)
   907  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   908  			value.SetBounded(true)
   909  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   910  			// n.Left is a readonly global; use it directly.
   911  			value = n.Left
   912  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   913  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   914  			value = temp(n.Left.Type)
   915  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   916  		}
   917  
   918  		if value != nil {
   919  			// Value is identical to n.Left.
   920  			// Construct the interface directly: {type/itab, &value}.
   921  			var t *Node
   922  			if n.Type.IsEmptyInterface() {
   923  				t = typename(n.Left.Type)
   924  			} else {
   925  				t = itabname(n.Left.Type, n.Type)
   926  			}
   927  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   928  			l.Type = n.Type
   929  			l.SetTypecheck(n.Typecheck())
   930  			n = l
   931  			break
   932  		}
   933  
   934  		// Implement interface to empty interface conversion.
   935  		// tmp = i.itab
   936  		// if tmp != nil {
   937  		//    tmp = tmp.type
   938  		// }
   939  		// e = iface{tmp, i.data}
   940  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   941  			// Evaluate the input interface.
   942  			c := temp(n.Left.Type)
   943  			init.Append(nod(OAS, c, n.Left))
   944  
   945  			// Get the itab out of the interface.
   946  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   947  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   948  
   949  			// Get the type out of the itab.
   950  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   951  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   952  			init.Append(nif)
   953  
   954  			// Build the result.
   955  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   956  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   957  			e.SetTypecheck(1)
   958  			n = e
   959  			break
   960  		}
   961  
   962  		var ll []*Node
   963  		if n.Type.IsEmptyInterface() {
   964  			if !n.Left.Type.IsInterface() {
   965  				ll = append(ll, typename(n.Left.Type))
   966  			}
   967  		} else {
   968  			if n.Left.Type.IsInterface() {
   969  				ll = append(ll, typename(n.Type))
   970  			} else {
   971  				ll = append(ll, itabname(n.Left.Type, n.Type))
   972  			}
   973  		}
   974  
   975  		if n.Left.Type.IsInterface() {
   976  			ll = append(ll, n.Left)
   977  		} else {
   978  			// regular types are passed by reference to avoid C vararg calls
   979  			// orderexpr arranged for n.Left to be a temporary for all
   980  			// the conversions it could see. Comparison of an interface
   981  			// with a non-interface, especially in a switch on interface value
   982  			// with non-interface cases, is not visible to orderstmt, so we
   983  			// have to fall back on allocating a temp here.
   984  			if islvalue(n.Left) {
   985  				ll = append(ll, nod(OADDR, n.Left, nil))
   986  			} else {
   987  				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
   988  			}
   989  			dowidth(n.Left.Type)
   990  		}
   991  
   992  		fn := syslook(convFuncName(n.Left.Type, n.Type))
   993  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   994  		dowidth(fn.Type)
   995  		n = nod(OCALL, fn, nil)
   996  		n.List.Set(ll)
   997  		n = typecheck(n, Erv)
   998  		n = walkexpr(n, init)
   999  
  1000  	case OCONV, OCONVNOP:
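        		// On ARM, MIPS, and 386, conversions between floating-point
        		// values and 64-bit integers are rewritten into runtime calls
        		// such as float64toint64 and int64tofloat64.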
  1001  		if thearch.LinkArch.Family == sys.ARM || thearch.LinkArch.Family == sys.MIPS {
  1002  			if n.Left.Type.IsFloat() {
  1003  				if n.Type.Etype == TINT64 {
  1004  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1005  					break
  1006  				}
  1007  
  1008  				if n.Type.Etype == TUINT64 {
  1009  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1010  					break
  1011  				}
  1012  			}
  1013  
  1014  			if n.Type.IsFloat() {
  1015  				if n.Left.Type.Etype == TINT64 {
  1016  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1017  					break
  1018  				}
  1019  
  1020  				if n.Left.Type.Etype == TUINT64 {
  1021  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1022  					break
  1023  				}
  1024  			}
  1025  		}
  1026  
  1027  		if thearch.LinkArch.Family == sys.I386 {
  1028  			if n.Left.Type.IsFloat() {
  1029  				if n.Type.Etype == TINT64 {
  1030  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1031  					break
  1032  				}
  1033  
  1034  				if n.Type.Etype == TUINT64 {
  1035  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1036  					break
  1037  				}
  1038  				if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
  1039  					n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1040  					break
  1041  				}
  1042  			}
  1043  			if n.Type.IsFloat() {
  1044  				if n.Left.Type.Etype == TINT64 {
  1045  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1046  					break
  1047  				}
  1048  
  1049  				if n.Left.Type.Etype == TUINT64 {
  1050  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1051  					break
  1052  				}
  1053  				if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
  1054  					n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
  1055  					break
  1056  				}
  1057  			}
  1058  		}
  1059  
  1060  		n.Left = walkexpr(n.Left, init)
  1061  
  1062  	case OANDNOT:
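        		// Rewrite x &^ y as x & ^y.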
  1063  		n.Left = walkexpr(n.Left, init)
  1064  		n.Op = OAND
  1065  		n.Right = nod(OCOM, n.Right, nil)
  1066  		n.Right = typecheck(n.Right, Erv)
  1067  		n.Right = walkexpr(n.Right, init)
  1068  
  1069  	case ODIV, OMOD:
  1070  		n.Left = walkexpr(n.Left, init)
  1071  		n.Right = walkexpr(n.Right, init)
  1072  
  1073  		// rewrite complex div into function call.
  1074  		et := n.Left.Type.Etype
  1075  
  1076  		if isComplex[et] && n.Op == ODIV {
  1077  			t := n.Type
  1078  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1079  			n = conv(n, t)
  1080  			break
  1081  		}
  1082  
  1083  		// Nothing to do for float divisions.
  1084  		if isFloat[et] {
  1085  			break
  1086  		}
  1087  
  1088  		// rewrite 64-bit div and mod on 32-bit architectures.
  1089  		// TODO: Remove this code once we can introduce
  1090  		// runtime calls late in SSA processing.
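        		// For example, on 32-bit targets a / b with int64 operands
        		// becomes int64div(a, b) and a % b with uint64 operands becomes
        		// uint64mod(a, b); division by a constant power of two is left
        		// for the SSA back end.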
  1091  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1092  			if n.Right.Op == OLITERAL {
  1093  				// Leave div/mod by constant powers of 2.
  1094  				// The SSA backend will handle those.
  1095  				switch et {
  1096  				case TINT64:
  1097  					c := n.Right.Int64()
  1098  					if c < 0 {
  1099  						c = -c
  1100  					}
  1101  					if c != 0 && c&(c-1) == 0 {
  1102  						break opswitch
  1103  					}
  1104  				case TUINT64:
  1105  					c := uint64(n.Right.Int64())
  1106  					if c != 0 && c&(c-1) == 0 {
  1107  						break opswitch
  1108  					}
  1109  				}
  1110  			}
  1111  			var fn string
  1112  			if et == TINT64 {
  1113  				fn = "int64"
  1114  			} else {
  1115  				fn = "uint64"
  1116  			}
  1117  			if n.Op == ODIV {
  1118  				fn += "div"
  1119  			} else {
  1120  				fn += "mod"
  1121  			}
  1122  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1123  		}
  1124  
  1125  	case OINDEX:
  1126  		n.Left = walkexpr(n.Left, init)
  1127  
  1128  		// save the original node for bounds checking elision.
  1129  		// If it was an ODIV/OMOD, walk might rewrite it.
  1130  		r := n.Right
  1131  
  1132  		n.Right = walkexpr(n.Right, init)
  1133  
  1134  		// if range of type cannot exceed static array bound,
  1135  		// disable bounds check.
  1136  		if n.Bounded() {
  1137  			break
  1138  		}
  1139  		t := n.Left.Type
  1140  		if t != nil && t.IsPtr() {
  1141  			t = t.Elem()
  1142  		}
  1143  		if t.IsArray() {
  1144  			n.SetBounded(bounded(r, t.NumElem()))
  1145  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1146  				Warn("index bounds check elided")
  1147  			}
  1148  			if smallintconst(n.Right) && !n.Bounded() {
  1149  				yyerror("index out of bounds")
  1150  			}
  1151  		} else if Isconst(n.Left, CTSTR) {
  1152  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1153  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1154  				Warn("index bounds check elided")
  1155  			}
  1156  			if smallintconst(n.Right) && !n.Bounded() {
  1157  				yyerror("index out of bounds")
  1158  			}
  1159  		}
  1160  
  1161  		if Isconst(n.Right, CTINT) {
  1162  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1163  				yyerror("index out of bounds")
  1164  			}
  1165  		}
  1166  
  1167  	case OINDEXMAP:
  1168  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
  1169  		n.Left = walkexpr(n.Left, init)
  1170  		n.Right = walkexpr(n.Right, init)
  1171  		map_ := n.Left
  1172  		key := n.Right
  1173  		t := map_.Type
  1174  		if n.Etype == 1 {
  1175  			// This m[k] expression is on the left-hand side of an assignment.
  1176  			fast := mapfast(t)
  1177  			if fast == mapslow {
  1178  				// standard version takes key by reference.
  1179  				// orderexpr made sure key is addressable.
  1180  				key = nod(OADDR, key, nil)
  1181  			}
  1182  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1183  		} else {
  1184  			// m[k] is not the target of an assignment.
  1185  			fast := mapfast(t)
  1186  			if fast == mapslow {
  1187  				// standard version takes key by reference.
  1188  				// orderexpr made sure key is addressable.
  1189  				key = nod(OADDR, key, nil)
  1190  			}
  1191  
  1192  			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
  1193  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
  1194  			} else {
  1195  				z := zeroaddr(w)
  1196  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
  1197  			}
  1198  		}
  1199  		n.Type = types.NewPtr(t.Val())
  1200  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1201  		n = nod(OIND, n, nil)
  1202  		n.Type = t.Val()
  1203  		n.SetTypecheck(1)
  1204  
  1205  	case ORECV:
  1206  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1207  
  1208  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1209  		n.Left = walkexpr(n.Left, init)
  1210  		low, high, max := n.SliceBounds()
  1211  		low = walkexpr(low, init)
  1212  		if low != nil && iszero(low) {
  1213  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1214  			low = nil
  1215  		}
  1216  		high = walkexpr(high, init)
  1217  		max = walkexpr(max, init)
  1218  		n.SetSliceBounds(low, high, max)
  1219  		if n.Op.IsSlice3() {
  1220  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1221  				// Reduce x[i:j:cap(x)] to x[i:j].
  1222  				if n.Op == OSLICE3 {
  1223  					n.Op = OSLICE
  1224  				} else {
  1225  					n.Op = OSLICEARR
  1226  				}
  1227  				n = reduceSlice(n)
  1228  			}
  1229  		} else {
  1230  			n = reduceSlice(n)
  1231  		}
  1232  
  1233  	case ONEW:
  1234  		if n.Esc == EscNone {
  1235  			if n.Type.Elem().Width >= 1<<16 {
  1236  				Fatalf("large ONEW with EscNone: %v", n)
  1237  			}
  1238  			r := temp(n.Type.Elem())
  1239  			r = nod(OAS, r, nil) // zero temp
  1240  			r = typecheck(r, Etop)
  1241  			init.Append(r)
  1242  			r = nod(OADDR, r.Left, nil)
  1243  			r = typecheck(r, Erv)
  1244  			n = r
  1245  		} else {
  1246  			n = callnew(n.Type.Elem())
  1247  		}
  1248  
  1249  	case OCMPSTR:
  1250  		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
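        		// is rewritten to len(s) == 0 (or len(s) != 0 for ONE), so the
        		// concatenation is never materialized.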
  1251  		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  1252  			// TODO(marvin): Fix Node.EType type union.
  1253  			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  1254  			r = typecheck(r, Erv)
  1255  			r = walkexpr(r, init)
  1256  			r.Type = n.Type
  1257  			n = r
  1258  			break
  1259  		}
  1260  
  1261  		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
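        		// For example, s == "ab" becomes
        		//	len(s) == 2 && s[0] == 'a' && s[1] == 'b'
        		// where adjacent byte loads may be combined on amd64 and 386.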
  1262  		var cs, ncs *Node // const string, non-const string
  1263  		switch {
  1264  		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  1265  			// ignore; will be constant evaluated
  1266  		case Isconst(n.Left, CTSTR):
  1267  			cs = n.Left
  1268  			ncs = n.Right
  1269  		case Isconst(n.Right, CTSTR):
  1270  			cs = n.Right
  1271  			ncs = n.Left
  1272  		}
  1273  		if cs != nil {
  1274  			cmp := Op(n.Etype)
  1275  			// maxRewriteLen was chosen empirically.
  1276  			// It is the value that minimizes cmd/go file size
  1277  			// across most architectures.
  1278  			// See the commit description for CL 26758 for details.
  1279  			maxRewriteLen := 6
  1280  			// Some architectures can load an unaligned byte sequence as one word.
  1281  			// So we can cover longer strings with the same amount of code.
  1282  			canCombineLoads := false
  1283  			combine64bit := false
  1284  			// TODO: does this improve performance on any other architectures?
  1285  			switch thearch.LinkArch.Family {
  1286  			case sys.AMD64:
  1287  				// Larger compares require longer instructions, so keep this reasonably low.
  1288  				// Data from CL 26758 shows that longer strings are rare.
  1289  				// If we really want, we can do 16-byte SSE comparisons in the future.
  1290  				maxRewriteLen = 16
  1291  				canCombineLoads = true
  1292  				combine64bit = true
  1293  			case sys.I386:
  1294  				maxRewriteLen = 8
  1295  				canCombineLoads = true
  1296  			}
  1297  			var and Op
  1298  			switch cmp {
  1299  			case OEQ:
  1300  				and = OANDAND
  1301  			case ONE:
  1302  				and = OOROR
  1303  			default:
  1304  				// Don't do byte-wise comparisons for <, <=, etc.
  1305  				// They're fairly complicated.
  1306  				// Length-only checks are ok, though.
  1307  				maxRewriteLen = 0
  1308  			}
  1309  			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  1310  				if len(s) > 0 {
  1311  					ncs = safeexpr(ncs, init)
  1312  				}
  1313  				// TODO(marvin): Fix Node.EType type union.
  1314  				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  1315  				remains := len(s)
  1316  				for i := 0; remains > 0; {
  1317  					if remains == 1 || !canCombineLoads {
  1318  						cb := nodintconst(int64(s[i]))
  1319  						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  1320  						r = nod(and, r, nod(cmp, ncb, cb))
  1321  						remains--
  1322  						i++
  1323  						continue
  1324  					}
  1325  					var step int
  1326  					var convType *types.Type
  1327  					switch {
  1328  					case remains >= 8 && combine64bit:
  1329  						convType = types.Types[TINT64]
  1330  						step = 8
  1331  					case remains >= 4:
  1332  						convType = types.Types[TUINT32]
  1333  						step = 4
  1334  					case remains >= 2:
  1335  						convType = types.Types[TUINT16]
  1336  						step = 2
  1337  					}
  1338  					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  1339  					ncsubstr = conv(ncsubstr, convType)
  1340  					csubstr := int64(s[i])
  1341  					// Calculate the large constant from bytes as a sequence of shifts and ors.
  1342  					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  1343  					// ssa will combine this into a single large load.
  1344  					for offset := 1; offset < step; offset++ {
  1345  						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  1346  						b = conv(b, convType)
  1347  						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  1348  						ncsubstr = nod(OOR, ncsubstr, b)
  1349  						csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset)
  1350  					}
  1351  					csubstrPart := nodintconst(csubstr)
  1352  					// Compare "step" bytes at once.
  1353  					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  1354  					remains -= step
  1355  					i += step
  1356  				}
  1357  				r = typecheck(r, Erv)
  1358  				r = walkexpr(r, init)
  1359  				r.Type = n.Type
  1360  				n = r
  1361  				break
  1362  			}
  1363  		}
  1364  
  1365  		var r *Node
  1366  		// TODO(marvin): Fix Node.EType type union.
  1367  		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
  1368  			// prepare for rewrite below
  1369  			n.Left = cheapexpr(n.Left, init)
  1370  			n.Right = cheapexpr(n.Right, init)
  1371  
  1372  			r = mkcall("eqstring", types.Types[TBOOL], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1373  
  1374  			// quick check of len before full compare for == or !=
  1375  			// eqstring assumes that the lengths are equal
  1376  			// TODO(marvin): Fix Node.EType type union.
  1377  			if Op(n.Etype) == OEQ {
  1378  				// len(left) == len(right) && eqstring(left, right)
  1379  				r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1380  			} else {
  1381  				// len(left) != len(right) || !eqstring(left, right)
  1382  				r = nod(ONOT, r, nil)
  1383  				r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1384  			}
  1385  
  1386  			r = typecheck(r, Erv)
  1387  			r = walkexpr(r, nil)
  1388  		} else {
  1389  			// sys_cmpstring(s1, s2) :: 0
  1390  			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1391  			// TODO(marvin): Fix Node.EType type union.
  1392  			r = nod(Op(n.Etype), r, nodintconst(0))
  1393  		}
  1394  
  1395  		r = typecheck(r, Erv)
  1396  		if !n.Type.IsBoolean() {
  1397  			Fatalf("cmp %v", n.Type)
  1398  		}
  1399  		r.Type = n.Type
  1400  		n = r
  1401  
  1402  	case OADDSTR:
  1403  		n = addstr(n, init)
  1404  
  1405  	case OAPPEND:
  1406  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1407  		Fatalf("append outside assignment")
  1408  
  1409  	case OCOPY:
  1410  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1411  
  1412  		// cannot use chanfn - closechan takes any, not chan any
  1413  	case OCLOSE:
  1414  		fn := syslook("closechan")
  1415  
  1416  		fn = substArgTypes(fn, n.Left.Type)
  1417  		n = mkcall1(fn, nil, init, n.Left)
  1418  
  1419  	case OMAKECHAN:
  1420  		// When size fits into int, use makechan instead of
  1421  		// makechan64, which is faster and shorter on 32 bit platforms.
  1422  		size := n.Left
  1423  		fnname := "makechan64"
  1424  		argtype := types.Types[TINT64]
  1425  
  1426  		// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1427  		// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1428  		// will be handled by the negative range checks in makechan during runtime.
  1429  		if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1430  			fnname = "makechan"
  1431  			argtype = types.Types[TINT]
  1432  		}
  1433  
  1434  		n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
  1435  
  1436  	case OMAKEMAP:
  1437  		t := n.Type
  1438  
  1439  		a := nodnil() // hmap buffer
  1440  		r := nodnil() // bucket buffer
  1441  		if n.Esc == EscNone {
  1442  			// Allocate hmap buffer on stack.
  1443  			var_ := temp(hmap(t))
  1444  
  1445  			a = nod(OAS, var_, nil) // zero temp
  1446  			a = typecheck(a, Etop)
  1447  			init.Append(a)
  1448  			a = nod(OADDR, var_, nil)
  1449  
  1450  			// Allocate one bucket on stack.
  1451  			// Maximum key/value size is 128 bytes, larger objects
  1452  			// are stored with an indirection. So max bucket size is 2048+eps.
  1453  			var_ = temp(mapbucket(t))
  1454  
  1455  			r = nod(OAS, var_, nil) // zero temp
  1456  			r = typecheck(r, Etop)
  1457  			init.Append(r)
  1458  			r = nod(OADDR, var_, nil)
  1459  		}
  1460  
  1461  		fn := syslook("makemap")
  1462  		fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
  1463  		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]), a, r)
  1464  
  1465  	case OMAKESLICE:
  1466  		l := n.Left
  1467  		r := n.Right
  1468  		if r == nil {
  1469  			r = safeexpr(l, init)
  1470  			l = r
  1471  		}
  1472  		t := n.Type
  1473  		if n.Esc == EscNone {
  1474  			if !isSmallMakeSlice(n) {
  1475  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1476  			}
  1477  			// var arr [r]T
  1478  			// n = arr[:l]
  1479  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1480  			var_ := temp(t)
  1481  			a := nod(OAS, var_, nil) // zero temp
  1482  			a = typecheck(a, Etop)
  1483  			init.Append(a)
  1484  			r := nod(OSLICE, var_, nil) // arr[:l]
  1485  			r.SetSliceBounds(nil, l, nil)
  1486  			r = conv(r, n.Type) // in case n.Type is named.
  1487  			r = typecheck(r, Erv)
  1488  			r = walkexpr(r, init)
  1489  			n = r
  1490  		} else {
  1491  			// n escapes; set up a call to makeslice.
  1492  			// When len and cap can fit into int, use makeslice instead of
  1493  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1494  
  1495  			if t.Elem().NotInHeap() {
  1496  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1497  			}
  1498  
  1499  			len, cap := l, r
  1500  
  1501  			fnname := "makeslice64"
  1502  			argtype := types.Types[TINT64]
  1503  
  1504  			// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
  1505  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1506  			// will be handled by the negative range checks in makeslice during runtime.
  1507  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1508  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1509  				fnname = "makeslice"
  1510  				argtype = types.Types[TINT]
  1511  			}
  1512  
  1513  			fn := syslook(fnname)
  1514  			fn = substArgTypes(fn, t.Elem()) // any-1
  1515  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1516  		}
  1517  
  1518  	case ORUNESTR:
  1519  		a := nodnil()
  1520  		if n.Esc == EscNone {
  1521  			t := types.NewArray(types.Types[TUINT8], 4)
  1522  			var_ := temp(t)
  1523  			a = nod(OADDR, var_, nil)
  1524  		}
  1525  
  1526  		// intstring(*[4]byte, rune)
  1527  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1528  
  1529  	case OARRAYBYTESTR:
  1530  		a := nodnil()
  1531  		if n.Esc == EscNone {
  1532  			// Create temporary buffer for string on stack.
  1533  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1534  
  1535  			a = nod(OADDR, temp(t), nil)
  1536  		}
  1537  
  1538  		// slicebytetostring(*[32]byte, []byte) string;
  1539  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1540  
  1541  		// slicebytetostringtmp([]byte) string;
  1542  	case OARRAYBYTESTRTMP:
  1543  		n.Left = walkexpr(n.Left, init)
  1544  
  1545  		if !instrumenting {
  1546  			// Let the backend handle OARRAYBYTESTRTMP directly
  1547  			// to avoid a function call to slicebytetostringtmp.
  1548  			break
  1549  		}
  1550  
  1551  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1552  
  1553  		// slicerunetostring(*[32]byte, []rune) string;
  1554  	case OARRAYRUNESTR:
  1555  		a := nodnil()
  1556  
  1557  		if n.Esc == EscNone {
  1558  			// Create temporary buffer for string on stack.
  1559  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1560  
  1561  			a = nod(OADDR, temp(t), nil)
  1562  		}
  1563  
  1564  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1565  
  1566  		// stringtoslicebyte(*[32]byte, string) []byte;
  1567  	case OSTRARRAYBYTE:
  1568  		a := nodnil()
  1569  
  1570  		if n.Esc == EscNone {
  1571  			// Create temporary buffer for slice on stack.
  1572  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1573  
  1574  			a = nod(OADDR, temp(t), nil)
  1575  		}
  1576  
  1577  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1578  
  1579  	case OSTRARRAYBYTETMP:
  1580  		// []byte(string) conversion that creates a slice
  1581  		// referring to the actual string bytes.
  1582  		// This conversion is handled later by the backend and
  1583  		// is only for use by internal compiler optimizations
  1584  		// that know that the slice won't be mutated.
  1585  		// The only such case today is:
  1586  		// for i, c := range []byte(string)
  1587  		n.Left = walkexpr(n.Left, init)
  1588  
  1589  		// stringtoslicerune(*[32]rune, string) []rune
  1590  	case OSTRARRAYRUNE:
  1591  		a := nodnil()
  1592  
  1593  		if n.Esc == EscNone {
  1594  			// Create temporary buffer for slice on stack.
  1595  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1596  
  1597  			a = nod(OADDR, temp(t), nil)
  1598  		}
  1599  
  1600  		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
  1601  
  1602  		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  1603  	case OCMPIFACE:
  1604  		if !eqtype(n.Left.Type, n.Right.Type) {
  1605  			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  1606  		}
  1607  		var fn *Node
  1608  		if n.Left.Type.IsEmptyInterface() {
  1609  			fn = syslook("efaceeq")
  1610  		} else {
  1611  			fn = syslook("ifaceeq")
  1612  		}
  1613  
  1614  		n.Right = cheapexpr(n.Right, init)
  1615  		n.Left = cheapexpr(n.Left, init)
  1616  		lt := nod(OITAB, n.Left, nil)
  1617  		rt := nod(OITAB, n.Right, nil)
  1618  		ld := nod(OIDATA, n.Left, nil)
  1619  		rd := nod(OIDATA, n.Right, nil)
  1620  		ld.Type = types.Types[TUNSAFEPTR]
  1621  		rd.Type = types.Types[TUNSAFEPTR]
  1622  		ld.SetTypecheck(1)
  1623  		rd.SetTypecheck(1)
  1624  		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  1625  
  1626  		// Check itable/type before full compare.
  1627  		// Note: short-circuited because order matters.
  1628  		// TODO(marvin): Fix Node.EType type union.
  1629  		var cmp *Node
  1630  		if Op(n.Etype) == OEQ {
  1631  			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  1632  		} else {
  1633  			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  1634  		}
  1635  		cmp = typecheck(cmp, Erv)
  1636  		cmp = walkexpr(cmp, init)
  1637  		cmp.Type = n.Type
  1638  		n = cmp
  1639  
  1640  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1641  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1642  			// n can be directly represented in the read-only data section.
  1643  			// Make direct reference to the static data. See issue 12841.
  1644  			vstat := staticname(n.Type)
  1645  			vstat.Name.SetReadonly(true)
  1646  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1647  			n = vstat
  1648  			n = typecheck(n, Erv)
  1649  			break
  1650  		}
  1651  		var_ := temp(n.Type)
  1652  		anylit(n, var_, init)
  1653  		n = var_
  1654  
  1655  	case OSEND:
  1656  		n1 := n.Right
  1657  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1658  		n1 = walkexpr(n1, init)
  1659  		n1 = nod(OADDR, n1, nil)
  1660  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1661  
  1662  	case OCLOSURE:
  1663  		n = walkclosure(n, init)
  1664  
  1665  	case OCALLPART:
  1666  		n = walkpartialcall(n, init)
  1667  	}
  1668  
  1669  	// Expressions that are constant at run time but not
  1670  	// considered const by the language spec are not turned into
  1671  	// constants until walk. For example, if n is y%1 == 0, the
  1672  	// walk of y%1 may have replaced it by 0.
  1673  	// Check whether n with its updated args is itself now a constant.
  1674  	t := n.Type
  1675  	evconst(n)
  1676  	if n.Type != t {
  1677  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1678  	}
  1679  	if n.Op == OLITERAL {
  1680  		n = typecheck(n, Erv)
  1681  		// Emit string symbol now to avoid emitting
  1682  		// any concurrently during the backend.
  1683  		if s, ok := n.Val().U.(string); ok {
  1684  			_ = stringsym(s)
  1685  		}
  1686  	}
  1687  
  1688  	updateHasCall(n)
  1689  
  1690  	if Debug['w'] != 0 && n != nil {
  1691  		Dump("walk", n)
  1692  	}
  1693  
  1694  	lineno = lno
  1695  	return n
  1696  }
  1697  
  1698  // TODO(josharian): combine this with its caller and simplify
  1699  func reduceSlice(n *Node) *Node {
  1700  	low, high, max := n.SliceBounds()
  1701  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1702  		// Reduce x[i:len(x)] to x[i:].
  1703  		high = nil
  1704  	}
  1705  	n.SetSliceBounds(low, high, max)
  1706  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1707  		// Reduce x[:] to x.
  1708  		if Debug_slice > 0 {
  1709  			Warn("slice: omit slice operation")
  1710  		}
  1711  		return n.Left
  1712  	}
  1713  	return n
  1714  }
  1715  
  1716  func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1717  	// convas will turn map assigns into function calls,
  1718  	// making it impossible for reorder3 to work.
  1719  	n := nod(OAS, l, r)
  1720  
  1721  	if l.Op == OINDEXMAP {
  1722  		return n
  1723  	}
  1724  
  1725  	return convas(n, init)
  1726  }
  1727  
  1728  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1729  	// check assign expression list to
  1730  	// an expression list. called in
  1731  	//	expr-list = expr-list
  1732  
  1733  	// ensure order of evaluation for function calls
  1734  	for i := range nl {
  1735  		nl[i] = safeexpr(nl[i], init)
  1736  	}
  1737  	for i1 := range nr {
  1738  		nr[i1] = safeexpr(nr[i1], init)
  1739  	}
  1740  
  1741  	var nn []*Node
  1742  	i := 0
  1743  	for ; i < len(nl); i++ {
  1744  		if i >= len(nr) {
  1745  			break
  1746  		}
  1747  		// Do not generate 'x = x' during return. See issue 4014.
  1748  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1749  			continue
  1750  		}
  1751  		nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1752  	}
  1753  
  1754  	// cannot happen: caller checked that lists had same length
  1755  	if i < len(nl) || i < len(nr) {
  1756  		var nln, nrn Nodes
  1757  		nln.Set(nl)
  1758  		nrn.Set(nr)
  1759  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1760  	}
  1761  	return nn
  1762  }
  1763  
  1764  // l is an lvalue and rt is the type of an rvalue.
  1765  // fncall reports whether assigning an rvalue of type rt to l
  1766  // implies a function call: either evaluating the lvalue or
  1767  // converting between the types requires one.
  1768  func fncall(l *Node, rt *types.Type) bool {
  1769  	if l.HasCall() || l.Op == OINDEXMAP {
  1770  		return true
  1771  	}
  1772  	if needwritebarrier(l) {
  1773  		return true
  1774  	}
  1775  	if eqtype(l.Type, rt) {
  1776  		return false
  1777  	}
  1778  	return true
  1779  }
  1780  
  1781  // check assign type list to
  1782  // an expression list. called in
  1783  //	expr-list = func()
  1784  func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1785  	if nl.Len() != nr.NumFields() {
  1786  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1787  	}
  1788  
  1789  	var nn, mm Nodes
  1790  	for i, l := range nl.Slice() {
  1791  		if isblank(l) {
  1792  			continue
  1793  		}
  1794  		r := nr.Field(i)
  1795  
  1796  		// any lv that causes a fn call must be
  1797  		// deferred until all the return arguments
  1798  		// have been pulled from the output arguments
  1799  		if fncall(l, r.Type) {
  1800  			tmp := temp(r.Type)
  1801  			tmp = typecheck(tmp, Erv)
  1802  			a := nod(OAS, l, tmp)
  1803  			a = convas(a, &mm)
  1804  			mm.Append(a)
  1805  			l = tmp
  1806  		}
  1807  
  1808  		a := nod(OAS, l, nodarg(r, 0))
  1809  		a = convas(a, &nn)
  1810  		updateHasCall(a)
  1811  		if a.HasCall() {
  1812  			Dump("ascompatet ucount", a)
  1813  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1814  		}
  1815  
  1816  		nn.Append(a)
  1817  	}
  1818  	return append(nn.Slice(), mm.Slice()...)
  1819  }
  1820  
  1821  // nodarg returns a Node for the function argument denoted by t,
  1822  // which is either the entire function argument or result struct (t is a struct *types.Type)
  1823  // or a specific argument (t is a *types.Field within a struct *types.Type).
  1824  //
  1825  // If fp is 0, the node is for use by a caller invoking the given
  1826  // function, preparing the arguments before the call
  1827  // or retrieving the results after the call.
  1828  // In this case, the node will correspond to an outgoing argument
  1829  // slot like 8(SP).
  1830  //
  1831  // If fp is 1, the node is for use by the function itself
  1832  // (the callee), to retrieve its arguments or write its results.
  1833  // In this case the node will be an ONAME with an appropriate
  1834  // type and offset.
  1835  func nodarg(t interface{}, fp int) *Node {
  1836  	var n *Node
  1837  
  1838  	var funarg types.Funarg
  1839  	switch t := t.(type) {
  1840  	default:
  1841  		Fatalf("bad nodarg %T(%v)", t, t)
  1842  
  1843  	case *types.Type:
  1844  		// Entire argument struct, not just one arg
  1845  		if !t.IsFuncArgStruct() {
  1846  			Fatalf("nodarg: bad type %v", t)
  1847  		}
  1848  		funarg = t.StructType().Funarg
  1849  
  1850  		// Build fake variable name for whole arg struct.
  1851  		n = newname(lookup(".args"))
  1852  		n.Type = t
  1853  		first := t.Field(0)
  1854  		if first == nil {
  1855  			Fatalf("nodarg: bad struct")
  1856  		}
  1857  		if first.Offset == BADWIDTH {
  1858  			Fatalf("nodarg: offset not computed for %v", t)
  1859  		}
  1860  		n.Xoffset = first.Offset
  1861  
  1862  	case *types.Field:
  1863  		funarg = t.Funarg
  1864  		if fp == 1 {
  1865  			// NOTE(rsc): This should be using t.Nname directly,
  1866  			// except in the case where t.Nname.Sym is the blank symbol and
  1867  			// so the assignment would be discarded during code generation.
  1868  			// In that case we need to make a new node, and doing so does
  1869  			// no harm to optimization passes. But otherwise we should
  1870  			// definitely be using the actual declaration and not a newly built node.
  1871  			// The extra Fatalf checks here are verifying that this is the case,
  1872  			// without changing the actual logic (at time of writing, it's getting
  1873  			// toward time for the Go 1.7 beta).
  1874  			// At some quieter time (assuming we've never seen these Fatalfs happen)
  1875  			// we could change this code to use "expect" directly.
  1876  			expect := asNode(t.Nname)
  1877  			if expect.isParamHeapCopy() {
  1878  				expect = expect.Name.Param.Stackcopy
  1879  			}
  1880  
  1881  			for _, n := range Curfn.Func.Dcl {
  1882  				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
  1883  					if n != expect {
  1884  						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
  1885  					}
  1886  					return n
  1887  				}
  1888  			}
  1889  
  1890  			if !expect.Sym.IsBlank() {
  1891  				Fatalf("nodarg: did not find node in dcl list: %v", expect)
  1892  			}
  1893  		}
  1894  
  1895  		// Build fake name for individual variable.
  1896  		// This is safe because if there was a real declared name
  1897  		// we'd have used it above.
  1898  		n = newname(lookup("__"))
  1899  		n.Type = t.Type
  1900  		if t.Offset == BADWIDTH {
  1901  			Fatalf("nodarg: offset not computed for %v", t)
  1902  		}
  1903  		n.Xoffset = t.Offset
  1904  		n.Orig = asNode(t.Nname)
  1905  	}
  1906  
  1907  	// Rewrite argument named _ to __,
  1908  	// or else the assignment to _ will be
  1909  	// discarded during code generation.
  1910  	if isblank(n) {
  1911  		n.Sym = lookup("__")
  1912  	}
  1913  
  1914  	switch fp {
  1915  	default:
  1916  		Fatalf("bad fp")
  1917  
  1918  	case 0: // preparing arguments for call
  1919  		n.Op = OINDREGSP
  1920  		n.Xoffset += Ctxt.FixedFrameSize()
  1921  
  1922  	case 1: // reading arguments inside call
  1923  		n.SetClass(PPARAM)
  1924  		if funarg == types.FunargResults {
  1925  			n.SetClass(PPARAMOUT)
  1926  		}
  1927  	}
  1928  
  1929  	n.SetTypecheck(1)
  1930  	n.SetAddrtaken(true) // keep optimizers at bay
  1931  	return n
  1932  }
  1933  
  1934  // package all the arguments that match a ... T parameter into a []T.
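        // For example, for func f(xs ...int), the call f(1, 2) packages []int{1, 2},
        // while a call that supplies no variadic arguments yields a nil []int.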
  1935  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1936  	esc := uint16(EscUnknown)
  1937  	if ddd != nil {
  1938  		esc = ddd.Esc
  1939  	}
  1940  
  1941  	if len(args) == 0 {
  1942  		n := nodnil()
  1943  		n.Type = typ
  1944  		return n
  1945  	}
  1946  
  1947  	n := nod(OCOMPLIT, nil, typenod(typ))
  1948  	if ddd != nil && prealloc[ddd] != nil {
  1949  		prealloc[n] = prealloc[ddd] // temporary to use
  1950  	}
  1951  	n.List.Set(args)
  1952  	n.Esc = esc
  1953  	n = typecheck(n, Erv)
  1954  	if n.Type == nil {
  1955  		Fatalf("mkdotargslice: typecheck failed")
  1956  	}
  1957  	n = walkexpr(n, init)
  1958  	return n
  1959  }
  1960  
  1961  // check assign expression list to
  1962  // a type list. called in
  1963  //	return expr-list
  1964  //	func(expr-list)
  1965  func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
  1966  	var nn []*Node
  1967  
  1968  	// f(g()) where g has multiple return values
  1969  	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
  1970  		// optimization - can do block copy
  1971  		if eqtypenoname(rhs[0].Type, lhs) {
  1972  			nl := nodarg(lhs, fp)
  1973  			nr := nod(OCONVNOP, rhs[0], nil)
  1974  			nr.Type = nl.Type
  1975  			nn = []*Node{convas(nod(OAS, nl, nr), init)}
  1976  			goto ret
  1977  		}
  1978  
  1979  		// conversions involved.
  1980  		// copy into temporaries.
  1981  		var tmps []*Node
  1982  		for _, nr := range rhs[0].Type.FieldSlice() {
  1983  			tmps = append(tmps, temp(nr.Type))
  1984  		}
  1985  
  1986  		a := nod(OAS2, nil, nil)
  1987  		a.List.Set(tmps)
  1988  		a.Rlist.Set(rhs)
  1989  		a = typecheck(a, Etop)
  1990  		a = walkstmt(a)
  1991  		init.Append(a)
  1992  
  1993  		rhs = tmps
  1994  	}
  1995  
  1996  	// For each parameter (LHS), assign its corresponding argument (RHS).
  1997  	// If there's a ... parameter (which is only valid as the final
  1998  	// parameter) and this is not a ... call expression,
  1999  	// then assign the remaining arguments as a slice.
  2000  	for i, nl := range lhs.FieldSlice() {
  2001  		var nr *Node
  2002  		if nl.Isddd() && !isddd {
  2003  			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
  2004  		} else {
  2005  			nr = rhs[i]
  2006  		}
  2007  
  2008  		a := nod(OAS, nodarg(nl, fp), nr)
  2009  		a = convas(a, init)
  2010  		nn = append(nn, a)
  2011  	}
  2012  
  2013  ret:
  2014  	for _, n := range nn {
  2015  		n.SetTypecheck(1)
  2016  	}
  2017  	return nn
  2018  }
  2019  
  2020  // generate code for print
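        // For example, println("x:", 42) is lowered to, roughly:
        //	printlock(); printstring("x: "); printint(42); printnl(); printunlock()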
  2021  func walkprint(nn *Node, init *Nodes) *Node {
  2022  	// Hoist all the argument evaluation up before the lock.
  2023  	walkexprlistcheap(nn.List.Slice(), init)
  2024  
  2025  	// For println, add " " between elements and "\n" at the end.
  2026  	if nn.Op == OPRINTN {
  2027  		s := nn.List.Slice()
  2028  		t := make([]*Node, 0, len(s)*2)
  2029  		for i, n := range s {
  2030  			x := " "
  2031  			if len(s)-1 == i {
  2032  				x = "\n"
  2033  			}
  2034  			t = append(t, n, nodstr(x))
  2035  		}
  2036  		nn.List.Set(t)
  2037  	}
  2038  
  2039  	// Collapse runs of constant strings.
  2040  	s := nn.List.Slice()
  2041  	t := make([]*Node, 0, len(s))
  2042  	for i := 0; i < len(s); {
  2043  		var strs []string
  2044  		for i < len(s) && Isconst(s[i], CTSTR) {
  2045  			strs = append(strs, s[i].Val().U.(string))
  2046  			i++
  2047  		}
  2048  		if len(strs) > 0 {
  2049  			t = append(t, nodstr(strings.Join(strs, "")))
  2050  		}
  2051  		if i < len(s) {
  2052  			t = append(t, s[i])
  2053  			i++
  2054  		}
  2055  	}
  2056  	nn.List.Set(t)
  2057  
  2058  	calls := []*Node{mkcall("printlock", nil, init)}
  2059  	for i, n := range nn.List.Slice() {
  2060  		if n.Op == OLITERAL {
  2061  			switch n.Val().Ctype() {
  2062  			case CTRUNE:
  2063  				n = defaultlit(n, types.Runetype)
  2064  
  2065  			case CTINT:
  2066  				n = defaultlit(n, types.Types[TINT64])
  2067  
  2068  			case CTFLT:
  2069  				n = defaultlit(n, types.Types[TFLOAT64])
  2070  			}
  2071  		}
  2072  
  2073  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  2074  			n = defaultlit(n, types.Types[TINT64])
  2075  		}
  2076  		n = defaultlit(n, nil)
  2077  		nn.List.SetIndex(i, n)
  2078  		if n.Type == nil || n.Type.Etype == TFORW {
  2079  			continue
  2080  		}
  2081  
  2082  		var on *Node
  2083  		switch n.Type.Etype {
  2084  		case TINTER:
  2085  			if n.Type.IsEmptyInterface() {
  2086  				on = syslook("printeface")
  2087  			} else {
  2088  				on = syslook("printiface")
  2089  			}
  2090  			on = substArgTypes(on, n.Type) // any-1
  2091  		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  2092  			on = syslook("printpointer")
  2093  			on = substArgTypes(on, n.Type) // any-1
  2094  		case TSLICE:
  2095  			on = syslook("printslice")
  2096  			on = substArgTypes(on, n.Type) // any-1
  2097  		case TUINT64:
  2098  			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  2099  				on = syslook("printhex")
  2100  			} else {
  2101  				on = syslook("printuint")
  2102  			}
  2103  		case TINT, TUINT, TUINTPTR, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64:
  2104  			on = syslook("printint")
  2105  		case TFLOAT32, TFLOAT64:
  2106  			on = syslook("printfloat")
  2107  		case TCOMPLEX64, TCOMPLEX128:
  2108  			on = syslook("printcomplex")
  2109  		case TBOOL:
  2110  			on = syslook("printbool")
  2111  		case TSTRING:
  2112  			cs := ""
  2113  			if Isconst(n, CTSTR) {
  2114  				cs = n.Val().U.(string)
  2115  			}
  2116  			switch cs {
  2117  			case " ":
  2118  				on = syslook("printsp")
  2119  			case "\n":
  2120  				on = syslook("printnl")
  2121  			default:
  2122  				on = syslook("printstring")
  2123  			}
  2124  		default:
  2125  			badtype(OPRINT, n.Type, nil)
  2126  			continue
  2127  		}
  2128  
  2129  		r := nod(OCALL, on, nil)
  2130  		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  2131  			t := params[0].Type
  2132  			if !eqtype(t, n.Type) {
  2133  				n = nod(OCONV, n, nil)
  2134  				n.Type = t
  2135  			}
  2136  			r.List.Append(n)
  2137  		}
  2138  		calls = append(calls, r)
  2139  	}
  2140  
  2141  	calls = append(calls, mkcall("printunlock", nil, init))
  2142  
  2143  	typecheckslice(calls, Etop)
  2144  	walkexprlist(calls, init)
  2145  
  2146  	r := nod(OEMPTY, nil, nil)
  2147  	r = typecheck(r, Etop)
  2148  	r = walkexpr(r, init)
  2149  	r.Ninit.Set(calls)
  2150  	return r
  2151  }
  2152  
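        // callnew returns a call to runtime.newobject that allocates a new,
        // zeroed value of type t and yields a *t. The result is marked non-nil.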
  2153  func callnew(t *types.Type) *Node {
  2154  	if t.NotInHeap() {
  2155  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  2156  	}
  2157  	dowidth(t)
  2158  	fn := syslook("newobject")
  2159  	fn = substArgTypes(fn, t)
  2160  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  2161  	v.SetNonNil(true)
  2162  	return v
  2163  }
  2164  
  2165  func iscallret(n *Node) bool {
  2166  	n = outervalue(n)
  2167  	return n.Op == OINDREGSP
  2168  }
  2169  
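        // isstack reports whether n refers to stack memory: either an outgoing
        // argument slot (OINDREGSP) or a local, parameter, or result variable.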
  2170  func isstack(n *Node) bool {
  2171  	n = outervalue(n)
  2172  
  2173  	// If n is *autotmp and autotmp = &foo, replace n with foo.
  2174  	// We introduce such temps when initializing struct literals.
  2175  	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
  2176  		defn := n.Left.Name.Defn
  2177  		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
  2178  			n = defn.Right.Left
  2179  		}
  2180  	}
  2181  
  2182  	switch n.Op {
  2183  	case OINDREGSP:
  2184  		return true
  2185  
  2186  	case ONAME:
  2187  		switch n.Class() {
  2188  		case PAUTO, PPARAM, PPARAMOUT:
  2189  			return true
  2190  		}
  2191  	}
  2192  
  2193  	return false
  2194  }
  2195  
  2196  // isReflectHeaderDataField reports whether l is an expression p.Data
  2197  // where p has type reflect.SliceHeader or reflect.StringHeader.
  2198  func isReflectHeaderDataField(l *Node) bool {
  2199  	if l.Type != types.Types[TUINTPTR] {
  2200  		return false
  2201  	}
  2202  
  2203  	var tsym *types.Sym
  2204  	switch l.Op {
  2205  	case ODOT:
  2206  		tsym = l.Left.Type.Sym
  2207  	case ODOTPTR:
  2208  		tsym = l.Left.Type.Elem().Sym
  2209  	default:
  2210  		return false
  2211  	}
  2212  
  2213  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  2214  		return false
  2215  	}
  2216  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  2217  }
  2218  
  2219  // Do we need a write barrier for assigning to l?
  2220  func needwritebarrier(l *Node) bool {
  2221  	if !use_writebarrier {
  2222  		return false
  2223  	}
  2224  
  2225  	if l == nil || isblank(l) {
  2226  		return false
  2227  	}
  2228  
  2229  	// No write barrier for write to stack.
  2230  	if isstack(l) {
  2231  		return false
  2232  	}
  2233  
  2234  	// Package unsafe's documentation says storing pointers into
  2235  	// reflect.SliceHeader and reflect.StringHeader's Data fields
  2236  	// is valid, even though they have type uintptr (#19168).
  2237  	if isReflectHeaderDataField(l) {
  2238  		return true
  2239  	}
  2240  
  2241  	// No write barrier for write of non-pointers.
  2242  	dowidth(l.Type)
  2243  	if !types.Haspointers(l.Type) {
  2244  		return false
  2245  	}
  2246  
  2247  	// No write barrier if this is a pointer to a go:notinheap
  2248  	// type, since the write barrier's inheap(ptr) check will fail.
  2249  	if l.Type.IsPtr() && l.Type.Elem().NotInHeap() {
  2250  		return false
  2251  	}
  2252  
  2253  	// TODO: We can eliminate write barriers if we know *both* the
  2254  	// current and new content of the slot must already be shaded.
  2255  	// We know a pointer is shaded if it's nil, or points to
  2256  	// static data, a global (variable or function), or the stack.
  2257  	// The nil optimization could be particularly useful for
  2258  	// writes to just-allocated objects. Unfortunately, knowing
  2259  	// the "current" value of the slot requires flow analysis.
  2260  
  2261  	// Otherwise, be conservative and use write barrier.
  2262  	return true
  2263  }
  2264  
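        // convas lowers the single assignment n (an OAS node): an assignment to
        // the blank identifier gets its RHS defaulted, mismatched types get an
        // assignconv conversion, and updateHasCall records whether the result
        // now involves a function call.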
  2265  func convas(n *Node, init *Nodes) *Node {
  2266  	if n.Op != OAS {
  2267  		Fatalf("convas: not OAS %v", n.Op)
  2268  	}
  2269  
  2270  	n.SetTypecheck(1)
  2271  
  2272  	var lt *types.Type
  2273  	var rt *types.Type
  2274  	if n.Left == nil || n.Right == nil {
  2275  		goto out
  2276  	}
  2277  
  2278  	lt = n.Left.Type
  2279  	rt = n.Right.Type
  2280  	if lt == nil || rt == nil {
  2281  		goto out
  2282  	}
  2283  
  2284  	if isblank(n.Left) {
  2285  		n.Right = defaultlit(n.Right, nil)
  2286  		goto out
  2287  	}
  2288  
  2289  	if !eqtype(lt, rt) {
  2290  		n.Right = assignconv(n.Right, lt, "assignment")
  2291  		n.Right = walkexpr(n.Right, init)
  2292  	}
  2293  	dowidth(n.Right.Type)
  2294  
  2295  out:
  2296  	updateHasCall(n)
  2297  	return n
  2298  }
  2299  
  2300  // from ascompat[te]
  2301  // evaluating actual function arguments.
  2302  //	f(a,b)
  2303  // if there is exactly one function expr,
  2304  // then it is done first. otherwise must
  2305  // make temp variables
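        // For example, for f(g(), h(), x) the incoming argument assignments
        //	a1 = g(); a2 = h(); a3 = x
        // are reordered to, roughly,
        //	t := g(); a2 = h(); a1 = t; a3 = x
        // so that only the last call writes its argument slot directly.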
  2306  func reorder1(all []*Node) []*Node {
  2307  	c := 0 // function calls
  2308  	t := 0 // total parameters
  2309  
  2310  	for _, n := range all {
  2311  		t++
  2312  		updateHasCall(n)
  2313  		if n.HasCall() {
  2314  			c++
  2315  		}
  2316  	}
  2317  
  2318  	if c == 0 || t == 1 {
  2319  		return all
  2320  	}
  2321  
  2322  	var g []*Node // fncalls assigned to tempnames
  2323  	var f *Node   // last fncall assigned to stack
  2324  	var r []*Node // non fncalls and tempnames assigned to stack
  2325  	d := 0
  2326  	var a *Node
  2327  	for _, n := range all {
  2328  		if !n.HasCall() {
  2329  			r = append(r, n)
  2330  			continue
  2331  		}
  2332  
  2333  		d++
  2334  		if d == c {
  2335  			f = n
  2336  			continue
  2337  		}
  2338  
  2339  		// make assignment of fncall to tempname
  2340  		a = temp(n.Right.Type)
  2341  
  2342  		a = nod(OAS, a, n.Right)
  2343  		g = append(g, a)
  2344  
  2345  		// put normal arg assignment on list
  2346  		// with fncall replaced by tempname
  2347  		n.Right = a.Left
  2348  
  2349  		r = append(r, n)
  2350  	}
  2351  
  2352  	if f != nil {
  2353  		g = append(g, f)
  2354  	}
  2355  	return append(g, r...)
  2356  }
  2357  
  2358  // from ascompat[ee]
  2359  //	a,b = c,d
  2360  // simultaneous assignment. there cannot
  2361  // be later use of an earlier lvalue.
  2362  //
  2363  // function calls have been removed.
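        // For example, a, b = b, a becomes, roughly,
        //	tmp := a; a = b; b = tmp
        // where the copy into tmp is emitted by reorder3save.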
  2364  func reorder3(all []*Node) []*Node {
  2365  	var l *Node
  2366  
  2367  	// If a needed expression may be affected by an
  2368  	// earlier assignment, make an early copy of that
  2369  	// expression and use the copy instead.
  2370  	var early []*Node
  2371  
  2372  	var mapinit Nodes
  2373  	for i, n := range all {
  2374  		l = n.Left
  2375  
  2376  		// Save subexpressions needed on left side.
  2377  		// Drill through non-dereferences.
  2378  		for {
  2379  			if l.Op == ODOT || l.Op == OPAREN {
  2380  				l = l.Left
  2381  				continue
  2382  			}
  2383  
  2384  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2385  				l.Right = reorder3save(l.Right, all, i, &early)
  2386  				l = l.Left
  2387  				continue
  2388  			}
  2389  
  2390  			break
  2391  		}
  2392  
  2393  		switch l.Op {
  2394  		default:
  2395  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2396  
  2397  		case ONAME:
  2398  			break
  2399  
  2400  		case OINDEX, OINDEXMAP:
  2401  			l.Left = reorder3save(l.Left, all, i, &early)
  2402  			l.Right = reorder3save(l.Right, all, i, &early)
  2403  			if l.Op == OINDEXMAP {
  2404  				all[i] = convas(all[i], &mapinit)
  2405  			}
  2406  
  2407  		case OIND, ODOTPTR:
  2408  			l.Left = reorder3save(l.Left, all, i, &early)
  2409  		}
  2410  
  2411  		// Save expression on right side.
  2412  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2413  	}
  2414  
  2415  	early = append(mapinit.Slice(), early...)
  2416  	return append(early, all...)
  2417  }
  2418  
  2419  // if the evaluation of n would be affected by the
  2420  // assignments in all up to but not including the ith assignment,
  2421  // copy n into a temporary, add the copy to *early, and
  2422  // return the temporary in place of n.
  2423  // The result of reorder3save MUST be assigned back to n, e.g.
  2424  // 	n.Left = reorder3save(n.Left, all, i, early)
  2425  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2426  	if !aliased(n, all, i) {
  2427  		return n
  2428  	}
  2429  
  2430  	q := temp(n.Type)
  2431  	q = nod(OAS, q, n)
  2432  	q = typecheck(q, Etop)
  2433  	*early = append(*early, q)
  2434  	return q.Left
  2435  }
  2436  
  2437  // what's the outer value that a write to n affects?
  2438  // outer value means containing struct or array.
  2439  func outervalue(n *Node) *Node {
  2440  	for {
  2441  		if n.Op == OXDOT {
  2442  			Fatalf("OXDOT in walk")
  2443  		}
  2444  		if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
  2445  			n = n.Left
  2446  			continue
  2447  		}
  2448  
  2449  		if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() {
  2450  			n = n.Left
  2451  			continue
  2452  		}
  2453  
  2454  		break
  2455  	}
  2456  
  2457  	return n
  2458  }
  2459  
  2460  // Is it possible that the computation of n might be
  2461  // affected by writes in all up to but not including the ith element?
  2462  func aliased(n *Node, all []*Node, i int) bool {
  2463  	if n == nil {
  2464  		return false
  2465  	}
  2466  
  2467  	// Treat all fields of a struct as referring to the whole struct.
  2468  	// We could do better but we would have to keep track of the fields.
  2469  	for n.Op == ODOT {
  2470  		n = n.Left
  2471  	}
  2472  
  2473  	// Look for obvious aliasing: a variable being assigned
  2474  	// during the all list and appearing in n.
  2475  	// Also record whether there are any writes to main memory.
  2476  	// Also record whether there are any writes to variables
  2477  	// whose addresses have been taken.
  2478  	memwrite := 0
  2479  
  2480  	varwrite := 0
  2481  	var a *Node
  2482  	for _, an := range all[:i] {
  2483  		a = outervalue(an.Left)
  2484  
  2485  		for a.Op == ODOT {
  2486  			a = a.Left
  2487  		}
  2488  
  2489  		if a.Op != ONAME {
  2490  			memwrite = 1
  2491  			continue
  2492  		}
  2493  
  2494  		switch n.Class() {
  2495  		default:
  2496  			varwrite = 1
  2497  			continue
  2498  
  2499  		case PAUTO, PPARAM, PPARAMOUT:
  2500  			if n.Addrtaken() {
  2501  				varwrite = 1
  2502  				continue
  2503  			}
  2504  
  2505  			if vmatch2(a, n) {
  2506  				// Direct hit.
  2507  				return true
  2508  			}
  2509  		}
  2510  	}
  2511  
  2512  	// The variables being written do not appear in n.
  2513  	// However, n might refer to computed addresses
  2514  	// that are being written.
  2515  
  2516  	// If no computed addresses are affected by the writes, no aliasing.
  2517  	if memwrite == 0 && varwrite == 0 {
  2518  		return false
  2519  	}
  2520  
  2521  	// If n does not refer to computed addresses
  2522  	// (that is, if n only refers to variables whose addresses
  2523  	// have not been taken), no aliasing.
  2524  	if varexpr(n) {
  2525  		return false
  2526  	}
  2527  
  2528  	// Otherwise, both the writes and n refer to computed memory addresses.
  2529  	// Assume that they might conflict.
  2530  	return true
  2531  }
  2532  
  2533  // does the evaluation of n only refer to variables
  2534  // whose addresses have not been taken?
  2535  // (and no other memory)
  2536  func varexpr(n *Node) bool {
  2537  	if n == nil {
  2538  		return true
  2539  	}
  2540  
  2541  	switch n.Op {
  2542  	case OLITERAL:
  2543  		return true
  2544  
  2545  	case ONAME:
  2546  		switch n.Class() {
  2547  		case PAUTO, PPARAM, PPARAMOUT:
  2548  			if !n.Addrtaken() {
  2549  				return true
  2550  			}
  2551  		}
  2552  
  2553  		return false
  2554  
  2555  	case OADD,
  2556  		OSUB,
  2557  		OOR,
  2558  		OXOR,
  2559  		OMUL,
  2560  		ODIV,
  2561  		OMOD,
  2562  		OLSH,
  2563  		ORSH,
  2564  		OAND,
  2565  		OANDNOT,
  2566  		OPLUS,
  2567  		OMINUS,
  2568  		OCOM,
  2569  		OPAREN,
  2570  		OANDAND,
  2571  		OOROR,
  2572  		OCONV,
  2573  		OCONVNOP,
  2574  		OCONVIFACE,
  2575  		ODOTTYPE:
  2576  		return varexpr(n.Left) && varexpr(n.Right)
  2577  
  2578  	case ODOT: // but not ODOTPTR
  2579  		// Should have been handled in aliased.
  2580  		Fatalf("varexpr unexpected ODOT")
  2581  	}
  2582  
  2583  	// Be conservative.
  2584  	return false
  2585  }
  2586  
  2587  // is the name l mentioned in r?
  2588  func vmatch2(l *Node, r *Node) bool {
  2589  	if r == nil {
  2590  		return false
  2591  	}
  2592  	switch r.Op {
  2593  	// match each right given left
  2594  	case ONAME:
  2595  		return l == r
  2596  
  2597  	case OLITERAL:
  2598  		return false
  2599  	}
  2600  
  2601  	if vmatch2(l, r.Left) {
  2602  		return true
  2603  	}
  2604  	if vmatch2(l, r.Right) {
  2605  		return true
  2606  	}
  2607  	for _, n := range r.List.Slice() {
  2608  		if vmatch2(l, n) {
  2609  			return true
  2610  		}
  2611  	}
  2612  	return false
  2613  }
  2614  
  2615  // is any name mentioned in l also mentioned in r?
  2616  // called by sinit.go
  2617  func vmatch1(l *Node, r *Node) bool {
  2618  	// isolate all left sides
  2619  	if l == nil || r == nil {
  2620  		return false
  2621  	}
  2622  	switch l.Op {
  2623  	case ONAME:
  2624  		switch l.Class() {
  2625  		case PPARAM, PAUTO:
  2626  			break
  2627  
  2628  		default:
  2629  			// assignment to non-stack variable must be
  2630  			// delayed if right has function calls.
  2631  			if r.HasCall() {
  2632  				return true
  2633  			}
  2634  		}
  2635  
  2636  		return vmatch2(l, r)
  2637  
  2638  	case OLITERAL:
  2639  		return false
  2640  	}
  2641  
  2642  	if vmatch1(l.Left, r) {
  2643  		return true
  2644  	}
  2645  	if vmatch1(l.Right, r) {
  2646  		return true
  2647  	}
  2648  	for _, n := range l.List.Slice() {
  2649  		if vmatch1(n, r) {
  2650  			return true
  2651  		}
  2652  	}
  2653  	return false
  2654  }
  2655  
  2656  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2657  // and to copy non-result parameters' values from the stack.
  2658  func paramstoheap(params *types.Type) []*Node {
  2659  	var nn []*Node
  2660  	for _, t := range params.Fields().Slice() {
  2661  		v := asNode(t.Nname)
  2662  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2663  			v = nil
  2664  		}
  2665  		if v == nil {
  2666  			continue
  2667  		}
  2668  
  2669  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2670  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2671  			if stackcopy.Class() == PPARAM {
  2672  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2673  			}
  2674  		}
  2675  	}
  2676  
  2677  	return nn
  2678  }
  2679  
  2680  // zeroResults zeros the return values at the start of the function.
  2681  // We need to do this very early in the function.  Defer might stop a
  2682  // panic and show the return values as they exist at the time of
  2683  // panic.  For precise stacks, the garbage collector assumes results
  2684  // are always live, so we need to zero them before any allocations,
  2685  // even allocations to move params/results to the heap.
  2686  // The generated code is added to Curfn's Enter list.
  2687  func zeroResults() {
  2688  	lno := lineno
  2689  	lineno = Curfn.Pos
  2690  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2691  		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
  2692  			// The local which points to the return value is the
  2693  			// thing that needs zeroing. This is already handled
  2694  			// by a Needzero annotation in plive.go:livenessepilogue.
  2695  			continue
  2696  		}
  2697  		// Zero the stack location containing f.
  2698  		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
  2699  	}
  2700  	lineno = lno
  2701  }
  2702  
  2703  // returnsfromheap returns code to copy values for heap-escaped parameters
  2704  // back to the stack.
  2705  func returnsfromheap(params *types.Type) []*Node {
  2706  	var nn []*Node
  2707  	for _, t := range params.Fields().Slice() {
  2708  		v := asNode(t.Nname)
  2709  		if v == nil {
  2710  			continue
  2711  		}
  2712  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2713  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2714  		}
  2715  	}
  2716  
  2717  	return nn
  2718  }
  2719  
  2720  // heapmoves generates code to handle migrating heap-escaped parameters
  2721  // between the stack and the heap. The generated code is added to Curfn's
  2722  // Enter and Exit lists.
  2723  func heapmoves() {
  2724  	lno := lineno
  2725  	lineno = Curfn.Pos
  2726  	nn := paramstoheap(Curfn.Type.Recvs())
  2727  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2728  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2729  	Curfn.Func.Enter.Append(nn...)
  2730  	lineno = Curfn.Func.Endlineno
  2731  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2732  	lineno = lno
  2733  }
  2734  
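        // vmkcall builds a call to fn passing the leading entries of va, one per
        // parameter of fn, typechecks and walks the call, and sets its type to t.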
  2735  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2736  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2737  		Fatalf("mkcall %v %v", fn, fn.Type)
  2738  	}
  2739  
  2740  	n := fn.Type.Params().NumFields()
  2741  
  2742  	r := nod(OCALL, fn, nil)
  2743  	r.List.Set(va[:n])
  2744  	if fn.Type.Results().NumFields() > 0 {
  2745  		r = typecheck(r, Erv|Efnstruct)
  2746  	} else {
  2747  		r = typecheck(r, Etop)
  2748  	}
  2749  	r = walkexpr(r, init)
  2750  	r.Type = t
  2751  	return r
  2752  }
  2753  
  2754  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2755  	return vmkcall(syslook(name), t, init, args)
  2756  }
  2757  
  2758  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2759  	return vmkcall(fn, t, init, args)
  2760  }
  2761  
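        // conv wraps n in a conversion to type t, returning n unchanged if the
        // types are already identical.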
  2762  func conv(n *Node, t *types.Type) *Node {
  2763  	if eqtype(n.Type, t) {
  2764  		return n
  2765  	}
  2766  	n = nod(OCONV, n, nil)
  2767  	n.Type = t
  2768  	n = typecheck(n, Erv)
  2769  	return n
  2770  }
  2771  
  2772  // byteindex converts n, which is byte-sized, to a uint8.
  2773  // We cannot use conv, because we allow converting bool to uint8 here,
  2774  // which is forbidden in user code.
  2775  func byteindex(n *Node) *Node {
  2776  	if eqtype(n.Type, types.Types[TUINT8]) {
  2777  		return n
  2778  	}
  2779  	n = nod(OCONV, n, nil)
  2780  	n.Type = types.Types[TUINT8]
  2781  	n.SetTypecheck(1)
  2782  	return n
  2783  }
  2784  
  2785  func chanfn(name string, n int, t *types.Type) *Node {
  2786  	if !t.IsChan() {
  2787  		Fatalf("chanfn %v", t)
  2788  	}
  2789  	fn := syslook(name)
  2790  	switch n {
  2791  	default:
  2792  		Fatalf("chanfn %d", n)
  2793  	case 1:
  2794  		fn = substArgTypes(fn, t.Elem())
  2795  	case 2:
  2796  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2797  	}
  2798  	return fn
  2799  }
  2800  
  2801  func mapfn(name string, t *types.Type) *Node {
  2802  	if !t.IsMap() {
  2803  		Fatalf("mapfn %v", t)
  2804  	}
  2805  	fn := syslook(name)
  2806  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
  2807  	return fn
  2808  }
  2809  
  2810  func mapfndel(name string, t *types.Type) *Node {
  2811  	if !t.IsMap() {
  2812  		Fatalf("mapfn %v", t)
  2813  	}
  2814  	fn := syslook(name)
  2815  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
  2816  	return fn
  2817  }
  2818  
  2819  const (
  2820  	mapslow = iota
  2821  	mapfast32
  2822  	mapfast64
  2823  	mapfaststr
  2824  	nmapfast
  2825  )
  2826  
  2827  type mapnames [nmapfast]string
  2828  
  2829  func mkmapnames(base string) mapnames {
  2830  	return mapnames{base, base + "_fast32", base + "_fast64", base + "_faststr"}
  2831  }
  2832  
  2833  var mapaccess1 mapnames = mkmapnames("mapaccess1")
  2834  var mapaccess2 mapnames = mkmapnames("mapaccess2")
  2835  var mapassign mapnames = mkmapnames("mapassign")
  2836  var mapdelete mapnames = mkmapnames("mapdelete")
  2837  
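        // mapfast selects among the runtime map helpers named above. Maps whose
        // values are wider than 128 bytes always use the generic (mapslow) forms;
        // otherwise keys compared as 4- or 8-byte memory use the _fast32/_fast64
        // variants and string keys use _faststr. For example, indexing a
        // map[int64]T with a small value type uses mapaccess1_fast64.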
  2838  func mapfast(t *types.Type) int {
  2839  	// Check ../../runtime/hashmap.go:maxValueSize before changing.
  2840  	if t.Val().Width > 128 {
  2841  		return mapslow
  2842  	}
  2843  	switch algtype(t.Key()) {
  2844  	case AMEM32:
  2845  		return mapfast32
  2846  	case AMEM64:
  2847  		return mapfast64
  2848  	case ASTRING:
  2849  		return mapfaststr
  2850  	}
  2851  	return mapslow
  2852  }
  2853  
  2854  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2855  	fn := syslook(name)
  2856  	fn = substArgTypes(fn, l, r)
  2857  	return fn
  2858  }
  2859  
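        // addstr lowers an OADDSTR concatenation to a runtime call. For example,
        // a + b + c becomes, roughly, concatstring3(buf, a, b, c), where buf points
        // to a 32-byte stack buffer when the result does not escape and its constant
        // parts fit; with more than five operands the strings are passed to
        // concatstrings as a slice instead.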
  2860  func addstr(n *Node, init *Nodes) *Node {
  2861  	// orderexpr rewrote OADDSTR to have a list of strings.
  2862  	c := n.List.Len()
  2863  
  2864  	if c < 2 {
  2865  		Fatalf("addstr count %d too small", c)
  2866  	}
  2867  
  2868  	buf := nodnil()
  2869  	if n.Esc == EscNone {
  2870  		sz := int64(0)
  2871  		for _, n1 := range n.List.Slice() {
  2872  			if n1.Op == OLITERAL {
  2873  				sz += int64(len(n1.Val().U.(string)))
  2874  			}
  2875  		}
  2876  
  2877  		// Don't allocate the buffer if the result won't fit.
  2878  		if sz < tmpstringbufsize {
  2879  			// Create temporary buffer for result string on stack.
  2880  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2881  
  2882  			buf = nod(OADDR, temp(t), nil)
  2883  		}
  2884  	}
  2885  
  2886  	// build list of string arguments
  2887  	args := []*Node{buf}
  2888  	for _, n2 := range n.List.Slice() {
  2889  		args = append(args, conv(n2, types.Types[TSTRING]))
  2890  	}
  2891  
  2892  	var fn string
  2893  	if c <= 5 {
  2894  		// small numbers of strings use direct runtime helpers.
  2895  		// note: orderexpr knows this cutoff too.
  2896  		fn = fmt.Sprintf("concatstring%d", c)
  2897  	} else {
  2898  		// large numbers of strings are passed to the runtime as a slice.
  2899  		fn = "concatstrings"
  2900  
  2901  		t := types.NewSlice(types.Types[TSTRING])
  2902  		slice := nod(OCOMPLIT, nil, typenod(t))
  2903  		if prealloc[n] != nil {
  2904  			prealloc[slice] = prealloc[n]
  2905  		}
  2906  		slice.List.Set(args[1:]) // skip buf arg
  2907  		args = []*Node{buf, slice}
  2908  		slice.Esc = EscNone
  2909  	}
  2910  
  2911  	cat := syslook(fn)
  2912  	r := nod(OCALL, cat, nil)
  2913  	r.List.Set(args)
  2914  	r = typecheck(r, Erv)
  2915  	r = walkexpr(r, init)
  2916  	r.Type = n.Type
  2917  
  2918  	return r
  2919  }
  2920  
  2921  // expand append(l1, l2...) to
  2922  //   init {
  2923  //     s := l1
  2924  //     n := len(s) + len(l2)
  2925  //     // Compare as uint so growslice can panic on overflow.
  2926  //     if uint(n) > uint(cap(s)) {
  2927  //       s = growslice(s, n)
  2928  //     }
  2929  //     s = s[:n]
  2930  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2931  //   }
  2932  //   s
  2933  //
  2934  // l2 is allowed to be a string.
  2935  func appendslice(n *Node, init *Nodes) *Node {
  2936  	walkexprlistsafe(n.List.Slice(), init)
  2937  
  2938  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2939  	// and n are name or literal, but those may index the slice we're
  2940  	// modifying here. Fix explicitly.
  2941  	ls := n.List.Slice()
  2942  	for i1, n1 := range ls {
  2943  		ls[i1] = cheapexpr(n1, init)
  2944  	}
  2945  
  2946  	l1 := n.List.First()
  2947  	l2 := n.List.Second()
  2948  
  2949  	var l []*Node
  2950  
  2951  	// var s []T
  2952  	s := temp(l1.Type)
  2953  	l = append(l, nod(OAS, s, l1)) // s = l1
  2954  
  2955  	// n := len(s) + len(l2)
  2956  	nn := temp(types.Types[TINT])
  2957  	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2958  
  2959  	// if uint(n) > uint(cap(s))
  2960  	nif := nod(OIF, nil, nil)
  2961  	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
  2962  	nif.Left.Left.Type = types.Types[TUINT]
  2963  	nif.Left.Right.Type = types.Types[TUINT]
  2964  
  2965  	// instantiate growslice(Type*, []any, int) []any
  2966  	fn := syslook("growslice")
  2967  	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  2968  
  2969  	// s = growslice(T, s, n)
  2970  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
  2971  	l = append(l, nif)
  2972  
  2973  	// s = s[:n]
  2974  	nt := nod(OSLICE, s, nil)
  2975  	nt.SetSliceBounds(nil, nn, nil)
  2976  	nt.Etype = 1
  2977  	l = append(l, nod(OAS, s, nt))
  2978  
  2979  	if types.Haspointers(l1.Type.Elem()) {
  2980  		// copy(s[len(l1):], l2)
  2981  		nptr1 := nod(OSLICE, s, nil)
  2982  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2983  		nptr1.Etype = 1
  2984  		nptr2 := l2
  2985  		fn := syslook("typedslicecopy")
  2986  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2987  		var ln Nodes
  2988  		ln.Set(l)
  2989  		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
  2990  		l = append(ln.Slice(), nt)
  2991  	} else if instrumenting && !compiling_runtime {
  2992  		// rely on runtime to instrument copy.
  2993  		// copy(s[len(l1):], l2)
  2994  		nptr1 := nod(OSLICE, s, nil)
  2995  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2996  		nptr1.Etype = 1
  2997  		nptr2 := l2
  2998  		var fn *Node
  2999  		if l2.Type.IsString() {
  3000  			fn = syslook("slicestringcopy")
  3001  		} else {
  3002  			fn = syslook("slicecopy")
  3003  		}
  3004  		fn = substArgTypes(fn, l1.Type, l2.Type)
  3005  		var ln Nodes
  3006  		ln.Set(l)
  3007  		nt := mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
  3008  		l = append(ln.Slice(), nt)
  3009  	} else {
  3010  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  3011  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  3012  		nptr1.SetBounded(true)
  3013  
  3014  		nptr1 = nod(OADDR, nptr1, nil)
  3015  
  3016  		nptr2 := nod(OSPTR, l2, nil)
  3017  
  3018  		fn := syslook("memmove")
  3019  		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3020  
  3021  		var ln Nodes
  3022  		ln.Set(l)
  3023  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
  3024  
  3025  		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
  3026  		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
  3027  		l = append(ln.Slice(), nt)
  3028  	}
  3029  
  3030  	typecheckslice(l, Etop)
  3031  	walkstmtlist(l)
  3032  	init.Append(l...)
  3033  	return s
  3034  }
  3035  
  3036  // Rewrite append(src, x, y, z) so that any side effects in
  3037  // x, y, z (including runtime panics) are evaluated in
  3038  // initialization statements before the append.
  3039  // For normal code generation, stop there and leave the
  3040  // rest to cgen_append.
  3041  //
  3042  // For race detector, expand append(src, a [, b]* ) to
  3043  //
  3044  //   init {
  3045  //     s := src
  3046  //     const argc = len(args) - 1
  3047  //     if cap(s) - len(s) < argc {
  3048  //	    s = growslice(s, len(s)+argc)
  3049  //     }
  3050  //     n := len(s)
  3051  //     s = s[:n+argc]
  3052  //     s[n] = a
  3053  //     s[n+1] = b
  3054  //     ...
  3055  //   }
  3056  //   s
  3057  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  3058  	if !samesafeexpr(dst, n.List.First()) {
  3059  		n.List.SetFirst(safeexpr(n.List.First(), init))
  3060  		n.List.SetFirst(walkexpr(n.List.First(), init))
  3061  	}
  3062  	walkexprlistsafe(n.List.Slice()[1:], init)
  3063  
  3064  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  3065  	// and n are name or literal, but those may index the slice we're
  3066  	// modifying here. Fix explicitly.
  3067  	// Using cheapexpr also makes sure that the evaluation
  3068  	// of all arguments (and especially any panics) happen
  3069  	// before we begin to modify the slice in a visible way.
  3070  	ls := n.List.Slice()[1:]
  3071  	for i, n := range ls {
  3072  		ls[i] = cheapexpr(n, init)
  3073  	}
  3074  
  3075  	nsrc := n.List.First()
  3076  
  3077  	argc := n.List.Len() - 1
  3078  	if argc < 1 {
  3079  		return nsrc
  3080  	}
  3081  
  3082  	// General case, with no function calls left as arguments.
  3083  	// Leave for gen, except that instrumentation requires old form.
  3084  	if !instrumenting || compiling_runtime {
  3085  		return n
  3086  	}
  3087  
  3088  	var l []*Node
  3089  
  3090  	ns := temp(nsrc.Type)
  3091  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  3092  
  3093  	na := nodintconst(int64(argc)) // const argc
  3094  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  3095  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  3096  
  3097  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  3098  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  3099  
  3100  	nx.Nbody.Set1(nod(OAS, ns,
  3101  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  3102  			nod(OADD, nod(OLEN, ns, nil), na))))
  3103  
  3104  	l = append(l, nx)
  3105  
  3106  	nn := temp(types.Types[TINT])
  3107  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  3108  
  3109  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  3110  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  3111  	nx.Etype = 1
  3112  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  3113  
  3114  	ls = n.List.Slice()[1:]
  3115  	for i, n := range ls {
  3116  		nx = nod(OINDEX, ns, nn) // s[n] ...
  3117  		nx.SetBounded(true)
  3118  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  3119  		if i+1 < len(ls) {
  3120  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  3121  		}
  3122  	}
  3123  
  3124  	typecheckslice(l, Etop)
  3125  	walkstmtlist(l)
  3126  	init.Append(l...)
  3127  	return ns
  3128  }
  3129  
  3130  // Lower copy(a, b) to a memmove call or a runtime call.
  3131  //
  3132  // init {
  3133  //   n := len(a)
  3134  //   if n > len(b) { n = len(b) }
  3135  //   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
  3136  // }
  3137  // n;
  3138  //
  3139  // Also works if b is a string.
  3140  //
  3141  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  3142  	if types.Haspointers(n.Left.Type.Elem()) {
  3143  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  3144  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  3145  	}
  3146  
  3147  	if runtimecall {
  3148  		var fn *Node
  3149  		if n.Right.Type.IsString() {
  3150  			fn = syslook("slicestringcopy")
  3151  		} else {
  3152  			fn = syslook("slicecopy")
  3153  		}
  3154  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3155  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  3156  	}
  3157  
  3158  	n.Left = walkexpr(n.Left, init)
  3159  	n.Right = walkexpr(n.Right, init)
  3160  	nl := temp(n.Left.Type)
  3161  	nr := temp(n.Right.Type)
  3162  	var l []*Node
  3163  	l = append(l, nod(OAS, nl, n.Left))
  3164  	l = append(l, nod(OAS, nr, n.Right))
  3165  
  3166  	nfrm := nod(OSPTR, nr, nil)
  3167  	nto := nod(OSPTR, nl, nil)
  3168  
  3169  	nlen := temp(types.Types[TINT])
  3170  
  3171  	// n = len(to)
  3172  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3173  
  3174  	// if n > len(frm) { n = len(frm) }
  3175  	nif := nod(OIF, nil, nil)
  3176  
  3177  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3178  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3179  	l = append(l, nif)
  3180  
  3181  	// Call memmove.
  3182  	fn := syslook("memmove")
  3183  
  3184  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3185  	nwid := temp(types.Types[TUINTPTR])
  3186  	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
  3187  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3188  	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
  3189  
  3190  	typecheckslice(l, Etop)
  3191  	walkstmtlist(l)
  3192  	init.Append(l...)
  3193  	return nlen
  3194  }
  3195  
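        // eqfor returns the function used to compare two values of type t: memequal
        // for plain memory layouts, with *needsize set to 1 so the caller appends a
        // size argument, or the generated .eq routine otherwise, with *needsize set
        // to 0.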
  3196  func eqfor(t *types.Type, needsize *int) *Node {
  3197  	// Should only arrive here with large memory or
  3198  	// a struct/array containing a non-memory field/element.
  3199  	// Small memory is handled inline, and single non-memory
  3200  	// is handled during type check (OCMPSTR etc).
  3201  	switch a, _ := algtype1(t); a {
  3202  	case AMEM:
  3203  		n := syslook("memequal")
  3204  		n = substArgTypes(n, t, t)
  3205  		*needsize = 1
  3206  		return n
  3207  	case ASPECIAL:
  3208  		sym := typesymprefix(".eq", t)
  3209  		n := newname(sym)
  3210  		n.SetClass(PFUNC)
  3211  		ntype := nod(OTFUNC, nil, nil)
  3212  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3213  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3214  		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
  3215  		ntype = typecheck(ntype, Etype)
  3216  		n.Type = ntype.Type
  3217  		*needsize = 0
  3218  		return n
  3219  	}
  3220  	Fatalf("eqfor %v", t)
  3221  	return nil
  3222  }
  3223  
  3224  // The result of walkcompare MUST be assigned back to n, e.g.
  3225  // 	n.Left = walkcompare(n.Left, init)
  3226  func walkcompare(n *Node, init *Nodes) *Node {
  3227  	// Given interface value l and concrete value r, rewrite
  3228  	//   l == r
  3229  	// into types-equal && data-equal.
  3230  	// This is efficient, avoids allocations, and avoids runtime calls.
  3231  	var l, r *Node
  3232  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3233  		l = n.Left
  3234  		r = n.Right
  3235  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3236  		l = n.Right
  3237  		r = n.Left
  3238  	}
  3239  
  3240  	if l != nil {
  3241  		// Handle both == and !=.
  3242  		eq := n.Op
  3243  		var andor Op
  3244  		if eq == OEQ {
  3245  			andor = OANDAND
  3246  		} else {
  3247  			andor = OOROR
  3248  		}
  3249  		// Check for types equal.
  3250  		// For empty interface, this is:
  3251  		//   l.tab == type(r)
  3252  		// For non-empty interface, this is:
  3253  		//   l.tab != nil && l.tab._type == type(r)
  3254  		var eqtype *Node
  3255  		tab := nod(OITAB, l, nil)
  3256  		rtyp := typename(r.Type)
  3257  		if l.Type.IsEmptyInterface() {
  3258  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3259  			tab.SetTypecheck(1)
  3260  			eqtype = nod(eq, tab, rtyp)
  3261  		} else {
  3262  			nonnil := nod(brcom(eq), nodnil(), tab)
  3263  			match := nod(eq, itabType(tab), rtyp)
  3264  			eqtype = nod(andor, nonnil, match)
  3265  		}
  3266  		// Check for data equal.
  3267  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3268  		// Put it all together.
  3269  		expr := nod(andor, eqtype, eqdata)
  3270  		n = finishcompare(n, expr, init)
  3271  		return n
  3272  	}
  3273  
  3274  	// Must be comparison of array or struct.
  3275  	// Otherwise back end handles it.
  3276  	// While we're here, decide whether to
  3277  	// inline or call an eq alg.
  3278  	t := n.Left.Type
  3279  	var inline bool
  3280  
  3281  	maxcmpsize := int64(4)
  3282  	unalignedLoad := false
  3283  	switch thearch.LinkArch.Family {
  3284  	case sys.AMD64, sys.ARM64, sys.S390X:
  3285  		// Keep this low enough to generate less code than a function call.
  3286  		maxcmpsize = 16
  3287  		unalignedLoad = true
  3288  	case sys.I386:
  3289  		maxcmpsize = 8
  3290  		unalignedLoad = true
  3291  	}
  3292  
  3293  	switch t.Etype {
  3294  	default:
  3295  		return n
  3296  	case TARRAY:
  3297  		// We can compare several elements at once with 2/4/8 byte integer compares
  3298  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3299  	case TSTRUCT:
  3300  		inline = t.NumFields() <= 4
  3301  	}
  3302  
  3303  	cmpl := n.Left
  3304  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3305  		cmpl = cmpl.Left
  3306  	}
  3307  	cmpr := n.Right
  3308  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3309  		cmpr = cmpr.Left
  3310  	}
  3311  
  3312  	// Chose not to inline. Call equality function directly.
  3313  	if !inline {
  3314  		if isvaluelit(cmpl) {
  3315  			var_ := temp(cmpl.Type)
  3316  			anylit(cmpl, var_, init)
  3317  			cmpl = var_
  3318  		}
  3319  		if isvaluelit(cmpr) {
  3320  			var_ := temp(cmpr.Type)
  3321  			anylit(cmpr, var_, init)
  3322  			cmpr = var_
  3323  		}
  3324  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3325  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3326  		}
  3327  
  3328  		// eq algs take pointers
  3329  		pl := temp(types.NewPtr(t))
  3330  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3331  		al.Right.Etype = 1 // addr does not escape
  3332  		al = typecheck(al, Etop)
  3333  		init.Append(al)
  3334  
  3335  		pr := temp(types.NewPtr(t))
  3336  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3337  		ar.Right.Etype = 1 // addr does not escape
  3338  		ar = typecheck(ar, Etop)
  3339  		init.Append(ar)
  3340  
  3341  		var needsize int
  3342  		call := nod(OCALL, eqfor(t, &needsize), nil)
  3343  		call.List.Append(pl)
  3344  		call.List.Append(pr)
  3345  		if needsize != 0 {
  3346  			call.List.Append(nodintconst(t.Width))
  3347  		}
  3348  		res := call
  3349  		if n.Op != OEQ {
  3350  			res = nod(ONOT, res, nil)
  3351  		}
  3352  		n = finishcompare(n, res, init)
  3353  		return n
  3354  	}
  3355  
  3356  	// inline: build boolean expression comparing element by element
  3357  	andor := OANDAND
  3358  	if n.Op == ONE {
  3359  		andor = OOROR
  3360  	}
  3361  	var expr *Node
  3362  	compare := func(el, er *Node) {
  3363  		a := nod(n.Op, el, er)
  3364  		if expr == nil {
  3365  			expr = a
  3366  		} else {
  3367  			expr = nod(andor, expr, a)
  3368  		}
  3369  	}
  3370  	cmpl = safeexpr(cmpl, init)
  3371  	cmpr = safeexpr(cmpr, init)
  3372  	if t.IsStruct() {
  3373  		for _, f := range t.Fields().Slice() {
  3374  			sym := f.Sym
  3375  			if sym.IsBlank() {
  3376  				continue
  3377  			}
  3378  			compare(
  3379  				nodSym(OXDOT, cmpl, sym),
  3380  				nodSym(OXDOT, cmpr, sym),
  3381  			)
  3382  		}
  3383  	} else {
  3384  		step := int64(1)
  3385  		remains := t.NumElem() * t.Elem().Width
  3386  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3387  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3388  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3389  		for i := int64(0); remains > 0; {
  3390  			var convType *types.Type
  3391  			switch {
  3392  			case remains >= 8 && combine64bit:
  3393  				convType = types.Types[TINT64]
  3394  				step = 8 / t.Elem().Width
  3395  			case remains >= 4 && combine32bit:
  3396  				convType = types.Types[TUINT32]
  3397  				step = 4 / t.Elem().Width
  3398  			case remains >= 2 && combine16bit:
  3399  				convType = types.Types[TUINT16]
  3400  				step = 2 / t.Elem().Width
  3401  			default:
  3402  				step = 1
  3403  			}
  3404  			if step == 1 {
  3405  				compare(
  3406  					nod(OINDEX, cmpl, nodintconst(int64(i))),
  3407  					nod(OINDEX, cmpr, nodintconst(int64(i))),
  3408  				)
  3409  				i++
  3410  				remains -= t.Elem().Width
  3411  			} else {
  3412  				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
  3413  				cmplw = conv(cmplw, convType)
  3414  				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
  3415  				cmprw = conv(cmprw, convType)
  3416  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3417  				// ssa will generate a single large load.
  3418  				for offset := int64(1); offset < step; offset++ {
  3419  					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
  3420  					lb = conv(lb, convType)
  3421  					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
  3422  					cmplw = nod(OOR, cmplw, lb)
  3423  					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
  3424  					rb = conv(rb, convType)
  3425  					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
  3426  					cmprw = nod(OOR, cmprw, rb)
  3427  				}
  3428  				compare(cmplw, cmprw)
  3429  				i += step
  3430  				remains -= step * t.Elem().Width
  3431  			}
  3432  		}
  3433  	}
  3434  	if expr == nil {
  3435  		expr = nodbool(n.Op == OEQ)
  3436  	}
  3437  	n = finishcompare(n, expr, init)
  3438  	return n
  3439  }
  3440  
  3441  // The result of finishcompare MUST be assigned back to n, e.g.
  3442  // 	n.Left = finishcompare(n.Left, x, init)
  3443  func finishcompare(n, r *Node, init *Nodes) *Node {
  3444  	// Use nn here to avoid passing r to typecheck.
  3445  	nn := r
  3446  	nn = typecheck(nn, Erv)
  3447  	nn = walkexpr(nn, init)
  3448  	r = nn
  3449  	if r.Type != n.Type {
  3450  		r = nod(OCONVNOP, r, nil)
  3451  		r.Type = n.Type
  3452  		r.SetTypecheck(1)
  3453  		nn = r
  3454  	}
  3455  	return nn
  3456  }
  3457  
  3458  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3459  func (n *Node) isIntOrdering() bool {
  3460  	switch n.Op {
  3461  	case OLE, OLT, OGE, OGT:
  3462  	default:
  3463  		return false
  3464  	}
  3465  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3466  }
  3467  
  3468  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3469  // n must be an OANDAND or OOROR node.
  3470  // The result of walkinrange MUST be assigned back to n, e.g.
  3471  // 	n.Left = walkinrange(n.Left, init)
  3472  func walkinrange(n *Node, init *Nodes) *Node {
  3473  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3474  	// * a, b, and c have integer type
  3475  	// * b is side-effect-free
  3476  	// * opl and opr are each < or ≤
  3477  	// * OP is &&
  3478  	l := n.Left
  3479  	r := n.Right
  3480  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3481  		return n
  3482  	}
  3483  
  3484  	// Find b, if it exists, and rename appropriately.
  3485  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3486  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3487  	a, opl, b := l.Left, l.Op, l.Right
  3488  	x, opr, c := r.Left, r.Op, r.Right
  3489  	for i := 0; ; i++ {
  3490  		if samesafeexpr(b, x) {
  3491  			break
  3492  		}
  3493  		if i == 3 {
  3494  			// Tried all permutations and couldn't find an appropriate b == x.
  3495  			return n
  3496  		}
  3497  		if i&1 == 0 {
  3498  			a, opl, b = b, brrev(opl), a
  3499  		} else {
  3500  			x, opr, c = c, brrev(opr), x
  3501  		}
  3502  	}
  3503  
  3504  	// If n.Op is ||, apply de Morgan.
  3505  	// Negate the internal ops now; we'll negate the top level op at the end.
  3506  	// Henceforth assume &&.
  3507  	negateResult := n.Op == OOROR
  3508  	if negateResult {
  3509  		opl = brcom(opl)
  3510  		opr = brcom(opr)
  3511  	}
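	// For example, x < 4 || x >= 10 is handled as its negation
	// 4 <= x && x < 10, and the final comparison is negated below.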
  3512  
  3513  	cmpdir := func(o Op) int {
  3514  		switch o {
  3515  		case OLE, OLT:
  3516  			return -1
  3517  		case OGE, OGT:
  3518  			return +1
  3519  		}
  3520  		Fatalf("walkinrange cmpdir %v", o)
  3521  		return 0
  3522  	}
  3523  	if cmpdir(opl) != cmpdir(opr) {
  3524  		// Not a range check; something like b < a && b < c.
  3525  		return n
  3526  	}
  3527  
  3528  	switch opl {
  3529  	case OGE, OGT:
  3530  		// We have something like a > b && b ≥ c.
  3531  		// Switch and reverse ops and rename constants,
  3532  		// to make it look like a ≤ b && b < c.
  3533  		a, c = c, a
  3534  		opl, opr = brrev(opr), brrev(opl)
  3535  	}
  3536  
  3537  	// We must ensure that c-a is non-negative.
  3538  	// For now, require a and c to be constants.
  3539  	// In the future, we could also support a == 0 and c == len/cap(...).
  3540  	// Unfortunately, by this point, most len/cap expressions have been
  3541  	// stored into temporary variables.
  3542  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3543  		return n
  3544  	}
  3545  
  3546  	if opl == OLT {
  3547  		// We have a < b && ...
  3548  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3549  		// If a is not the maximum constant for b's type,
  3550  		// we can increment a and switch to ≤.
  3551  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3552  			return n
  3553  		}
  3554  		a = nodintconst(a.Int64() + 1)
  3555  		opl = OLE
  3556  	}
  3557  
  3558  	bound := c.Int64() - a.Int64()
  3559  	if bound < 0 {
  3560  		// Bad news. Something like 5 <= x && x < 3.
  3561  		// Rare in practice, and we still need to generate side-effects,
  3562  		// so just leave it alone.
  3563  		return n
  3564  	}
  3565  
  3566  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3567  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3568  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3569  	// which is equivalent to uint(b-a) < uint(c-a).
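	// For the example in the function comment, 4 <= x && x < 10
	// becomes, roughly, uint(x-4) < 6.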
  3570  	ut := b.Type.ToUnsigned()
  3571  	lhs := conv(nod(OSUB, b, a), ut)
  3572  	rhs := nodintconst(bound)
  3573  	if negateResult {
  3574  		// Negate top level.
  3575  		opr = brcom(opr)
  3576  	}
  3577  	cmp := nod(opr, lhs, rhs)
  3578  	cmp.Pos = n.Pos
  3579  	cmp = addinit(cmp, l.Ninit.Slice())
  3580  	cmp = addinit(cmp, r.Ninit.Slice())
  3581  	// Typecheck the AST rooted at cmp...
  3582  	cmp = typecheck(cmp, Erv)
  3583  	// ...but then reset cmp's type to match n's type.
  3584  	cmp.Type = n.Type
  3585  	cmp = walkexpr(cmp, init)
  3586  	return cmp
  3587  }
  3588  
  3589  // bounded reports whether the integer expression n must be in the range [0, max).
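// For example, assuming x is an unsigned integer variable, x&15 and x%16
// are both bounded by 16, and for a uint8 x, x>>4 is bounded by 16 as well.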
  3590  func bounded(n *Node, max int64) bool {
  3591  	if n.Type == nil || !n.Type.IsInteger() {
  3592  		return false
  3593  	}
  3594  
  3595  	sign := n.Type.IsSigned()
  3596  	bits := int32(8 * n.Type.Width)
  3597  
  3598  	if smallintconst(n) {
  3599  		v := n.Int64()
  3600  		return 0 <= v && v < max
  3601  	}
  3602  
  3603  	switch n.Op {
  3604  	case OAND:
  3605  		v := int64(-1)
  3606  		if smallintconst(n.Left) {
  3607  			v = n.Left.Int64()
  3608  		} else if smallintconst(n.Right) {
  3609  			v = n.Right.Int64()
  3610  		}
  3611  
  3612  		if 0 <= v && v < max {
  3613  			return true
  3614  		}
  3615  
  3616  	case OMOD:
  3617  		if !sign && smallintconst(n.Right) {
  3618  			v := n.Right.Int64()
  3619  			if 0 <= v && v <= max {
  3620  				return true
  3621  			}
  3622  		}
  3623  
  3624  	case ODIV:
  3625  		if !sign && smallintconst(n.Right) {
  3626  			v := n.Right.Int64()
  3627  			for bits > 0 && v >= 2 {
  3628  				bits--
  3629  				v >>= 1
  3630  			}
  3631  		}
  3632  
  3633  	case ORSH:
  3634  		if !sign && smallintconst(n.Right) {
  3635  			v := n.Right.Int64()
  3636  			if v > int64(bits) {
  3637  				return true
  3638  			}
  3639  			bits -= int32(v)
  3640  		}
  3641  	}
  3642  
  3643  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3644  		return true
  3645  	}
  3646  
  3647  	return false
  3648  }
  3649  
  3650  // usemethod checks interface method calls for uses of reflect.Type.Method.
  3651  func usemethod(n *Node) {
  3652  	t := n.Left.Type
  3653  
  3654  	// Looking for either of:
  3655  	//	Method(int) reflect.Method
  3656  	//	MethodByName(string) (reflect.Method, bool)
  3657  	//
  3658  	// TODO(crawshaw): improve precision of match by working out
  3659  	//                 how to check the method name.
  3660  	if n := t.Params().NumFields(); n != 1 {
  3661  		return
  3662  	}
  3663  	if n := t.Results().NumFields(); n != 1 && n != 2 {
  3664  		return
  3665  	}
  3666  	p0 := t.Params().Field(0)
  3667  	res0 := t.Results().Field(0)
  3668  	var res1 *types.Field
  3669  	if t.Results().NumFields() == 2 {
  3670  		res1 = t.Results().Field(1)
  3671  	}
  3672  
  3673  	if res1 == nil {
  3674  		if p0.Type.Etype != TINT {
  3675  			return
  3676  		}
  3677  	} else {
  3678  		if !p0.Type.IsString() {
  3679  			return
  3680  		}
  3681  		if !res1.Type.IsBoolean() {
  3682  			return
  3683  		}
  3684  	}
  3685  
  3686  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3687  	//       (including global variables such as numImports - was issue #19028).
  3688  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3689  		Curfn.Func.SetReflectMethod(true)
  3690  	}
  3691  }
  3692  
  3693  func usefield(n *Node) {
  3694  	if objabi.Fieldtrack_enabled == 0 {
  3695  		return
  3696  	}
  3697  
  3698  	switch n.Op {
  3699  	default:
  3700  		Fatalf("usefield %v", n.Op)
  3701  
  3702  	case ODOT, ODOTPTR:
  3703  		break
  3704  	}
  3705  	if n.Sym == nil {
  3706  		// No field name.  This DOTPTR was built by the compiler for access
  3707  		// to runtime data structures.  Ignore.
  3708  		return
  3709  	}
  3710  
  3711  	t := n.Left.Type
  3712  	if t.IsPtr() {
  3713  		t = t.Elem()
  3714  	}
  3715  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3716  	if field == nil {
  3717  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3718  	}
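	// Only fields annotated for tracking are recorded; such a field is
	// declared with a struct tag roughly like
	//	type T struct {
	//		F int `go:"track"`
	//	}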
  3719  	if !strings.Contains(field.Note, "go:\"track\"") {
  3720  		return
  3721  	}
  3722  
  3723  	outer := n.Left.Type
  3724  	if outer.IsPtr() {
  3725  		outer = outer.Elem()
  3726  	}
  3727  	if outer.Sym == nil {
  3728  		yyerror("tracked field must be in named struct type")
  3729  	}
  3730  	if !exportname(field.Sym.Name) {
  3731  		yyerror("tracked field must be exported (upper case)")
  3732  	}
  3733  
  3734  	sym := tracksym(outer, field)
  3735  	if Curfn.Func.FieldTrack == nil {
  3736  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3737  	}
  3738  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3739  }
  3740  
  3741  func candiscardlist(l Nodes) bool {
  3742  	for _, n := range l.Slice() {
  3743  		if !candiscard(n) {
  3744  			return false
  3745  		}
  3746  	}
  3747  	return true
  3748  }
  3749  
  3750  func candiscard(n *Node) bool {
  3751  	if n == nil {
  3752  		return true
  3753  	}
  3754  
  3755  	switch n.Op {
  3756  	default:
  3757  		return false
  3758  
  3759  		// Discardable as long as the subpieces are.
  3760  	case ONAME,
  3761  		ONONAME,
  3762  		OTYPE,
  3763  		OPACK,
  3764  		OLITERAL,
  3765  		OADD,
  3766  		OSUB,
  3767  		OOR,
  3768  		OXOR,
  3769  		OADDSTR,
  3770  		OADDR,
  3771  		OANDAND,
  3772  		OARRAYBYTESTR,
  3773  		OARRAYRUNESTR,
  3774  		OSTRARRAYBYTE,
  3775  		OSTRARRAYRUNE,
  3776  		OCAP,
  3777  		OCMPIFACE,
  3778  		OCMPSTR,
  3779  		OCOMPLIT,
  3780  		OMAPLIT,
  3781  		OSTRUCTLIT,
  3782  		OARRAYLIT,
  3783  		OSLICELIT,
  3784  		OPTRLIT,
  3785  		OCONV,
  3786  		OCONVIFACE,
  3787  		OCONVNOP,
  3788  		ODOT,
  3789  		OEQ,
  3790  		ONE,
  3791  		OLT,
  3792  		OLE,
  3793  		OGT,
  3794  		OGE,
  3795  		OKEY,
  3796  		OSTRUCTKEY,
  3797  		OLEN,
  3798  		OMUL,
  3799  		OLSH,
  3800  		ORSH,
  3801  		OAND,
  3802  		OANDNOT,
  3803  		ONEW,
  3804  		ONOT,
  3805  		OCOM,
  3806  		OPLUS,
  3807  		OMINUS,
  3808  		OOROR,
  3809  		OPAREN,
  3810  		ORUNESTR,
  3811  		OREAL,
  3812  		OIMAG,
  3813  		OCOMPLEX:
  3814  		break
  3815  
  3816  		// Discardable as long as we know it's not division by zero.
  3817  	case ODIV, OMOD:
  3818  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3819  			break
  3820  		}
  3821  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3822  			break
  3823  		}
  3824  		return false
  3825  
  3826  		// Discardable as long as we know it won't fail because of a bad size.
  3827  	case OMAKECHAN, OMAKEMAP:
  3828  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3829  			break
  3830  		}
  3831  		return false
  3832  
  3833  		// Difficult to tell what sizes are okay.
  3834  	case OMAKESLICE:
  3835  		return false
  3836  	}
  3837  
  3838  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3839  		return false
  3840  	}
  3841  
  3842  	return true
  3843  }
  3844  
  3845  // rewrite
  3846  //	print(x, y, z)
  3847  // into
  3848  //	func(a1, a2, a3) {
  3849  //		print(a1, a2, a3)
  3850  //	}(x, y, z)
  3851  // and same for println.
  3852  
  3853  var walkprintfunc_prgen int
  3854  
  3855  // The result of walkprintfunc MUST be assigned back to n, e.g.
  3856  // 	n.Left = walkprintfunc(n.Left, init)
  3857  func walkprintfunc(n *Node, init *Nodes) *Node {
  3858  	if n.Ninit.Len() != 0 {
  3859  		walkstmtlist(n.Ninit.Slice())
  3860  		init.AppendNodes(&n.Ninit)
  3861  	}
  3862  
  3863  	t := nod(OTFUNC, nil, nil)
  3864  	num := 0
  3865  	var printargs []*Node
  3866  	var a *Node
  3867  	var buf string
  3868  	for _, n1 := range n.List.Slice() {
  3869  		buf = fmt.Sprintf("a%d", num)
  3870  		num++
  3871  		a = namedfield(buf, n1.Type)
  3872  		t.List.Append(a)
  3873  		printargs = append(printargs, a.Left)
  3874  	}
  3875  
  3876  	oldfn := Curfn
  3877  	Curfn = nil
  3878  
  3879  	walkprintfunc_prgen++
  3880  	sym := lookupN("print·%d", walkprintfunc_prgen)
  3881  	fn := dclfunc(sym, t)
  3882  
  3883  	a = nod(n.Op, nil, nil)
  3884  	a.List.Set(printargs)
  3885  	a = typecheck(a, Etop)
  3886  	a = walkstmt(a)
  3887  
  3888  	fn.Nbody.Set1(a)
  3889  
  3890  	funcbody()
  3891  
  3892  	fn = typecheck(fn, Etop)
  3893  	typecheckslice(fn.Nbody.Slice(), Etop)
  3894  	xtop = append(xtop, fn)
  3895  	Curfn = oldfn
  3896  
  3897  	a = nod(OCALL, nil, nil)
  3898  	a.Left = fn.Func.Nname
  3899  	a.List.Set(n.List.Slice())
  3900  	a = typecheck(a, Etop)
  3901  	a = walkexpr(a, init)
  3902  	return a
  3903  }
  3904  
  3905  // substArgTypes substitutes the given list of types for
  3906  // successive occurrences of the "any" placeholder in the
  3907  // type syntax expression n.Type.
  3908  // The result of substArgTypes MUST be assigned back to old, e.g.
  3909  // 	n.Left = substArgTypes(n.Left, t1, t2)
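// For example, runtime helpers looked up with syslook are declared with
// "any" placeholders; substituting a concrete element type t for each
// placeholder might look roughly like
//	fn := syslook("growslice")
//	fn = substArgTypes(fn, t, t)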
  3910  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3911  	n := *old // make shallow copy
  3912  
  3913  	for _, t := range types_ {
  3914  		dowidth(t)
  3915  	}
  3916  	n.Type = types.SubstAny(n.Type, &types_)
  3917  	if len(types_) > 0 {
  3918  		Fatalf("substArgTypes: too many argument types")
  3919  	}
  3920  	return &n
  3921  }