github.com/mangodowner/go-gm@v0.0.0-20180818020936-8baa2bd4408c/src/cmd/compile/internal/gc/walk.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package gc
     6  
     7  import (
     8  	"cmd/compile/internal/types"
     9  	"cmd/internal/objabi"
    10  	"cmd/internal/sys"
    11  	"fmt"
    12  	"strings"
    13  )
    14  
    15  // The constant is known to the runtime.
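        // It is the size of the stack buffer handed to the string conversion
        // helpers below (e.g. slicebytetostring takes a *[32]byte).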
    16  const (
    17  	tmpstringbufsize = 32
    18  )
    19  
    20  func walk(fn *Node) {
    21  	Curfn = fn
    22  
    23  	if Debug['W'] != 0 {
    24  		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
    25  		dumplist(s, Curfn.Nbody)
    26  	}
    27  
    28  	lno := lineno
    29  
    30  	// Final typecheck for any unused variables.
    31  	for i, ln := range fn.Func.Dcl {
    32  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    33  			ln = typecheck(ln, Erv|Easgn)
    34  			fn.Func.Dcl[i] = ln
    35  		}
    36  	}
    37  
    38  	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    39  	for _, ln := range fn.Func.Dcl {
    40  		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    41  			ln.Name.Defn.Left.Name.SetUsed(true)
    42  		}
    43  	}
    44  
    45  	for _, ln := range fn.Func.Dcl {
    46  		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    47  			continue
    48  		}
    49  		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    50  			if defn.Left.Name.Used() {
    51  				continue
    52  			}
    53  			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    54  			defn.Left.Name.SetUsed(true) // suppress repeats
    55  		} else {
    56  			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    57  		}
    58  	}
    59  
    60  	lineno = lno
    61  	if nerrors != 0 {
    62  		return
    63  	}
    64  	walkstmtlist(Curfn.Nbody.Slice())
    65  	if Debug['W'] != 0 {
    66  		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    67  		dumplist(s, Curfn.Nbody)
    68  	}
    69  
    70  	zeroResults()
    71  	heapmoves()
    72  	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    73  		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    74  		dumplist(s, Curfn.Func.Enter)
    75  	}
    76  }
    77  
    78  func walkstmtlist(s []*Node) {
    79  	for i := range s {
    80  		s[i] = walkstmt(s[i])
    81  	}
    82  }
    83  
    84  func samelist(a, b []*Node) bool {
    85  	if len(a) != len(b) {
    86  		return false
    87  	}
    88  	for i, n := range a {
    89  		if n != b[i] {
    90  			return false
    91  		}
    92  	}
    93  	return true
    94  }
    95  
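        // paramoutheap reports whether fn has at least one result parameter that
        // is heap-allocated or has had its address taken; if so, return statements
        // must assign the results explicitly (see the ORETURN case in walkstmt).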
    96  func paramoutheap(fn *Node) bool {
    97  	for _, ln := range fn.Func.Dcl {
    98  		switch ln.Class() {
    99  		case PPARAMOUT:
   100  			if ln.isParamStackCopy() || ln.Addrtaken() {
   101  				return true
   102  			}
   103  
   104  		case PAUTO:
   105  			// stop early - parameters are over
   106  			return false
   107  		}
   108  	}
   109  
   110  	return false
   111  }
   112  
   113  // adjustargs adds "adjust" to all the argument locations for the call n.
   114  // n must be a defer or go node that has already been walked.
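        // For ODEFER and OPROC nodes the runtime's deferproc/newproc take the
        // argument size and the function value first, so callers pass
        // adjust = 2*Widthptr to shift the real call arguments past them.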
   115  func adjustargs(n *Node, adjust int) {
   116  	var arg *Node
   117  	var lhs *Node
   118  
   119  	callfunc := n.Left
   120  	for _, arg = range callfunc.List.Slice() {
   121  		if arg.Op != OAS {
   122  			Fatalf("call arg not assignment")
   123  		}
   124  		lhs = arg.Left
   125  		if lhs.Op == ONAME {
   126  			// This is a temporary introduced by reorder1.
   127  			// The real store to the stack appears later in the arg list.
   128  			continue
   129  		}
   130  
   131  		if lhs.Op != OINDREGSP {
   132  			Fatalf("call argument store does not use OINDREGSP")
   133  		}
   134  
   135  		// can't really check this in machine-indep code.
   136  		//if(lhs->val.u.reg != D_SP)
   137  		//      Fatalf("call arg assign not indreg(SP)")
   138  		lhs.Xoffset += int64(adjust)
   139  	}
   140  }
   141  
   142  // The result of walkstmt MUST be assigned back to n, e.g.
   143  // 	n.Left = walkstmt(n.Left)
   144  func walkstmt(n *Node) *Node {
   145  	if n == nil {
   146  		return n
   147  	}
   148  
   149  	setlineno(n)
   150  
   151  	walkstmtlist(n.Ninit.Slice())
   152  
   153  	switch n.Op {
   154  	default:
   155  		if n.Op == ONAME {
   156  			yyerror("%v is not a top level statement", n.Sym)
   157  		} else {
   158  			yyerror("%v is not a top level statement", n.Op)
   159  		}
   160  		Dump("nottop", n)
   161  
   162  	case OAS,
   163  		OASOP,
   164  		OAS2,
   165  		OAS2DOTTYPE,
   166  		OAS2RECV,
   167  		OAS2FUNC,
   168  		OAS2MAPR,
   169  		OCLOSE,
   170  		OCOPY,
   171  		OCALLMETH,
   172  		OCALLINTER,
   173  		OCALL,
   174  		OCALLFUNC,
   175  		ODELETE,
   176  		OSEND,
   177  		OPRINT,
   178  		OPRINTN,
   179  		OPANIC,
   180  		OEMPTY,
   181  		ORECOVER,
   182  		OGETG:
   183  		if n.Typecheck() == 0 {
   184  			Fatalf("missing typecheck: %+v", n)
   185  		}
   186  		wascopy := n.Op == OCOPY
   187  		init := n.Ninit
   188  		n.Ninit.Set(nil)
   189  		n = walkexpr(n, &init)
   190  		n = addinit(n, init.Slice())
   191  		if wascopy && n.Op == OCONVNOP {
   192  			n.Op = OEMPTY // don't leave plain values as statements.
   193  		}
   194  
   195  	// special case for a receive where we throw away
   196  	// the value received.
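        	// The statement form <-c is lowered to chanrecv1(c, nil), which
        	// discards the received value.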
   197  	case ORECV:
   198  		if n.Typecheck() == 0 {
   199  			Fatalf("missing typecheck: %+v", n)
   200  		}
   201  		init := n.Ninit
   202  		n.Ninit.Set(nil)
   203  
   204  		n.Left = walkexpr(n.Left, &init)
   205  		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   206  		n = walkexpr(n, &init)
   207  
   208  		n = addinit(n, init.Slice())
   209  
   210  	case OBREAK,
   211  		OCONTINUE,
   212  		OFALL,
   213  		OGOTO,
   214  		OLABEL,
   215  		ODCLCONST,
   216  		ODCLTYPE,
   217  		OCHECKNIL,
   218  		OVARKILL,
   219  		OVARLIVE:
   220  		break
   221  
   222  	case ODCL:
   223  		v := n.Left
   224  		if v.Class() == PAUTOHEAP {
   225  			if compiling_runtime {
   226  				yyerror("%v escapes to heap, not allowed in runtime.", v)
   227  			}
   228  			if prealloc[v] == nil {
   229  				prealloc[v] = callnew(v.Type)
   230  			}
   231  			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   232  			nn.SetColas(true)
   233  			nn = typecheck(nn, Etop)
   234  			return walkstmt(nn)
   235  		}
   236  
   237  	case OBLOCK:
   238  		walkstmtlist(n.List.Slice())
   239  
   240  	case OXCASE:
   241  		yyerror("case statement out of place")
   242  		n.Op = OCASE
   243  		fallthrough
   244  
   245  	case OCASE:
   246  		n.Right = walkstmt(n.Right)
   247  
   248  	case ODEFER:
   249  		Curfn.Func.SetHasDefer(true)
   250  		switch n.Left.Op {
   251  		case OPRINT, OPRINTN:
   252  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   253  
   254  		case OCOPY:
   255  			n.Left = copyany(n.Left, &n.Ninit, true)
   256  
   257  		default:
   258  			n.Left = walkexpr(n.Left, &n.Ninit)
   259  		}
   260  
   261  		// make room for size & fn arguments.
   262  		adjustargs(n, 2*Widthptr)
   263  
   264  	case OFOR, OFORUNTIL:
   265  		if n.Left != nil {
   266  			walkstmtlist(n.Left.Ninit.Slice())
   267  			init := n.Left.Ninit
   268  			n.Left.Ninit.Set(nil)
   269  			n.Left = walkexpr(n.Left, &init)
   270  			n.Left = addinit(n.Left, init.Slice())
   271  		}
   272  
   273  		n.Right = walkstmt(n.Right)
   274  		walkstmtlist(n.Nbody.Slice())
   275  
   276  	case OIF:
   277  		n.Left = walkexpr(n.Left, &n.Ninit)
   278  		walkstmtlist(n.Nbody.Slice())
   279  		walkstmtlist(n.Rlist.Slice())
   280  
   281  	case OPROC:
   282  		switch n.Left.Op {
   283  		case OPRINT, OPRINTN:
   284  			n.Left = walkprintfunc(n.Left, &n.Ninit)
   285  
   286  		case OCOPY:
   287  			n.Left = copyany(n.Left, &n.Ninit, true)
   288  
   289  		default:
   290  			n.Left = walkexpr(n.Left, &n.Ninit)
   291  		}
   292  
   293  		// make room for size & fn arguments.
   294  		adjustargs(n, 2*Widthptr)
   295  
   296  	case ORETURN:
   297  		walkexprlist(n.List.Slice(), &n.Ninit)
   298  		if n.List.Len() == 0 {
   299  			break
   300  		}
   301  		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   302  			// assign to the function out parameters,
   303  			// so that reorder3 can fix up conflicts
   304  			var rl []*Node
   305  
   306  			var cl Class
   307  			for _, ln := range Curfn.Func.Dcl {
   308  				cl = ln.Class()
   309  				if cl == PAUTO || cl == PAUTOHEAP {
   310  					break
   311  				}
   312  				if cl == PPARAMOUT {
   313  					if ln.isParamStackCopy() {
   314  						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
   315  					}
   316  					rl = append(rl, ln)
   317  				}
   318  			}
   319  
   320  			if got, want := n.List.Len(), len(rl); got != want {
   321  				// order should have rewritten multi-value function calls
   322  				// with explicit OAS2FUNC nodes.
   323  				Fatalf("expected %v return arguments, have %v", want, got)
   324  			}
   325  
   326  			if samelist(rl, n.List.Slice()) {
   327  				// special return in disguise
   328  				n.List.Set(nil)
   329  
   330  				break
   331  			}
   332  
   333  			// move function calls out, to make reorder3's job easier.
   334  			walkexprlistsafe(n.List.Slice(), &n.Ninit)
   335  
   336  			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   337  			n.List.Set(reorder3(ll))
   338  			break
   339  		}
   340  
   341  		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
   342  		n.List.Set(ll)
   343  
   344  	case ORETJMP:
   345  		break
   346  
   347  	case OSELECT:
   348  		walkselect(n)
   349  
   350  	case OSWITCH:
   351  		walkswitch(n)
   352  
   353  	case ORANGE:
   354  		n = walkrange(n)
   355  
   356  	case OXFALL:
   357  		yyerror("fallthrough statement out of place")
   358  		n.Op = OFALL
   359  	}
   360  
   361  	if n.Op == ONAME {
   362  		Fatalf("walkstmt ended up with name: %+v", n)
   363  	}
   364  	return n
   365  }
   366  
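        // isSmallMakeSlice reports whether n is a make([]T, len, cap) call with
        // constant len and cap whose backing array is smaller than 1<<16 bytes,
        // making it a candidate for stack allocation when it does not escape.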
   367  func isSmallMakeSlice(n *Node) bool {
   368  	if n.Op != OMAKESLICE {
   369  		return false
   370  	}
   371  	l := n.Left
   372  	r := n.Right
   373  	if r == nil {
   374  		r = l
   375  	}
   376  	t := n.Type
   377  
   378  	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
   379  }
   380  
   381  // walk the whole tree of the body of an
   382  // expression or simple statement.
   383  // The types of expressions are calculated.
   384  // Compile-time constants are evaluated.
   385  // Complex side effects like statements are appended to init.
   386  func walkexprlist(s []*Node, init *Nodes) {
   387  	for i := range s {
   388  		s[i] = walkexpr(s[i], init)
   389  	}
   390  }
   391  
   392  func walkexprlistsafe(s []*Node, init *Nodes) {
   393  	for i, n := range s {
   394  		s[i] = safeexpr(n, init)
   395  		s[i] = walkexpr(s[i], init)
   396  	}
   397  }
   398  
   399  func walkexprlistcheap(s []*Node, init *Nodes) {
   400  	for i, n := range s {
   401  		s[i] = cheapexpr(n, init)
   402  		s[i] = walkexpr(s[i], init)
   403  	}
   404  }
   405  
   406  // Build name of function for interface conversion.
   407  // Not all names are possible
   408  // (e.g., we'll never generate convE2E or convE2I or convI2E).
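        // For example, converting a string to an empty interface uses convT2Estring,
        // and converting a pointer-free 4-byte, 4-aligned type to a non-empty
        // interface uses convT2I32.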
   409  func convFuncName(from, to *types.Type) string {
   410  	tkind := to.Tie()
   411  	switch from.Tie() {
   412  	case 'I':
   413  		switch tkind {
   414  		case 'I':
   415  			return "convI2I"
   416  		}
   417  	case 'T':
   418  		switch tkind {
   419  		case 'E':
   420  			switch {
   421  			case from.Size() == 2 && from.Align == 2:
   422  				return "convT2E16"
   423  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   424  				return "convT2E32"
   425  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   426  				return "convT2E64"
   427  			case from.IsString():
   428  				return "convT2Estring"
   429  			case from.IsSlice():
   430  				return "convT2Eslice"
   431  			case !types.Haspointers(from):
   432  				return "convT2Enoptr"
   433  			}
   434  			return "convT2E"
   435  		case 'I':
   436  			switch {
   437  			case from.Size() == 2 && from.Align == 2:
   438  				return "convT2I16"
   439  			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   440  				return "convT2I32"
   441  			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   442  				return "convT2I64"
   443  			case from.IsString():
   444  				return "convT2Istring"
   445  			case from.IsSlice():
   446  				return "convT2Islice"
   447  			case !types.Haspointers(from):
   448  				return "convT2Inoptr"
   449  			}
   450  			return "convT2I"
   451  		}
   452  	}
   453  	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   454  	panic("unreachable")
   455  }
   456  
   457  // The result of walkexpr MUST be assigned back to n, e.g.
   458  // 	n.Left = walkexpr(n.Left, init)
   459  func walkexpr(n *Node, init *Nodes) *Node {
   460  	if n == nil {
   461  		return n
   462  	}
   463  
   464  	// Eagerly checkwidth all expressions for the back end.
   465  	if n.Type != nil && !n.Type.WidthCalculated() {
   466  		switch n.Type.Etype {
   467  		case TBLANK, TNIL, TIDEAL:
   468  		default:
   469  			checkwidth(n.Type)
   470  		}
   471  	}
   472  
   473  	if init == &n.Ninit {
   474  		// not okay to use n->ninit when walking n,
   475  		// because we might replace n with some other node
   476  		// and would lose the init list.
   477  		Fatalf("walkexpr init == &n->ninit")
   478  	}
   479  
   480  	if n.Ninit.Len() != 0 {
   481  		walkstmtlist(n.Ninit.Slice())
   482  		init.AppendNodes(&n.Ninit)
   483  	}
   484  
   485  	lno := setlineno(n)
   486  
   487  	if Debug['w'] > 1 {
   488  		Dump("walk-before", n)
   489  	}
   490  
   491  	if n.Typecheck() != 1 {
   492  		Fatalf("missed typecheck: %+v", n)
   493  	}
   494  
   495  	if n.Op == ONAME && n.Class() == PAUTOHEAP {
   496  		nn := nod(OIND, n.Name.Param.Heapaddr, nil)
   497  		nn = typecheck(nn, Erv)
   498  		nn = walkexpr(nn, init)
   499  		nn.Left.SetNonNil(true)
   500  		return nn
   501  	}
   502  
   503  opswitch:
   504  	switch n.Op {
   505  	default:
   506  		Dump("walk", n)
   507  		Fatalf("walkexpr: switch 1 unknown op %+S", n)
   508  
   509  	case ONONAME, OINDREGSP, OEMPTY, OGETG:
   510  
   511  	case OTYPE, ONAME, OLITERAL:
   512  		// TODO(mdempsky): Just return n; see discussion on CL 38655.
   513  		// Perhaps refactor to use Node.mayBeShared for these instead.
   514  		// If these return early, make sure to still call
   515  		// stringsym for constant strings.
   516  
   517  	case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   518  		OIND, OSPTR, OITAB, OIDATA, OADDR:
   519  		n.Left = walkexpr(n.Left, init)
   520  
   521  	case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR:
   522  		n.Left = walkexpr(n.Left, init)
   523  		n.Right = walkexpr(n.Right, init)
   524  
   525  	case ODOT:
   526  		usefield(n)
   527  		n.Left = walkexpr(n.Left, init)
   528  
   529  	case ODOTTYPE, ODOTTYPE2:
   530  		n.Left = walkexpr(n.Left, init)
   531  		// Set up interface type addresses for back end.
   532  		n.Right = typename(n.Type)
   533  		if n.Op == ODOTTYPE {
   534  			n.Right.Right = typename(n.Left.Type)
   535  		}
   536  		if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   537  			n.List.Set1(itabname(n.Type, n.Left.Type))
   538  		}
   539  
   540  	case ODOTPTR:
   541  		usefield(n)
   542  		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
   543  			// No actual copy will be generated, so emit an explicit nil check.
   544  			n.Left = cheapexpr(n.Left, init)
   545  
   546  			checknil(n.Left, init)
   547  		}
   548  
   549  		n.Left = walkexpr(n.Left, init)
   550  
   551  	case OLEN, OCAP:
   552  		n.Left = walkexpr(n.Left, init)
   553  
   554  		// replace len(*[10]int) with 10.
   555  		// delayed until now to preserve side effects.
   556  		t := n.Left.Type
   557  
   558  		if t.IsPtr() {
   559  			t = t.Elem()
   560  		}
   561  		if t.IsArray() {
   562  			safeexpr(n.Left, init)
   563  			nodconst(n, n.Type, t.NumElem())
   564  			n.SetTypecheck(1)
   565  		}
   566  
   567  	case OLSH, ORSH:
   568  		n.Left = walkexpr(n.Left, init)
   569  		n.Right = walkexpr(n.Right, init)
   570  		t := n.Left.Type
   571  		n.SetBounded(bounded(n.Right, 8*t.Width))
   572  		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
   573  			Warn("shift bounds check elided")
   574  		}
   575  
   576  	case OCOMPLEX:
   577  		// Use results from call expression as arguments for complex.
   578  		if n.Left == nil && n.Right == nil {
   579  			n.Left = n.List.First()
   580  			n.Right = n.List.Second()
   581  		}
   582  		n.Left = walkexpr(n.Left, init)
   583  		n.Right = walkexpr(n.Right, init)
   584  
   585  	case OEQ, ONE:
   586  		n.Left = walkexpr(n.Left, init)
   587  		n.Right = walkexpr(n.Right, init)
   588  
   589  		// Disable safemode while compiling this code: the code we
   590  		// generate internally can refer to unsafe.Pointer.
   591  		// In this case it can happen if we need to generate an ==
   592  		// for a struct containing a reflect.Value, which itself has
   593  		// an unexported field of type unsafe.Pointer.
   594  		old_safemode := safemode
   595  		safemode = false
   596  		n = walkcompare(n, init)
   597  		safemode = old_safemode
   598  
   599  	case OANDAND, OOROR:
   600  		n.Left = walkexpr(n.Left, init)
   601  
   602  		// cannot put side effects from n.Right on init,
   603  		// because they cannot run before n.Left is checked.
   604  		// save elsewhere and store on the eventual n.Right.
   605  		var ll Nodes
   606  
   607  		n.Right = walkexpr(n.Right, &ll)
   608  		n.Right = addinit(n.Right, ll.Slice())
   609  		n = walkinrange(n, init)
   610  
   611  	case OPRINT, OPRINTN:
   612  		walkexprlist(n.List.Slice(), init)
   613  		n = walkprint(n, init)
   614  
   615  	case OPANIC:
   616  		n = mkcall("gopanic", nil, init, n.Left)
   617  
   618  	case ORECOVER:
   619  		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   620  
   621  	case OCLOSUREVAR, OCFUNC:
   622  		n.SetAddable(true)
   623  
   624  	case OCALLINTER:
   625  		usemethod(n)
   626  		t := n.Left.Type
   627  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   628  			break
   629  		}
   630  		n.Left = walkexpr(n.Left, init)
   631  		walkexprlist(n.List.Slice(), init)
   632  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   633  		n.List.Set(reorder1(ll))
   634  
   635  	case OCALLFUNC:
   636  		if n.Left.Op == OCLOSURE {
   637  			// Transform direct call of a closure to call of a normal function.
   638  			// transformclosure already did all preparation work.
   639  
   640  			// Prepend captured variables to argument list.
   641  			n.List.Prepend(n.Left.Func.Enter.Slice()...)
   642  
   643  			n.Left.Func.Enter.Set(nil)
   644  
   645  			// Replace OCLOSURE with ONAME/PFUNC.
   646  			n.Left = n.Left.Func.Closure.Func.Nname
   647  
   648  			// Update type of OCALLFUNC node.
   649  			// Output arguments have not changed, but their offsets could have.
   650  			if n.Left.Type.Results().NumFields() == 1 {
   651  				n.Type = n.Left.Type.Results().Field(0).Type
   652  			} else {
   653  				n.Type = n.Left.Type.Results()
   654  			}
   655  		}
   656  
   657  		t := n.Left.Type
   658  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   659  			break
   660  		}
   661  
   662  		n.Left = walkexpr(n.Left, init)
   663  		walkexprlist(n.List.Slice(), init)
   664  
   665  		ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   666  		n.List.Set(reorder1(ll))
   667  
   668  	case OCALLMETH:
   669  		t := n.Left.Type
   670  		if n.List.Len() != 0 && n.List.First().Op == OAS {
   671  			break
   672  		}
   673  		n.Left = walkexpr(n.Left, init)
   674  		walkexprlist(n.List.Slice(), init)
   675  		ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
   676  		lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init)
   677  		ll = append(ll, lr...)
   678  		n.Left.Left = nil
   679  		updateHasCall(n.Left)
   680  		n.List.Set(reorder1(ll))
   681  
   682  	case OAS:
   683  		init.AppendNodes(&n.Ninit)
   684  
   685  		n.Left = walkexpr(n.Left, init)
   686  		n.Left = safeexpr(n.Left, init)
   687  
   688  		if oaslit(n, init) {
   689  			break
   690  		}
   691  
   692  		if n.Right == nil {
   693  			// TODO(austin): Check all "implicit zeroing"
   694  			break
   695  		}
   696  
   697  		if !instrumenting && iszero(n.Right) {
   698  			break
   699  		}
   700  
   701  		switch n.Right.Op {
   702  		default:
   703  			n.Right = walkexpr(n.Right, init)
   704  
   705  		case ORECV:
   706  			// x = <-c; n.Left is x, n.Right.Left is c.
   707  			// orderstmt made sure x is addressable.
   708  			n.Right.Left = walkexpr(n.Right.Left, init)
   709  
   710  			n1 := nod(OADDR, n.Left, nil)
   711  			r := n.Right.Left // the channel
   712  			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   713  			n = walkexpr(n, init)
   714  			break opswitch
   715  
   716  		case OAPPEND:
   717  			// x = append(...)
   718  			r := n.Right
   719  			if r.Type.Elem().NotInHeap() {
   720  				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   721  			}
   722  			if r.Isddd() {
   723  				r = appendslice(r, init) // also works for append(slice, string).
   724  			} else {
   725  				r = walkappend(r, init, n)
   726  			}
   727  			n.Right = r
   728  			if r.Op == OAPPEND {
   729  				// Left in place for back end.
   730  				// Do not add a new write barrier.
   731  				// Set up address of type for back end.
   732  				r.Left = typename(r.Type.Elem())
   733  				break opswitch
   734  			}
   735  			// Otherwise, lowered for race detector.
   736  			// Treat as ordinary assignment.
   737  		}
   738  
   739  		if n.Left != nil && n.Right != nil {
   740  			n = convas(n, init)
   741  		}
   742  
   743  	case OAS2:
   744  		init.AppendNodes(&n.Ninit)
   745  		walkexprlistsafe(n.List.Slice(), init)
   746  		walkexprlistsafe(n.Rlist.Slice(), init)
   747  		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   748  		ll = reorder3(ll)
   749  		n = liststmt(ll)
   750  
   751  	// a,b,... = fn()
   752  	case OAS2FUNC:
   753  		init.AppendNodes(&n.Ninit)
   754  
   755  		r := n.Rlist.First()
   756  		walkexprlistsafe(n.List.Slice(), init)
   757  		r = walkexpr(r, init)
   758  
   759  		if isIntrinsicCall(r) {
   760  			n.Rlist.Set1(r)
   761  			break
   762  		}
   763  		init.Append(r)
   764  
   765  		ll := ascompatet(n.Op, n.List, r.Type)
   766  		n = liststmt(ll)
   767  
   768  	// x, y = <-c
   769  	// orderstmt made sure x is addressable.
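        	// This is lowered to y = chanrecv2(c, &x), passing nil instead of &x
        	// when x is the blank identifier.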
   770  	case OAS2RECV:
   771  		init.AppendNodes(&n.Ninit)
   772  
   773  		r := n.Rlist.First()
   774  		walkexprlistsafe(n.List.Slice(), init)
   775  		r.Left = walkexpr(r.Left, init)
   776  		var n1 *Node
   777  		if isblank(n.List.First()) {
   778  			n1 = nodnil()
   779  		} else {
   780  			n1 = nod(OADDR, n.List.First(), nil)
   781  		}
   782  		n1.Etype = 1 // addr does not escape
   783  		fn := chanfn("chanrecv2", 2, r.Left.Type)
   784  		ok := n.List.Second()
   785  		call := mkcall1(fn, ok.Type, init, r.Left, n1)
   786  		n = nod(OAS, ok, call)
   787  		n = typecheck(n, Etop)
   788  
   789  	// a,b = m[i]
   790  	case OAS2MAPR:
   791  		init.AppendNodes(&n.Ninit)
   792  
   793  		r := n.Rlist.First()
   794  		walkexprlistsafe(n.List.Slice(), init)
   795  		r.Left = walkexpr(r.Left, init)
   796  		r.Right = walkexpr(r.Right, init)
   797  		t := r.Left.Type
   798  
   799  		fast := mapfast(t)
   800  		var key *Node
   801  		if fast != mapslow {
   802  			// fast versions take key by value
   803  			key = r.Right
   804  		} else {
   805  			// standard version takes key by reference
   806  			// orderexpr made sure key is addressable.
   807  			key = nod(OADDR, r.Right, nil)
   808  		}
   809  
   810  		// from:
   811  		//   a,b = m[i]
   812  		// to:
   813  		//   var,b = mapaccess2*(t, m, i)
   814  		//   a = *var
   815  		a := n.List.First()
   816  
   817  		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
   818  			fn := mapfn(mapaccess2[fast], t)
   819  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   820  		} else {
   821  			fn := mapfn("mapaccess2_fat", t)
   822  			z := zeroaddr(w)
   823  			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   824  		}
   825  
   826  		// mapaccess2* returns a typed bool, but due to spec changes,
   827  		// the boolean result of i.(T) is now untyped so we make it the
   828  		// same type as the variable on the lhs.
   829  		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
   830  			r.Type.Field(1).Type = ok.Type
   831  		}
   832  		n.Rlist.Set1(r)
   833  		n.Op = OAS2FUNC
   834  
   835  		// don't generate a = *var if a is _
   836  		if !isblank(a) {
   837  			var_ := temp(types.NewPtr(t.Val()))
   838  			var_.SetTypecheck(1)
   839  			var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   840  			n.List.SetFirst(var_)
   841  			n = walkexpr(n, init)
   842  			init.Append(n)
   843  			n = nod(OAS, a, nod(OIND, var_, nil))
   844  		}
   845  
   846  		n = typecheck(n, Etop)
   847  		n = walkexpr(n, init)
   848  
   849  	case ODELETE:
   850  		init.AppendNodes(&n.Ninit)
   851  		map_ := n.List.First()
   852  		key := n.List.Second()
   853  		map_ = walkexpr(map_, init)
   854  		key = walkexpr(key, init)
   855  
   856  		t := map_.Type
   857  		fast := mapfast(t)
   858  		if fast == mapslow {
   859  			// orderstmt made sure key is addressable.
   860  			key = nod(OADDR, key, nil)
   861  		}
   862  		n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   863  
   864  	case OAS2DOTTYPE:
   865  		walkexprlistsafe(n.List.Slice(), init)
   866  		n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   867  
   868  	case OCONVIFACE:
   869  		n.Left = walkexpr(n.Left, init)
   870  
   871  		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
   872  		if isdirectiface(n.Left.Type) {
   873  			var t *Node
   874  			if n.Type.IsEmptyInterface() {
   875  				t = typename(n.Left.Type)
   876  			} else {
   877  				t = itabname(n.Left.Type, n.Type)
   878  			}
   879  			l := nod(OEFACE, t, n.Left)
   880  			l.Type = n.Type
   881  			l.SetTypecheck(n.Typecheck())
   882  			n = l
   883  			break
   884  		}
   885  
   886  		if staticbytes == nil {
   887  			staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   888  			staticbytes.SetClass(PEXTERN)
   889  			staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   890  			zerobase = newname(Runtimepkg.Lookup("zerobase"))
   891  			zerobase.SetClass(PEXTERN)
   892  			zerobase.Type = types.Types[TUINTPTR]
   893  		}
   894  
   895  		// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   896  		// by using an existing addressable value identical to n.Left
   897  		// or creating one on the stack.
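        		// For example (a rough sketch), converting a small non-escaping value v
        		// to an interface becomes
        		//	tmp := v
        		//	iface{itab/type, &tmp}
        		// rather than a call to one of the convT2* helpers.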
   898  		var value *Node
   899  		switch {
   900  		case n.Left.Type.Size() == 0:
   901  			// n.Left is zero-sized. Use zerobase.
   902  			cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   903  			value = zerobase
   904  		case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()):
   905  			// n.Left is a bool/byte. Use staticbytes[n.Left].
   906  			n.Left = cheapexpr(n.Left, init)
   907  			value = nod(OINDEX, staticbytes, byteindex(n.Left))
   908  			value.SetBounded(true)
   909  		case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   910  			// n.Left is a readonly global; use it directly.
   911  			value = n.Left
   912  		case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024:
   913  			// n.Left does not escape. Use a stack temporary initialized to n.Left.
   914  			value = temp(n.Left.Type)
   915  			init.Append(typecheck(nod(OAS, value, n.Left), Etop))
   916  		}
   917  
   918  		if value != nil {
   919  			// Value is identical to n.Left.
   920  			// Construct the interface directly: {type/itab, &value}.
   921  			var t *Node
   922  			if n.Type.IsEmptyInterface() {
   923  				t = typename(n.Left.Type)
   924  			} else {
   925  				t = itabname(n.Left.Type, n.Type)
   926  			}
   927  			l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv))
   928  			l.Type = n.Type
   929  			l.SetTypecheck(n.Typecheck())
   930  			n = l
   931  			break
   932  		}
   933  
   934  		// Implement interface to empty interface conversion.
   935  		// tmp = i.itab
   936  		// if tmp != nil {
   937  		//    tmp = tmp.type
   938  		// }
   939  		// e = iface{tmp, i.data}
   940  		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   941  			// Evaluate the input interface.
   942  			c := temp(n.Left.Type)
   943  			init.Append(nod(OAS, c, n.Left))
   944  
   945  			// Get the itab out of the interface.
   946  			tmp := temp(types.NewPtr(types.Types[TUINT8]))
   947  			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
   948  
   949  			// Get the type out of the itab.
   950  			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
   951  			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   952  			init.Append(nif)
   953  
   954  			// Build the result.
   955  			e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   956  			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
   957  			e.SetTypecheck(1)
   958  			n = e
   959  			break
   960  		}
   961  
   962  		var ll []*Node
   963  		if n.Type.IsEmptyInterface() {
   964  			if !n.Left.Type.IsInterface() {
   965  				ll = append(ll, typename(n.Left.Type))
   966  			}
   967  		} else {
   968  			if n.Left.Type.IsInterface() {
   969  				ll = append(ll, typename(n.Type))
   970  			} else {
   971  				ll = append(ll, itabname(n.Left.Type, n.Type))
   972  			}
   973  		}
   974  
   975  		if n.Left.Type.IsInterface() {
   976  			ll = append(ll, n.Left)
   977  		} else {
   978  			// Regular types are passed by reference to avoid C vararg calls.
   979  			// orderexpr arranged for n.Left to be a temporary for all
   980  			// the conversions it could see. Comparison of an interface
   981  			// with a non-interface, especially in a switch on interface value
   982  			// with non-interface cases, is not visible to orderstmt, so we
   983  			// have to fall back on allocating a temp here.
   984  			if islvalue(n.Left) {
   985  				ll = append(ll, nod(OADDR, n.Left, nil))
   986  			} else {
   987  				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
   988  			}
   989  			dowidth(n.Left.Type)
   990  		}
   991  
   992  		fn := syslook(convFuncName(n.Left.Type, n.Type))
   993  		fn = substArgTypes(fn, n.Left.Type, n.Type)
   994  		dowidth(fn.Type)
   995  		n = nod(OCALL, fn, nil)
   996  		n.List.Set(ll)
   997  		n = typecheck(n, Erv)
   998  		n = walkexpr(n, init)
   999  
  1000  	case OCONV, OCONVNOP:
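        		// On ARM, MIPS, and 386, conversions between 64-bit integers and floats
        		// (plus some unsigned 32-bit cases on 386) are lowered to runtime calls
        		// such as float64toint64 and uint64tofloat64 rather than handled inline.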
  1001  		if thearch.LinkArch.Family == sys.ARM || thearch.LinkArch.Family == sys.MIPS {
  1002  			if n.Left.Type.IsFloat() {
  1003  				if n.Type.Etype == TINT64 {
  1004  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1005  					break
  1006  				}
  1007  
  1008  				if n.Type.Etype == TUINT64 {
  1009  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1010  					break
  1011  				}
  1012  			}
  1013  
  1014  			if n.Type.IsFloat() {
  1015  				if n.Left.Type.Etype == TINT64 {
  1016  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1017  					break
  1018  				}
  1019  
  1020  				if n.Left.Type.Etype == TUINT64 {
  1021  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1022  					break
  1023  				}
  1024  			}
  1025  		}
  1026  
  1027  		if thearch.LinkArch.Family == sys.I386 {
  1028  			if n.Left.Type.IsFloat() {
  1029  				if n.Type.Etype == TINT64 {
  1030  					n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1031  					break
  1032  				}
  1033  
  1034  				if n.Type.Etype == TUINT64 {
  1035  					n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1036  					break
  1037  				}
  1038  				if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
  1039  					n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
  1040  					break
  1041  				}
  1042  			}
  1043  			if n.Type.IsFloat() {
  1044  				if n.Left.Type.Etype == TINT64 {
  1045  					n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
  1046  					break
  1047  				}
  1048  
  1049  				if n.Left.Type.Etype == TUINT64 {
  1050  					n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
  1051  					break
  1052  				}
  1053  				if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
  1054  					n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
  1055  					break
  1056  				}
  1057  			}
  1058  		}
  1059  
  1060  		n.Left = walkexpr(n.Left, init)
  1061  
  1062  	case OANDNOT:
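        		// Rewrite x &^ y as x & ^y.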
  1063  		n.Left = walkexpr(n.Left, init)
  1064  		n.Op = OAND
  1065  		n.Right = nod(OCOM, n.Right, nil)
  1066  		n.Right = typecheck(n.Right, Erv)
  1067  		n.Right = walkexpr(n.Right, init)
  1068  
  1069  	case ODIV, OMOD:
  1070  		n.Left = walkexpr(n.Left, init)
  1071  		n.Right = walkexpr(n.Right, init)
  1072  
  1073  		// rewrite complex div into function call.
  1074  		et := n.Left.Type.Etype
  1075  
  1076  		if isComplex[et] && n.Op == ODIV {
  1077  			t := n.Type
  1078  			n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
  1079  			n = conv(n, t)
  1080  			break
  1081  		}
  1082  
  1083  		// Nothing to do for float divisions.
  1084  		if isFloat[et] {
  1085  			break
  1086  		}
  1087  
  1088  		// rewrite 64-bit div and mod on 32-bit architectures.
  1089  		// TODO: Remove this code once we can introduce
  1090  		// runtime calls late in SSA processing.
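        		// For example, an int64 quotient a / b on a 32-bit target becomes a call
        		// to the runtime helper int64div(a, b); mod and the unsigned cases use
        		// int64mod, uint64div, and uint64mod.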
  1091  		if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
  1092  			if n.Right.Op == OLITERAL {
  1093  				// Leave div/mod by constant powers of 2.
  1094  				// The SSA backend will handle those.
  1095  				switch et {
  1096  				case TINT64:
  1097  					c := n.Right.Int64()
  1098  					if c < 0 {
  1099  						c = -c
  1100  					}
  1101  					if c != 0 && c&(c-1) == 0 {
  1102  						break opswitch
  1103  					}
  1104  				case TUINT64:
  1105  					c := uint64(n.Right.Int64())
  1106  					if c != 0 && c&(c-1) == 0 {
  1107  						break opswitch
  1108  					}
  1109  				}
  1110  			}
  1111  			var fn string
  1112  			if et == TINT64 {
  1113  				fn = "int64"
  1114  			} else {
  1115  				fn = "uint64"
  1116  			}
  1117  			if n.Op == ODIV {
  1118  				fn += "div"
  1119  			} else {
  1120  				fn += "mod"
  1121  			}
  1122  			n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1123  		}
  1124  
  1125  	case OINDEX:
  1126  		n.Left = walkexpr(n.Left, init)
  1127  
  1128  		// Save the original node for bounds checking elision.
  1129  		// If it was an ODIV/OMOD, walk might rewrite it.
  1130  		r := n.Right
  1131  
  1132  		n.Right = walkexpr(n.Right, init)
  1133  
  1134  		// if range of type cannot exceed static array bound,
  1135  		// disable bounds check.
  1136  		if n.Bounded() {
  1137  			break
  1138  		}
  1139  		t := n.Left.Type
  1140  		if t != nil && t.IsPtr() {
  1141  			t = t.Elem()
  1142  		}
  1143  		if t.IsArray() {
  1144  			n.SetBounded(bounded(r, t.NumElem()))
  1145  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1146  				Warn("index bounds check elided")
  1147  			}
  1148  			if smallintconst(n.Right) && !n.Bounded() {
  1149  				yyerror("index out of bounds")
  1150  			}
  1151  		} else if Isconst(n.Left, CTSTR) {
  1152  			n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1153  			if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1154  				Warn("index bounds check elided")
  1155  			}
  1156  			if smallintconst(n.Right) && !n.Bounded() {
  1157  				yyerror("index out of bounds")
  1158  			}
  1159  		}
  1160  
  1161  		if Isconst(n.Right, CTINT) {
  1162  			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1163  				yyerror("index out of bounds")
  1164  			}
  1165  		}
  1166  
  1167  	case OINDEXMAP:
  1168  		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
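        		// That is, a load v = m[k] becomes v = *mapaccess1(maptype, m, &k), and an
        		// assignment target m[k] = v stores through *mapassign(maptype, m, &k);
        		// the mapfast variants take the key by value instead of by address.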
  1169  		n.Left = walkexpr(n.Left, init)
  1170  		n.Right = walkexpr(n.Right, init)
  1171  		map_ := n.Left
  1172  		key := n.Right
  1173  		t := map_.Type
  1174  		if n.Etype == 1 {
  1175  			// This m[k] expression is on the left-hand side of an assignment.
  1176  			fast := mapfast(t)
  1177  			if fast == mapslow {
  1178  				// standard version takes key by reference.
  1179  				// orderexpr made sure key is addressable.
  1180  				key = nod(OADDR, key, nil)
  1181  			}
  1182  			n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1183  		} else {
  1184  			// m[k] is not the target of an assignment.
  1185  			fast := mapfast(t)
  1186  			if fast == mapslow {
  1187  				// standard version takes key by reference.
  1188  				// orderexpr made sure key is addressable.
  1189  				key = nod(OADDR, key, nil)
  1190  			}
  1191  
  1192  			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
  1193  				n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
  1194  			} else {
  1195  				z := zeroaddr(w)
  1196  				n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
  1197  			}
  1198  		}
  1199  		n.Type = types.NewPtr(t.Val())
  1200  		n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1201  		n = nod(OIND, n, nil)
  1202  		n.Type = t.Val()
  1203  		n.SetTypecheck(1)
  1204  
  1205  	case ORECV:
  1206  		Fatalf("walkexpr ORECV") // should see inside OAS only
  1207  
  1208  	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1209  		n.Left = walkexpr(n.Left, init)
  1210  		low, high, max := n.SliceBounds()
  1211  		low = walkexpr(low, init)
  1212  		if low != nil && iszero(low) {
  1213  			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1214  			low = nil
  1215  		}
  1216  		high = walkexpr(high, init)
  1217  		max = walkexpr(max, init)
  1218  		n.SetSliceBounds(low, high, max)
  1219  		if n.Op.IsSlice3() {
  1220  			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1221  				// Reduce x[i:j:cap(x)] to x[i:j].
  1222  				if n.Op == OSLICE3 {
  1223  					n.Op = OSLICE
  1224  				} else {
  1225  					n.Op = OSLICEARR
  1226  				}
  1227  				n = reduceSlice(n)
  1228  			}
  1229  		} else {
  1230  			n = reduceSlice(n)
  1231  		}
  1232  
  1233  	case ONEW:
  1234  		if n.Esc == EscNone {
  1235  			if n.Type.Elem().Width >= 1<<16 {
  1236  				Fatalf("large ONEW with EscNone: %v", n)
  1237  			}
  1238  			r := temp(n.Type.Elem())
  1239  			r = nod(OAS, r, nil) // zero temp
  1240  			r = typecheck(r, Etop)
  1241  			init.Append(r)
  1242  			r = nod(OADDR, r.Left, nil)
  1243  			r = typecheck(r, Erv)
  1244  			n = r
  1245  		} else {
  1246  			n = callnew(n.Type.Elem())
  1247  		}
  1248  
  1249  	case OCMPSTR:
  1250  		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
  1251  		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
  1252  			// TODO(marvin): Fix Node.EType type union.
  1253  			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
  1254  			r = typecheck(r, Erv)
  1255  			r = walkexpr(r, init)
  1256  			r.Type = n.Type
  1257  			n = r
  1258  			break
  1259  		}
  1260  
  1261  		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
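        		// For example (before any load combining), s == "ab" becomes roughly
        		//	len(s) == 2 && s[0] == 'a' && s[1] == 'b'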
  1262  		var cs, ncs *Node // const string, non-const string
  1263  		switch {
  1264  		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  1265  			// ignore; will be constant evaluated
  1266  		case Isconst(n.Left, CTSTR):
  1267  			cs = n.Left
  1268  			ncs = n.Right
  1269  		case Isconst(n.Right, CTSTR):
  1270  			cs = n.Right
  1271  			ncs = n.Left
  1272  		}
  1273  		if cs != nil {
  1274  			cmp := Op(n.Etype)
  1275  			// Our comparison below assumes that the non-constant string
  1276  			// is on the left hand side, so rewrite "" cmp x to x cmp "".
  1277  			// See issue 24817.
  1278  			if Isconst(n.Left, CTSTR) {
  1279  				cmp = brrev(cmp)
  1280  			}
  1281  
  1282  			// maxRewriteLen was chosen empirically.
  1283  			// It is the value that minimizes cmd/go file size
  1284  			// across most architectures.
  1285  			// See the commit description for CL 26758 for details.
  1286  			maxRewriteLen := 6
  1287  			// Some architectures can load unaligned byte sequences as one word.
  1288  			// So we can cover longer strings with the same amount of code.
  1289  			canCombineLoads := false
  1290  			combine64bit := false
  1291  			// TODO: does this improve performance on any other architectures?
  1292  			switch thearch.LinkArch.Family {
  1293  			case sys.AMD64:
  1294  				// Larger compares require longer instructions, so keep this reasonably low.
  1295  				// Data from CL 26758 shows that longer strings are rare.
  1296  				// If we really want we can do 16 byte SSE comparisons in the future.
  1297  				maxRewriteLen = 16
  1298  				canCombineLoads = true
  1299  				combine64bit = true
  1300  			case sys.I386:
  1301  				maxRewriteLen = 8
  1302  				canCombineLoads = true
  1303  			}
  1304  			var and Op
  1305  			switch cmp {
  1306  			case OEQ:
  1307  				and = OANDAND
  1308  			case ONE:
  1309  				and = OOROR
  1310  			default:
  1311  				// Don't do byte-wise comparisons for <, <=, etc.
  1312  				// They're fairly complicated.
  1313  				// Length-only checks are ok, though.
  1314  				maxRewriteLen = 0
  1315  			}
  1316  			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  1317  				if len(s) > 0 {
  1318  					ncs = safeexpr(ncs, init)
  1319  				}
  1320  				// TODO(marvin): Fix Node.EType type union.
  1321  				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  1322  				remains := len(s)
  1323  				for i := 0; remains > 0; {
  1324  					if remains == 1 || !canCombineLoads {
  1325  						cb := nodintconst(int64(s[i]))
  1326  						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  1327  						r = nod(and, r, nod(cmp, ncb, cb))
  1328  						remains--
  1329  						i++
  1330  						continue
  1331  					}
  1332  					var step int
  1333  					var convType *types.Type
  1334  					switch {
  1335  					case remains >= 8 && combine64bit:
  1336  						convType = types.Types[TINT64]
  1337  						step = 8
  1338  					case remains >= 4:
  1339  						convType = types.Types[TUINT32]
  1340  						step = 4
  1341  					case remains >= 2:
  1342  						convType = types.Types[TUINT16]
  1343  						step = 2
  1344  					}
  1345  					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  1346  					ncsubstr = conv(ncsubstr, convType)
  1347  					csubstr := int64(s[i])
  1348  					// Calculate large constant from bytes as sequence of shifts and ors.
  1349  					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  1350  					// ssa will combine this into a single large load.
  1351  					for offset := 1; offset < step; offset++ {
  1352  						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  1353  						b = conv(b, convType)
  1354  						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  1355  						ncsubstr = nod(OOR, ncsubstr, b)
  1356  						csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset)
  1357  					}
  1358  					csubstrPart := nodintconst(csubstr)
  1359  					// Compare "step" bytes at once.
  1360  					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  1361  					remains -= step
  1362  					i += step
  1363  				}
  1364  				r = typecheck(r, Erv)
  1365  				r = walkexpr(r, init)
  1366  				r.Type = n.Type
  1367  				n = r
  1368  				break
  1369  			}
  1370  		}
  1371  
  1372  		var r *Node
  1373  		// TODO(marvin): Fix Node.EType type union.
  1374  		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
  1375  			// prepare for rewrite below
  1376  			n.Left = cheapexpr(n.Left, init)
  1377  			n.Right = cheapexpr(n.Right, init)
  1378  
  1379  			r = mkcall("eqstring", types.Types[TBOOL], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1380  
  1381  			// quick check of len before full compare for == or !=
  1382  			// eqstring assumes that the lengths are equal
  1383  			// TODO(marvin): Fix Node.EType type union.
  1384  			if Op(n.Etype) == OEQ {
  1385  				// len(left) == len(right) && eqstring(left, right)
  1386  				r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1387  			} else {
  1388  				// len(left) != len(right) || !eqstring(left, right)
  1389  				r = nod(ONOT, r, nil)
  1390  				r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
  1391  			}
  1392  
  1393  			r = typecheck(r, Erv)
  1394  			r = walkexpr(r, nil)
  1395  		} else {
  1396  			// cmpstring(s1, s2) compared against 0
  1397  			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  1398  			// TODO(marvin): Fix Node.EType type union.
  1399  			r = nod(Op(n.Etype), r, nodintconst(0))
  1400  		}
  1401  
  1402  		r = typecheck(r, Erv)
  1403  		if !n.Type.IsBoolean() {
  1404  			Fatalf("cmp %v", n.Type)
  1405  		}
  1406  		r.Type = n.Type
  1407  		n = r
  1408  
  1409  	case OADDSTR:
  1410  		n = addstr(n, init)
  1411  
  1412  	case OAPPEND:
  1413  		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1414  		Fatalf("append outside assignment")
  1415  
  1416  	case OCOPY:
  1417  		n = copyany(n, init, instrumenting && !compiling_runtime)
  1418  
  1419  		// cannot use chanfn - closechan takes any, not chan any
  1420  	case OCLOSE:
  1421  		fn := syslook("closechan")
  1422  
  1423  		fn = substArgTypes(fn, n.Left.Type)
  1424  		n = mkcall1(fn, nil, init, n.Left)
  1425  
  1426  	case OMAKECHAN:
  1427  		n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]))
  1428  
  1429  	case OMAKEMAP:
  1430  		t := n.Type
  1431  
  1432  		a := nodnil() // hmap buffer
  1433  		r := nodnil() // bucket buffer
  1434  		if n.Esc == EscNone {
  1435  			// Allocate hmap buffer on stack.
  1436  			var_ := temp(hmap(t))
  1437  
  1438  			a = nod(OAS, var_, nil) // zero temp
  1439  			a = typecheck(a, Etop)
  1440  			init.Append(a)
  1441  			a = nod(OADDR, var_, nil)
  1442  
  1443  			// Allocate one bucket on stack.
  1444  			// Maximum key/value size is 128 bytes, larger objects
  1445  			// are stored with an indirection. So max bucket size is 2048+eps.
  1446  			var_ = temp(mapbucket(t))
  1447  
  1448  			r = nod(OAS, var_, nil) // zero temp
  1449  			r = typecheck(r, Etop)
  1450  			init.Append(r)
  1451  			r = nod(OADDR, var_, nil)
  1452  		}
  1453  
  1454  		fn := syslook("makemap")
  1455  		fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
  1456  		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]), a, r)
  1457  
  1458  	case OMAKESLICE:
  1459  		l := n.Left
  1460  		r := n.Right
  1461  		if r == nil {
  1462  			r = safeexpr(l, init)
  1463  			l = r
  1464  		}
  1465  		t := n.Type
  1466  		if n.Esc == EscNone {
  1467  			if !isSmallMakeSlice(n) {
  1468  				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1469  			}
  1470  			// var arr [r]T
  1471  			// n = arr[:l]
  1472  			t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
  1473  			var_ := temp(t)
  1474  			a := nod(OAS, var_, nil) // zero temp
  1475  			a = typecheck(a, Etop)
  1476  			init.Append(a)
  1477  			r := nod(OSLICE, var_, nil) // arr[:l]
  1478  			r.SetSliceBounds(nil, l, nil)
  1479  			r = conv(r, n.Type) // in case n.Type is named.
  1480  			r = typecheck(r, Erv)
  1481  			r = walkexpr(r, init)
  1482  			n = r
  1483  		} else {
  1484  			// n escapes; set up a call to makeslice.
  1485  			// When len and cap can fit into int, use makeslice instead of
  1486  			// makeslice64, which is faster and shorter on 32 bit platforms.
  1487  
  1488  			if t.Elem().NotInHeap() {
  1489  				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1490  			}
  1491  
  1492  			len, cap := l, r
  1493  
  1494  			fnname := "makeslice64"
  1495  			argtype := types.Types[TINT64]
  1496  
  1497  			// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
  1498  			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1499  			// will be handled by the negative range checks in makeslice during runtime.
  1500  			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1501  				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1502  				fnname = "makeslice"
  1503  				argtype = types.Types[TINT]
  1504  			}
  1505  
  1506  			fn := syslook(fnname)
  1507  			fn = substArgTypes(fn, t.Elem()) // any-1
  1508  			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1509  		}
  1510  
  1511  	case ORUNESTR:
  1512  		a := nodnil()
  1513  		if n.Esc == EscNone {
  1514  			t := types.NewArray(types.Types[TUINT8], 4)
  1515  			var_ := temp(t)
  1516  			a = nod(OADDR, var_, nil)
  1517  		}
  1518  
  1519  		// intstring(*[4]byte, rune)
  1520  		n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1521  
  1522  	case OARRAYBYTESTR:
  1523  		a := nodnil()
  1524  		if n.Esc == EscNone {
  1525  			// Create temporary buffer for string on stack.
  1526  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1527  
  1528  			a = nod(OADDR, temp(t), nil)
  1529  		}
  1530  
  1531  		// slicebytetostring(*[32]byte, []byte) string;
  1532  		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)
  1533  
  1534  		// slicebytetostringtmp([]byte) string;
  1535  	case OARRAYBYTESTRTMP:
  1536  		n.Left = walkexpr(n.Left, init)
  1537  
  1538  		if !instrumenting {
  1539  			// Let the backend handle OARRAYBYTESTRTMP directly
  1540  			// to avoid a function call to slicebytetostringtmp.
  1541  			break
  1542  		}
  1543  
  1544  		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1545  
  1546  		// slicerunetostring(*[32]byte, []rune) string;
  1547  	case OARRAYRUNESTR:
  1548  		a := nodnil()
  1549  
  1550  		if n.Esc == EscNone {
  1551  			// Create temporary buffer for string on stack.
  1552  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1553  
  1554  			a = nod(OADDR, temp(t), nil)
  1555  		}
  1556  
  1557  		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)
  1558  
  1559  		// stringtoslicebyte(*[32]byte, string) []byte;
  1560  	case OSTRARRAYBYTE:
  1561  		a := nodnil()
  1562  
  1563  		if n.Esc == EscNone {
  1564  			// Create temporary buffer for slice on stack.
  1565  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1566  
  1567  			a = nod(OADDR, temp(t), nil)
  1568  		}
  1569  
  1570  		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1571  
  1572  	case OSTRARRAYBYTETMP:
  1573  		// []byte(string) conversion that creates a slice
  1574  		// referring to the actual string bytes.
  1575  		// This conversion is handled later by the backend and
  1576  		// is only for use by internal compiler optimizations
  1577  		// that know that the slice won't be mutated.
  1578  		// The only such case today is:
  1579  		// for i, c := range []byte(string)
  1580  		n.Left = walkexpr(n.Left, init)
  1581  
  1582  		// stringtoslicerune(*[32]rune, string) []rune
  1583  	case OSTRARRAYRUNE:
  1584  		a := nodnil()
  1585  
  1586  		if n.Esc == EscNone {
  1587  			// Create temporary buffer for slice on stack.
  1588  			t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1589  
  1590  			a = nod(OADDR, temp(t), nil)
  1591  		}
  1592  
  1593  		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)
  1594  
  1595  		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  1596  	case OCMPIFACE:
  1597  		if !eqtype(n.Left.Type, n.Right.Type) {
  1598  			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  1599  		}
  1600  		var fn *Node
  1601  		if n.Left.Type.IsEmptyInterface() {
  1602  			fn = syslook("efaceeq")
  1603  		} else {
  1604  			fn = syslook("ifaceeq")
  1605  		}
  1606  
  1607  		n.Right = cheapexpr(n.Right, init)
  1608  		n.Left = cheapexpr(n.Left, init)
  1609  		lt := nod(OITAB, n.Left, nil)
  1610  		rt := nod(OITAB, n.Right, nil)
  1611  		ld := nod(OIDATA, n.Left, nil)
  1612  		rd := nod(OIDATA, n.Right, nil)
  1613  		ld.Type = types.Types[TUNSAFEPTR]
  1614  		rd.Type = types.Types[TUNSAFEPTR]
  1615  		ld.SetTypecheck(1)
  1616  		rd.SetTypecheck(1)
  1617  		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  1618  
  1619  		// Check itable/type before full compare.
  1620  		// Note: short-circuited because order matters.
  1621  		// TODO(marvin): Fix Node.EType type union.
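        		// Concretely, i1 == i2 becomes roughly
        		//	i1.tab == i2.tab && ifaceeq(i1.tab, i1.data, i2.data)
        		// with efaceeq and the type word used for empty interfaces.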
  1622  		var cmp *Node
  1623  		if Op(n.Etype) == OEQ {
  1624  			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  1625  		} else {
  1626  			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  1627  		}
  1628  		cmp = typecheck(cmp, Erv)
  1629  		cmp = walkexpr(cmp, init)
  1630  		cmp.Type = n.Type
  1631  		n = cmp
  1632  
  1633  	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1634  		if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1635  			// n can be directly represented in the read-only data section.
  1636  			// Make direct reference to the static data. See issue 12841.
  1637  			vstat := staticname(n.Type)
  1638  			vstat.Name.SetReadonly(true)
  1639  			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1640  			n = vstat
  1641  			n = typecheck(n, Erv)
  1642  			break
  1643  		}
  1644  		var_ := temp(n.Type)
  1645  		anylit(n, var_, init)
  1646  		n = var_
  1647  
  1648  	case OSEND:
  1649  		n1 := n.Right
  1650  		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1651  		n1 = walkexpr(n1, init)
  1652  		n1 = nod(OADDR, n1, nil)
  1653  		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1654  
  1655  	case OCLOSURE:
  1656  		n = walkclosure(n, init)
  1657  
  1658  	case OCALLPART:
  1659  		n = walkpartialcall(n, init)
  1660  	}
  1661  
  1662  	// Expressions that are constant at run time but not
  1663  	// considered const by the language spec are not turned into
  1664  	// constants until walk. For example, if n is y%1 == 0, the
  1665  	// walk of y%1 may have replaced it by 0.
  1666  	// Check whether n with its updated args is itself now a constant.
  1667  	t := n.Type
  1668  	evconst(n)
  1669  	if n.Type != t {
  1670  		Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1671  	}
  1672  	if n.Op == OLITERAL {
  1673  		n = typecheck(n, Erv)
  1674  		// Emit string symbol now to avoid emitting
  1675  		// any concurrently during the backend.
  1676  		if s, ok := n.Val().U.(string); ok {
  1677  			_ = stringsym(s)
  1678  		}
  1679  	}
  1680  
  1681  	updateHasCall(n)
  1682  
  1683  	if Debug['w'] != 0 && n != nil {
  1684  		Dump("walk", n)
  1685  	}
  1686  
  1687  	lineno = lno
  1688  	return n
  1689  }
  1690  
  1691  // TODO(josharian): combine this with its caller and simplify
  1692  func reduceSlice(n *Node) *Node {
  1693  	low, high, max := n.SliceBounds()
  1694  	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1695  		// Reduce x[i:len(x)] to x[i:].
  1696  		high = nil
  1697  	}
  1698  	n.SetSliceBounds(low, high, max)
  1699  	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1700  		// Reduce x[:] to x.
  1701  		if Debug_slice > 0 {
  1702  			Warn("slice: omit slice operation")
  1703  		}
  1704  		return n.Left
  1705  	}
  1706  	return n
  1707  }
  1708  
  1709  func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node {
  1710  	// convas will turn map assigns into function calls,
  1711  	// making it impossible for reorder3 to work.
  1712  	n := nod(OAS, l, r)
  1713  
  1714  	if l.Op == OINDEXMAP {
  1715  		return n
  1716  	}
  1717  
  1718  	return convas(n, init)
  1719  }
  1720  
  1721  func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1722  	// check assign expression list to
  1723  	// an expression list. called in
  1724  	//	expr-list = expr-list
  1725  
  1726  	// ensure order of evaluation for function calls
  1727  	for i := range nl {
  1728  		nl[i] = safeexpr(nl[i], init)
  1729  	}
  1730  	for i1 := range nr {
  1731  		nr[i1] = safeexpr(nr[i1], init)
  1732  	}
  1733  
  1734  	var nn []*Node
  1735  	i := 0
  1736  	for ; i < len(nl); i++ {
  1737  		if i >= len(nr) {
  1738  			break
  1739  		}
  1740  		// Do not generate 'x = x' during return. See issue 4014.
  1741  		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1742  			continue
  1743  		}
  1744  		nn = append(nn, ascompatee1(op, nl[i], nr[i], init))
  1745  	}
  1746  
  1747  	// cannot happen: caller checked that lists had same length
  1748  	if i < len(nl) || i < len(nr) {
  1749  		var nln, nrn Nodes
  1750  		nln.Set(nl)
  1751  		nrn.Set(nr)
  1752  		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1753  	}
  1754  	return nn
  1755  }
  1756  
  1757  // l is an lv and rt is the type of an rv
  1758  // return true if this implies a function call
  1759  // evaluating the lv or a function call
  1760  // in the conversion of the types
  1761  func fncall(l *Node, rt *types.Type) bool {
  1762  	if l.HasCall() || l.Op == OINDEXMAP {
  1763  		return true
  1764  	}
  1765  	if needwritebarrier(l) {
  1766  		return true
  1767  	}
  1768  	if eqtype(l.Type, rt) {
  1769  		return false
  1770  	}
  1771  	return true
  1772  }
  1773  
  1774  // check assign type list to
  1775  // an expression list. called in
  1776  //	expr-list = func()
  1777  func ascompatet(op Op, nl Nodes, nr *types.Type) []*Node {
  1778  	if nl.Len() != nr.NumFields() {
  1779  		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1780  	}
  1781  
  1782  	var nn, mm Nodes
  1783  	for i, l := range nl.Slice() {
  1784  		if isblank(l) {
  1785  			continue
  1786  		}
  1787  		r := nr.Field(i)
  1788  
  1789  		// any lv that causes a fn call must be
  1790  		// deferred until all the return arguments
  1791  		// have been pulled from the output arguments
  1792  		if fncall(l, r.Type) {
  1793  			tmp := temp(r.Type)
  1794  			tmp = typecheck(tmp, Erv)
  1795  			a := nod(OAS, l, tmp)
  1796  			a = convas(a, &mm)
  1797  			mm.Append(a)
  1798  			l = tmp
  1799  		}
  1800  
  1801  		a := nod(OAS, l, nodarg(r, 0))
  1802  		a = convas(a, &nn)
  1803  		updateHasCall(a)
  1804  		if a.HasCall() {
  1805  			Dump("ascompatet ucount", a)
  1806  			Fatalf("ascompatet: too many function calls evaluating parameters")
  1807  		}
  1808  
  1809  		nn.Append(a)
  1810  	}
  1811  	return append(nn.Slice(), mm.Slice()...)
  1812  }
  1813  
  1814  // nodarg returns a Node for the function argument denoted by t,
  1815  // which is either the entire function argument or result struct (t is a struct *types.Type)
  1816  // or a specific argument (t is a *types.Field within a struct *types.Type).
  1817  //
  1818  // If fp is 0, the node is for use by a caller invoking the given
  1819  // function, preparing the arguments before the call
  1820  // or retrieving the results after the call.
  1821  // In this case, the node will correspond to an outgoing argument
  1822  // slot like 8(SP).
  1823  //
  1824  // If fp is 1, the node is for use by the function itself
  1825  // (the callee), to retrieve its arguments or write its results.
  1826  // In this case the node will be an ONAME with an appropriate
  1827  // type and offset.
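        // For example, in a callee with signature func(x int) (y string),
        // nodarg(field, 1) roughly returns the declared ONAME for x or y,
        // while a caller uses fp == 0 to get an OINDREGSP node addressing
        // the corresponding outgoing argument slot.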
  1828  func nodarg(t interface{}, fp int) *Node {
  1829  	var n *Node
  1830  
  1831  	var funarg types.Funarg
  1832  	switch t := t.(type) {
  1833  	default:
  1834  		Fatalf("bad nodarg %T(%v)", t, t)
  1835  
  1836  	case *types.Type:
  1837  		// Entire argument struct, not just one arg
  1838  		if !t.IsFuncArgStruct() {
  1839  			Fatalf("nodarg: bad type %v", t)
  1840  		}
  1841  		funarg = t.StructType().Funarg
  1842  
  1843  		// Build fake variable name for whole arg struct.
  1844  		n = newname(lookup(".args"))
  1845  		n.Type = t
  1846  		first := t.Field(0)
  1847  		if first == nil {
  1848  			Fatalf("nodarg: bad struct")
  1849  		}
  1850  		if first.Offset == BADWIDTH {
  1851  			Fatalf("nodarg: offset not computed for %v", t)
  1852  		}
  1853  		n.Xoffset = first.Offset
  1854  
  1855  	case *types.Field:
  1856  		funarg = t.Funarg
  1857  		if fp == 1 {
  1858  			// NOTE(rsc): This should be using t.Nname directly,
  1859  			// except in the case where t.Nname.Sym is the blank symbol and
  1860  			// so the assignment would be discarded during code generation.
  1861  			// In that case we need to make a new node, and there is no harm
  1862  			// to optimization passes in doing so. But otherwise we should
  1863  			// definitely be using the actual declaration and not a newly built node.
  1864  			// The extra Fatalf checks here are verifying that this is the case,
  1865  			// without changing the actual logic (at time of writing, it's getting
  1866  			// toward time for the Go 1.7 beta).
  1867  			// At some quieter time (assuming we've never seen these Fatalfs happen)
  1868  			// we could change this code to use "expect" directly.
  1869  			expect := asNode(t.Nname)
  1870  			if expect.isParamHeapCopy() {
  1871  				expect = expect.Name.Param.Stackcopy
  1872  			}
  1873  
  1874  			for _, n := range Curfn.Func.Dcl {
  1875  				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
  1876  					if n != expect {
  1877  						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
  1878  					}
  1879  					return n
  1880  				}
  1881  			}
  1882  
  1883  			if !expect.Sym.IsBlank() {
  1884  				Fatalf("nodarg: did not find node in dcl list: %v", expect)
  1885  			}
  1886  		}
  1887  
  1888  		// Build fake name for individual variable.
  1889  		// This is safe because if there was a real declared name
  1890  		// we'd have used it above.
  1891  		n = newname(lookup("__"))
  1892  		n.Type = t.Type
  1893  		if t.Offset == BADWIDTH {
  1894  			Fatalf("nodarg: offset not computed for %v", t)
  1895  		}
  1896  		n.Xoffset = t.Offset
  1897  		n.Orig = asNode(t.Nname)
  1898  	}
  1899  
  1900  	// Rewrite argument named _ to __,
  1901  	// or else the assignment to _ will be
  1902  	// discarded during code generation.
  1903  	if isblank(n) {
  1904  		n.Sym = lookup("__")
  1905  	}
  1906  
  1907  	switch fp {
  1908  	default:
  1909  		Fatalf("bad fp")
  1910  
  1911  	case 0: // preparing arguments for call
  1912  		n.Op = OINDREGSP
  1913  		n.Xoffset += Ctxt.FixedFrameSize()
  1914  
  1915  	case 1: // reading arguments inside call
  1916  		n.SetClass(PPARAM)
  1917  		if funarg == types.FunargResults {
  1918  			n.SetClass(PPARAMOUT)
  1919  		}
  1920  	}
  1921  
  1922  	n.SetTypecheck(1)
  1923  	n.SetAddrtaken(true) // keep optimizers at bay
  1924  	return n
  1925  }
  1926  
  1927  // package all the arguments that match a ... T parameter into a []T.
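        // For example, assuming f has signature func(x int, rest ...string),
        // the call f(a, b, c) packages the trailing arguments roughly as the
        // slice literal []string{b, c} before they are assigned to rest.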
  1928  func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1929  	esc := uint16(EscUnknown)
  1930  	if ddd != nil {
  1931  		esc = ddd.Esc
  1932  	}
  1933  
  1934  	if len(args) == 0 {
  1935  		n := nodnil()
  1936  		n.Type = typ
  1937  		return n
  1938  	}
  1939  
  1940  	n := nod(OCOMPLIT, nil, typenod(typ))
  1941  	if ddd != nil && prealloc[ddd] != nil {
  1942  		prealloc[n] = prealloc[ddd] // temporary to use
  1943  	}
  1944  	n.List.Set(args)
  1945  	n.Esc = esc
  1946  	n = typecheck(n, Erv)
  1947  	if n.Type == nil {
  1948  		Fatalf("mkdotargslice: typecheck failed")
  1949  	}
  1950  	n = walkexpr(n, init)
  1951  	return n
  1952  }
  1953  
  1954  // check assign expression list to
  1955  // a type list. called in
  1956  //	return expr-list
  1957  //	func(expr-list)
  1958  func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
  1959  	var nn []*Node
  1960  
  1961  	// f(g()) where g has multiple return values
  1962  	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
  1963  		// optimization - can do block copy
  1964  		if eqtypenoname(rhs[0].Type, lhs) {
  1965  			nl := nodarg(lhs, fp)
  1966  			nr := nod(OCONVNOP, rhs[0], nil)
  1967  			nr.Type = nl.Type
  1968  			nn = []*Node{convas(nod(OAS, nl, nr), init)}
  1969  			goto ret
  1970  		}
  1971  
  1972  		// conversions involved.
  1973  		// copy into temporaries.
  1974  		var tmps []*Node
  1975  		for _, nr := range rhs[0].Type.FieldSlice() {
  1976  			tmps = append(tmps, temp(nr.Type))
  1977  		}
  1978  
  1979  		a := nod(OAS2, nil, nil)
  1980  		a.List.Set(tmps)
  1981  		a.Rlist.Set(rhs)
  1982  		a = typecheck(a, Etop)
  1983  		a = walkstmt(a)
  1984  		init.Append(a)
  1985  
  1986  		rhs = tmps
  1987  	}
  1988  
  1989  	// For each parameter (LHS), assign its corresponding argument (RHS).
  1990  	// If there's a ... parameter (which is only valid as the final
  1991  	// parameter) and this is not a ... call expression,
  1992  	// then assign the remaining arguments as a slice.
  1993  	for i, nl := range lhs.FieldSlice() {
  1994  		var nr *Node
  1995  		if nl.Isddd() && !isddd {
  1996  			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
  1997  		} else {
  1998  			nr = rhs[i]
  1999  		}
  2000  
  2001  		a := nod(OAS, nodarg(nl, fp), nr)
  2002  		a = convas(a, init)
  2003  		nn = append(nn, a)
  2004  	}
  2005  
  2006  ret:
  2007  	for _, n := range nn {
  2008  		n.SetTypecheck(1)
  2009  	}
  2010  	return nn
  2011  }
  2012  
  2013  // generate code for print
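        // For example, println(x, "hi") with x an int is lowered roughly to
        //	printlock(); printint(x); printsp(); printstring("hi"); printnl(); printunlock()
        // where printsp and printnl are emitted only for println, not print.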
  2014  func walkprint(nn *Node, init *Nodes) *Node {
  2015  	var r *Node
  2016  	var n *Node
  2017  	var on *Node
  2018  	var t *types.Type
  2019  	var et types.EType
  2020  
  2021  	op := nn.Op
  2022  	all := nn.List
  2023  	var calls []*Node
  2024  	notfirst := false
  2025  
  2026  	// Hoist all the argument evaluation up before the lock.
  2027  	walkexprlistcheap(all.Slice(), init)
  2028  
  2029  	calls = append(calls, mkcall("printlock", nil, init))
  2030  	for i1, n1 := range all.Slice() {
  2031  		if notfirst {
  2032  			calls = append(calls, mkcall("printsp", nil, init))
  2033  		}
  2034  
  2035  		notfirst = op == OPRINTN
  2036  
  2037  		n = n1
  2038  		if n.Op == OLITERAL {
  2039  			switch n.Val().Ctype() {
  2040  			case CTRUNE:
  2041  				n = defaultlit(n, types.Runetype)
  2042  
  2043  			case CTINT:
  2044  				n = defaultlit(n, types.Types[TINT64])
  2045  
  2046  			case CTFLT:
  2047  				n = defaultlit(n, types.Types[TFLOAT64])
  2048  			}
  2049  		}
  2050  
  2051  		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  2052  			n = defaultlit(n, types.Types[TINT64])
  2053  		}
  2054  		n = defaultlit(n, nil)
  2055  		all.SetIndex(i1, n)
  2056  		if n.Type == nil || n.Type.Etype == TFORW {
  2057  			continue
  2058  		}
  2059  
  2060  		t = n.Type
  2061  		et = n.Type.Etype
  2062  		if n.Type.IsInterface() {
  2063  			if n.Type.IsEmptyInterface() {
  2064  				on = syslook("printeface")
  2065  			} else {
  2066  				on = syslook("printiface")
  2067  			}
  2068  			on = substArgTypes(on, n.Type) // any-1
  2069  		} else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR {
  2070  			on = syslook("printpointer")
  2071  			on = substArgTypes(on, n.Type) // any-1
  2072  		} else if n.Type.IsSlice() {
  2073  			on = syslook("printslice")
  2074  			on = substArgTypes(on, n.Type) // any-1
  2075  		} else if isInt[et] {
  2076  			if et == TUINT64 {
  2077  				if isRuntimePkg(t.Sym.Pkg) && t.Sym.Name == "hex" {
  2078  					on = syslook("printhex")
  2079  				} else {
  2080  					on = syslook("printuint")
  2081  				}
  2082  			} else {
  2083  				on = syslook("printint")
  2084  			}
  2085  		} else if isFloat[et] {
  2086  			on = syslook("printfloat")
  2087  		} else if isComplex[et] {
  2088  			on = syslook("printcomplex")
  2089  		} else if et == TBOOL {
  2090  			on = syslook("printbool")
  2091  		} else if et == TSTRING {
  2092  			on = syslook("printstring")
  2093  		} else {
  2094  			badtype(OPRINT, n.Type, nil)
  2095  			continue
  2096  		}
  2097  
  2098  		t = on.Type.Params().Field(0).Type
  2099  
  2100  		if !eqtype(t, n.Type) {
  2101  			n = nod(OCONV, n, nil)
  2102  			n.Type = t
  2103  		}
  2104  
  2105  		r = nod(OCALL, on, nil)
  2106  		r.List.Append(n)
  2107  		calls = append(calls, r)
  2108  	}
  2109  
  2110  	if op == OPRINTN {
  2111  		calls = append(calls, mkcall("printnl", nil, nil))
  2112  	}
  2113  
  2114  	calls = append(calls, mkcall("printunlock", nil, init))
  2115  
  2116  	typecheckslice(calls, Etop)
  2117  	walkexprlist(calls, init)
  2118  
  2119  	r = nod(OEMPTY, nil, nil)
  2120  	r = typecheck(r, Etop)
  2121  	r = walkexpr(r, init)
  2122  	r.Ninit.Set(calls)
  2123  	return r
  2124  }
  2125  
  2126  func callnew(t *types.Type) *Node {
  2127  	if t.NotInHeap() {
  2128  		yyerror("%v is go:notinheap; heap allocation disallowed", t)
  2129  	}
  2130  	dowidth(t)
  2131  	fn := syslook("newobject")
  2132  	fn = substArgTypes(fn, t)
  2133  	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
  2134  	v.SetNonNil(true)
  2135  	return v
  2136  }
  2137  
  2138  func iscallret(n *Node) bool {
  2139  	n = outervalue(n)
  2140  	return n.Op == OINDREGSP
  2141  }
  2142  
  2143  func isstack(n *Node) bool {
  2144  	n = outervalue(n)
  2145  
  2146  	// If n is *autotmp and autotmp = &foo, replace n with foo.
  2147  	// We introduce such temps when initializing struct literals.
  2148  	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
  2149  		defn := n.Left.Name.Defn
  2150  		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
  2151  			n = defn.Right.Left
  2152  		}
  2153  	}
  2154  
  2155  	switch n.Op {
  2156  	case OINDREGSP:
  2157  		return true
  2158  
  2159  	case ONAME:
  2160  		switch n.Class() {
  2161  		case PAUTO, PPARAM, PPARAMOUT:
  2162  			return true
  2163  		}
  2164  	}
  2165  
  2166  	return false
  2167  }
  2168  
  2169  // isReflectHeaderDataField reports whether l is an expression p.Data
  2170  // where p has type reflect.SliceHeader or reflect.StringHeader.
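        // For example, given p of type *reflect.SliceHeader, the assignment
        // p.Data = uintptr(ptr) reaches here as an ODOTPTR whose Sym name is "Data".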
  2171  func isReflectHeaderDataField(l *Node) bool {
  2172  	if l.Type != types.Types[TUINTPTR] {
  2173  		return false
  2174  	}
  2175  
  2176  	var tsym *types.Sym
  2177  	switch l.Op {
  2178  	case ODOT:
  2179  		tsym = l.Left.Type.Sym
  2180  	case ODOTPTR:
  2181  		tsym = l.Left.Type.Elem().Sym
  2182  	default:
  2183  		return false
  2184  	}
  2185  
  2186  	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  2187  		return false
  2188  	}
  2189  	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  2190  }
  2191  
  2192  // Do we need a write barrier for assigning to l?
  2193  func needwritebarrier(l *Node) bool {
  2194  	if !use_writebarrier {
  2195  		return false
  2196  	}
  2197  
  2198  	if l == nil || isblank(l) {
  2199  		return false
  2200  	}
  2201  
  2202  	// No write barrier for write to stack.
  2203  	if isstack(l) {
  2204  		return false
  2205  	}
  2206  
  2207  	// Package unsafe's documentation says storing pointers into
  2208  	// reflect.SliceHeader and reflect.StringHeader's Data fields
  2209  	// is valid, even though they have type uintptr (#19168).
  2210  	if isReflectHeaderDataField(l) {
  2211  		return true
  2212  	}
  2213  
  2214  	// No write barrier for write of non-pointers.
  2215  	dowidth(l.Type)
  2216  	if !types.Haspointers(l.Type) {
  2217  		return false
  2218  	}
  2219  
  2220  	// No write barrier if this is a pointer to a go:notinheap
  2221  	// type, since the write barrier's inheap(ptr) check will fail.
  2222  	if l.Type.IsPtr() && l.Type.Elem().NotInHeap() {
  2223  		return false
  2224  	}
  2225  
  2226  	// TODO: We can eliminate write barriers if we know *both* the
  2227  	// current and new content of the slot must already be shaded.
  2228  	// We know a pointer is shaded if it's nil, or points to
  2229  	// static data, a global (variable or function), or the stack.
  2230  	// The nil optimization could be particularly useful for
  2231  	// writes to just-allocated objects. Unfortunately, knowing
  2232  	// the "current" value of the slot requires flow analysis.
  2233  
  2234  	// Otherwise, be conservative and use write barrier.
  2235  	return true
  2236  }
  2237  
  2238  func convas(n *Node, init *Nodes) *Node {
  2239  	if n.Op != OAS {
  2240  		Fatalf("convas: not OAS %v", n.Op)
  2241  	}
  2242  
  2243  	n.SetTypecheck(1)
  2244  
  2245  	var lt *types.Type
  2246  	var rt *types.Type
  2247  	if n.Left == nil || n.Right == nil {
  2248  		goto out
  2249  	}
  2250  
  2251  	lt = n.Left.Type
  2252  	rt = n.Right.Type
  2253  	if lt == nil || rt == nil {
  2254  		goto out
  2255  	}
  2256  
  2257  	if isblank(n.Left) {
  2258  		n.Right = defaultlit(n.Right, nil)
  2259  		goto out
  2260  	}
  2261  
  2262  	if !eqtype(lt, rt) {
  2263  		n.Right = assignconv(n.Right, lt, "assignment")
  2264  		n.Right = walkexpr(n.Right, init)
  2265  	}
  2266  	dowidth(n.Right.Type)
  2267  
  2268  out:
  2269  	updateHasCall(n)
  2270  	return n
  2271  }
  2272  
  2273  // from ascompat[te]
  2274  // evaluating actual function arguments.
  2275  //	f(a,b)
  2276  // if there is exactly one function expr,
  2277  // then it is evaluated first. otherwise we must
  2278  // introduce temporary variables.
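        // For example, for f(g(), h()) the lowered argument assignments run roughly as
        //	tmp := g(); arg1 = h(); arg0 = tmp
        // so only the final call is stored directly into its argument slot.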
  2279  func reorder1(all []*Node) []*Node {
  2280  	c := 0 // function calls
  2281  	t := 0 // total parameters
  2282  
  2283  	for _, n := range all {
  2284  		t++
  2285  		updateHasCall(n)
  2286  		if n.HasCall() {
  2287  			c++
  2288  		}
  2289  	}
  2290  
  2291  	if c == 0 || t == 1 {
  2292  		return all
  2293  	}
  2294  
  2295  	var g []*Node // fncalls assigned to tempnames
  2296  	var f *Node   // last fncall assigned to stack
  2297  	var r []*Node // non fncalls and tempnames assigned to stack
  2298  	d := 0
  2299  	var a *Node
  2300  	for _, n := range all {
  2301  		if !n.HasCall() {
  2302  			r = append(r, n)
  2303  			continue
  2304  		}
  2305  
  2306  		d++
  2307  		if d == c {
  2308  			f = n
  2309  			continue
  2310  		}
  2311  
  2312  		// make assignment of fncall to tempname
  2313  		a = temp(n.Right.Type)
  2314  
  2315  		a = nod(OAS, a, n.Right)
  2316  		g = append(g, a)
  2317  
  2318  		// put normal arg assignment on list
  2319  		// with fncall replaced by tempname
  2320  		n.Right = a.Left
  2321  
  2322  		r = append(r, n)
  2323  	}
  2324  
  2325  	if f != nil {
  2326  		g = append(g, f)
  2327  	}
  2328  	return append(g, r...)
  2329  }
  2330  
  2331  // from ascompat[ee]
  2332  //	a,b = c,d
  2333  // simultaneous assignment. there cannot
  2334  // be later use of an earlier lvalue.
  2335  //
  2336  // function calls have been removed.
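        // For example, in a, b = b, a the second assignment's right-hand a is
        // affected by the first assignment, so reorder3save copies it into a
        // temporary that is evaluated early.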
  2337  func reorder3(all []*Node) []*Node {
  2338  	var l *Node
  2339  
  2340  	// If a needed expression may be affected by an
  2341  	// earlier assignment, make an early copy of that
  2342  	// expression and use the copy instead.
  2343  	var early []*Node
  2344  
  2345  	var mapinit Nodes
  2346  	for i, n := range all {
  2347  		l = n.Left
  2348  
  2349  		// Save subexpressions needed on left side.
  2350  		// Drill through non-dereferences.
  2351  		for {
  2352  			if l.Op == ODOT || l.Op == OPAREN {
  2353  				l = l.Left
  2354  				continue
  2355  			}
  2356  
  2357  			if l.Op == OINDEX && l.Left.Type.IsArray() {
  2358  				l.Right = reorder3save(l.Right, all, i, &early)
  2359  				l = l.Left
  2360  				continue
  2361  			}
  2362  
  2363  			break
  2364  		}
  2365  
  2366  		switch l.Op {
  2367  		default:
  2368  			Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2369  
  2370  		case ONAME:
  2371  			break
  2372  
  2373  		case OINDEX, OINDEXMAP:
  2374  			l.Left = reorder3save(l.Left, all, i, &early)
  2375  			l.Right = reorder3save(l.Right, all, i, &early)
  2376  			if l.Op == OINDEXMAP {
  2377  				all[i] = convas(all[i], &mapinit)
  2378  			}
  2379  
  2380  		case OIND, ODOTPTR:
  2381  			l.Left = reorder3save(l.Left, all, i, &early)
  2382  		}
  2383  
  2384  		// Save expression on right side.
  2385  		all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2386  	}
  2387  
  2388  	early = append(mapinit.Slice(), early...)
  2389  	return append(early, all...)
  2390  }
  2391  
  2392  // if the evaluation of n would be affected by the
  2393  // assignments in all up to but not including the ith assignment,
  2394  // copy n into a temporary whose initialization is appended to *early,
  2395  // and return that temporary in place of n.
  2396  // The result of reorder3save MUST be assigned back to n, e.g.
  2397  // 	n.Left = reorder3save(n.Left, all, i, early)
  2398  func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2399  	if !aliased(n, all, i) {
  2400  		return n
  2401  	}
  2402  
  2403  	q := temp(n.Type)
  2404  	q = nod(OAS, q, n)
  2405  	q = typecheck(q, Etop)
  2406  	*early = append(*early, q)
  2407  	return q.Left
  2408  }
  2409  
  2410  // what's the outer value that a write to n affects?
  2411  // outer value means containing struct or array.
  2412  func outervalue(n *Node) *Node {
  2413  	for {
  2414  		if n.Op == OXDOT {
  2415  			Fatalf("OXDOT in walk")
  2416  		}
  2417  		if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP {
  2418  			n = n.Left
  2419  			continue
  2420  		}
  2421  
  2422  		if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() {
  2423  			n = n.Left
  2424  			continue
  2425  		}
  2426  
  2427  		break
  2428  	}
  2429  
  2430  	return n
  2431  }
  2432  
  2433  // Is it possible that the computation of n might be
  2434  // affected by writes in all up to but not including the ith element?
  2435  func aliased(n *Node, all []*Node, i int) bool {
  2436  	if n == nil {
  2437  		return false
  2438  	}
  2439  
  2440  	// Treat all fields of a struct as referring to the whole struct.
  2441  	// We could do better but we would have to keep track of the fields.
  2442  	for n.Op == ODOT {
  2443  		n = n.Left
  2444  	}
  2445  
  2446  	// Look for obvious aliasing: a variable being assigned
  2447  	// during the all list and appearing in n.
  2448  	// Also record whether there are any writes to main memory.
  2449  	// Also record whether there are any writes to variables
  2450  	// whose addresses have been taken.
  2451  	memwrite := 0
  2452  
  2453  	varwrite := 0
  2454  	var a *Node
  2455  	for _, an := range all[:i] {
  2456  		a = outervalue(an.Left)
  2457  
  2458  		for a.Op == ODOT {
  2459  			a = a.Left
  2460  		}
  2461  
  2462  		if a.Op != ONAME {
  2463  			memwrite = 1
  2464  			continue
  2465  		}
  2466  
  2467  		switch n.Class() {
  2468  		default:
  2469  			varwrite = 1
  2470  			continue
  2471  
  2472  		case PAUTO, PPARAM, PPARAMOUT:
  2473  			if n.Addrtaken() {
  2474  				varwrite = 1
  2475  				continue
  2476  			}
  2477  
  2478  			if vmatch2(a, n) {
  2479  				// Direct hit.
  2480  				return true
  2481  			}
  2482  		}
  2483  	}
  2484  
  2485  	// The variables being written do not appear in n.
  2486  	// However, n might refer to computed addresses
  2487  	// that are being written.
  2488  
  2489  	// If no computed addresses are affected by the writes, no aliasing.
  2490  	if memwrite == 0 && varwrite == 0 {
  2491  		return false
  2492  	}
  2493  
  2494  	// If n does not refer to computed addresses
  2495  	// (that is, if n only refers to variables whose addresses
  2496  	// have not been taken), no aliasing.
  2497  	if varexpr(n) {
  2498  		return false
  2499  	}
  2500  
  2501  	// Otherwise, both the writes and n refer to computed memory addresses.
  2502  	// Assume that they might conflict.
  2503  	return true
  2504  }
  2505  
  2506  // does the evaluation of n only refer to variables
  2507  // whose addresses have not been taken?
  2508  // (and no other memory)
  2509  func varexpr(n *Node) bool {
  2510  	if n == nil {
  2511  		return true
  2512  	}
  2513  
  2514  	switch n.Op {
  2515  	case OLITERAL:
  2516  		return true
  2517  
  2518  	case ONAME:
  2519  		switch n.Class() {
  2520  		case PAUTO, PPARAM, PPARAMOUT:
  2521  			if !n.Addrtaken() {
  2522  				return true
  2523  			}
  2524  		}
  2525  
  2526  		return false
  2527  
  2528  	case OADD,
  2529  		OSUB,
  2530  		OOR,
  2531  		OXOR,
  2532  		OMUL,
  2533  		ODIV,
  2534  		OMOD,
  2535  		OLSH,
  2536  		ORSH,
  2537  		OAND,
  2538  		OANDNOT,
  2539  		OPLUS,
  2540  		OMINUS,
  2541  		OCOM,
  2542  		OPAREN,
  2543  		OANDAND,
  2544  		OOROR,
  2545  		OCONV,
  2546  		OCONVNOP,
  2547  		OCONVIFACE,
  2548  		ODOTTYPE:
  2549  		return varexpr(n.Left) && varexpr(n.Right)
  2550  
  2551  	case ODOT: // but not ODOTPTR
  2552  		// Should have been handled in aliased.
  2553  		Fatalf("varexpr unexpected ODOT")
  2554  	}
  2555  
  2556  	// Be conservative.
  2557  	return false
  2558  }
  2559  
  2560  // is the name l mentioned in r?
  2561  func vmatch2(l *Node, r *Node) bool {
  2562  	if r == nil {
  2563  		return false
  2564  	}
  2565  	switch r.Op {
  2566  	// match each right given left
  2567  	case ONAME:
  2568  		return l == r
  2569  
  2570  	case OLITERAL:
  2571  		return false
  2572  	}
  2573  
  2574  	if vmatch2(l, r.Left) {
  2575  		return true
  2576  	}
  2577  	if vmatch2(l, r.Right) {
  2578  		return true
  2579  	}
  2580  	for _, n := range r.List.Slice() {
  2581  		if vmatch2(l, n) {
  2582  			return true
  2583  		}
  2584  	}
  2585  	return false
  2586  }
  2587  
  2588  // is any name mentioned in l also mentioned in r?
  2589  // called by sinit.go
  2590  func vmatch1(l *Node, r *Node) bool {
  2591  	// isolate all left sides
  2592  	if l == nil || r == nil {
  2593  		return false
  2594  	}
  2595  	switch l.Op {
  2596  	case ONAME:
  2597  		switch l.Class() {
  2598  		case PPARAM, PAUTO:
  2599  			break
  2600  
  2601  		default:
  2602  			// assignment to non-stack variable must be
  2603  			// delayed if right has function calls.
  2604  			if r.HasCall() {
  2605  				return true
  2606  			}
  2607  		}
  2608  
  2609  		return vmatch2(l, r)
  2610  
  2611  	case OLITERAL:
  2612  		return false
  2613  	}
  2614  
  2615  	if vmatch1(l.Left, r) {
  2616  		return true
  2617  	}
  2618  	if vmatch1(l.Right, r) {
  2619  		return true
  2620  	}
  2621  	for _, n := range l.List.Slice() {
  2622  		if vmatch1(n, r) {
  2623  			return true
  2624  		}
  2625  	}
  2626  	return false
  2627  }
  2628  
  2629  // paramstoheap returns code to allocate memory for heap-escaped parameters
  2630  // and to copy non-result parameters' values from the stack.
  2631  func paramstoheap(params *types.Type) []*Node {
  2632  	var nn []*Node
  2633  	for _, t := range params.Fields().Slice() {
  2634  		v := asNode(t.Nname)
  2635  		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2636  			v = nil
  2637  		}
  2638  		if v == nil {
  2639  			continue
  2640  		}
  2641  
  2642  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2643  			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2644  			if stackcopy.Class() == PPARAM {
  2645  				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
  2646  			}
  2647  		}
  2648  	}
  2649  
  2650  	return nn
  2651  }
  2652  
  2653  // zeroResults zeros the return values at the start of the function.
  2654  // We need to do this very early in the function.  Defer might stop a
  2655  // panic and show the return values as they exist at the time of
  2656  // panic.  For precise stacks, the garbage collector assumes results
  2657  // are always live, so we need to zero them before any allocations,
  2658  // even allocations to move params/results to the heap.
  2659  // The generated code is added to Curfn's Enter list.
  2660  func zeroResults() {
  2661  	lno := lineno
  2662  	lineno = Curfn.Pos
  2663  	for _, f := range Curfn.Type.Results().Fields().Slice() {
  2664  		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
  2665  			// The local which points to the return value is the
  2666  			// thing that needs zeroing. This is already handled
  2667  			// by a Needzero annotation in plive.go:livenessepilogue.
  2668  			continue
  2669  		}
  2670  		// Zero the stack location containing f.
  2671  		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
  2672  	}
  2673  	lineno = lno
  2674  }
  2675  
  2676  // returnsfromheap returns code to copy values for heap-escaped parameters
  2677  // back to the stack.
  2678  func returnsfromheap(params *types.Type) []*Node {
  2679  	var nn []*Node
  2680  	for _, t := range params.Fields().Slice() {
  2681  		v := asNode(t.Nname)
  2682  		if v == nil {
  2683  			continue
  2684  		}
  2685  		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2686  			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
  2687  		}
  2688  	}
  2689  
  2690  	return nn
  2691  }
  2692  
  2693  // heapmoves generates code to handle migrating heap-escaped parameters
  2694  // between the stack and the heap. The generated code is added to Curfn's
  2695  // Enter and Exit lists.
  2696  func heapmoves() {
  2697  	lno := lineno
  2698  	lineno = Curfn.Pos
  2699  	nn := paramstoheap(Curfn.Type.Recvs())
  2700  	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2701  	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2702  	Curfn.Func.Enter.Append(nn...)
  2703  	lineno = Curfn.Func.Endlineno
  2704  	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2705  	lineno = lno
  2706  }
  2707  
  2708  func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2709  	if fn.Type == nil || fn.Type.Etype != TFUNC {
  2710  		Fatalf("mkcall %v %v", fn, fn.Type)
  2711  	}
  2712  
  2713  	n := fn.Type.Params().NumFields()
  2714  
  2715  	r := nod(OCALL, fn, nil)
  2716  	r.List.Set(va[:n])
  2717  	if fn.Type.Results().NumFields() > 0 {
  2718  		r = typecheck(r, Erv|Efnstruct)
  2719  	} else {
  2720  		r = typecheck(r, Etop)
  2721  	}
  2722  	r = walkexpr(r, init)
  2723  	r.Type = t
  2724  	return r
  2725  }
  2726  
  2727  func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2728  	return vmkcall(syslook(name), t, init, args)
  2729  }
  2730  
  2731  func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2732  	return vmkcall(fn, t, init, args)
  2733  }
  2734  
  2735  func conv(n *Node, t *types.Type) *Node {
  2736  	if eqtype(n.Type, t) {
  2737  		return n
  2738  	}
  2739  	n = nod(OCONV, n, nil)
  2740  	n.Type = t
  2741  	n = typecheck(n, Erv)
  2742  	return n
  2743  }
  2744  
  2745  // byteindex converts n, which is byte-sized, to a uint8.
  2746  // We cannot use conv, because we allow converting bool to uint8 here,
  2747  // which is forbidden in user code.
  2748  func byteindex(n *Node) *Node {
  2749  	if eqtype(n.Type, types.Types[TUINT8]) {
  2750  		return n
  2751  	}
  2752  	n = nod(OCONV, n, nil)
  2753  	n.Type = types.Types[TUINT8]
  2754  	n.SetTypecheck(1)
  2755  	return n
  2756  }
  2757  
  2758  func chanfn(name string, n int, t *types.Type) *Node {
  2759  	if !t.IsChan() {
  2760  		Fatalf("chanfn %v", t)
  2761  	}
  2762  	fn := syslook(name)
  2763  	switch n {
  2764  	default:
  2765  		Fatalf("chanfn %d", n)
  2766  	case 1:
  2767  		fn = substArgTypes(fn, t.Elem())
  2768  	case 2:
  2769  		fn = substArgTypes(fn, t.Elem(), t.Elem())
  2770  	}
  2771  	return fn
  2772  }
  2773  
  2774  func mapfn(name string, t *types.Type) *Node {
  2775  	if !t.IsMap() {
  2776  		Fatalf("mapfn %v", t)
  2777  	}
  2778  	fn := syslook(name)
  2779  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
  2780  	return fn
  2781  }
  2782  
  2783  func mapfndel(name string, t *types.Type) *Node {
  2784  	if !t.IsMap() {
  2785  		Fatalf("mapfn %v", t)
  2786  	}
  2787  	fn := syslook(name)
  2788  	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
  2789  	return fn
  2790  }
  2791  
  2792  const (
  2793  	mapslow = iota
  2794  	mapfast32
  2795  	mapfast32ptr
  2796  	mapfast64
  2797  	mapfast64ptr
  2798  	mapfaststr
  2799  	nmapfast
  2800  )
  2801  
  2802  type mapnames [nmapfast]string
  2803  
  2804  func mkmapnames(base string, ptr string) mapnames {
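        // mkmapnames builds the runtime entry-point names for one map operation.
        // For example, mkmapnames("mapassign", "ptr") yields mapassign, mapassign_fast32,
        // mapassign_fast32ptr, mapassign_fast64, mapassign_fast64ptr, mapassign_faststr.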
  2805  	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2806  }
  2807  
  2808  var mapaccess1 mapnames = mkmapnames("mapaccess1", "")
  2809  var mapaccess2 mapnames = mkmapnames("mapaccess2", "")
  2810  var mapassign mapnames = mkmapnames("mapassign", "ptr")
  2811  var mapdelete mapnames = mkmapnames("mapdelete", "")
  2812  
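        // mapfast selects which specialized map runtime entry point to use for map
        // type t. For example, a map[int32]T key hits the _fast32 variants and a
        // map[string]T key hits _faststr, while values wider than 128 bytes fall
        // back to mapslow.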
  2813  func mapfast(t *types.Type) int {
  2814  	// Check ../../runtime/hashmap.go:maxValueSize before changing.
  2815  	if t.Val().Width > 128 {
  2816  		return mapslow
  2817  	}
  2818  	switch algtype(t.Key()) {
  2819  	case AMEM32:
  2820  		if !t.Key().HasPointer() {
  2821  			return mapfast32
  2822  		}
  2823  		if Widthptr == 4 {
  2824  			return mapfast32ptr
  2825  		}
  2826  		Fatalf("small pointer %v", t.Key())
  2827  	case AMEM64:
  2828  		if !t.Key().HasPointer() {
  2829  			return mapfast64
  2830  		}
  2831  		if Widthptr == 8 {
  2832  			return mapfast64ptr
  2833  		}
  2834  		// Two-word object, at least one of which is a pointer.
  2835  		// Use the slow path.
  2836  	case ASTRING:
  2837  		return mapfaststr
  2838  	}
  2839  	return mapslow
  2840  }
  2841  
  2842  func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2843  	fn := syslook(name)
  2844  	fn = substArgTypes(fn, l, r)
  2845  	return fn
  2846  }
  2847  
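        // addstr lowers an OADDSTR concatenation such as a + b + c into a call
        // like concatstring3(buf, a, b, c); more than five operands are passed to
        // concatstrings as a []string. buf is nil or a stack temporary buffer.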
  2848  func addstr(n *Node, init *Nodes) *Node {
  2849  	// orderexpr rewrote OADDSTR to have a list of strings.
  2850  	c := n.List.Len()
  2851  
  2852  	if c < 2 {
  2853  		Fatalf("addstr count %d too small", c)
  2854  	}
  2855  
  2856  	buf := nodnil()
  2857  	if n.Esc == EscNone {
  2858  		sz := int64(0)
  2859  		for _, n1 := range n.List.Slice() {
  2860  			if n1.Op == OLITERAL {
  2861  				sz += int64(len(n1.Val().U.(string)))
  2862  			}
  2863  		}
  2864  
  2865  		// Don't allocate the buffer if the result won't fit.
  2866  		if sz < tmpstringbufsize {
  2867  			// Create temporary buffer for result string on stack.
  2868  			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2869  
  2870  			buf = nod(OADDR, temp(t), nil)
  2871  		}
  2872  	}
  2873  
  2874  	// build list of string arguments
  2875  	args := []*Node{buf}
  2876  	for _, n2 := range n.List.Slice() {
  2877  		args = append(args, conv(n2, types.Types[TSTRING]))
  2878  	}
  2879  
  2880  	var fn string
  2881  	if c <= 5 {
  2882  		// small numbers of strings use direct runtime helpers.
  2883  		// note: orderexpr knows this cutoff too.
  2884  		fn = fmt.Sprintf("concatstring%d", c)
  2885  	} else {
  2886  		// large numbers of strings are passed to the runtime as a slice.
  2887  		fn = "concatstrings"
  2888  
  2889  		t := types.NewSlice(types.Types[TSTRING])
  2890  		slice := nod(OCOMPLIT, nil, typenod(t))
  2891  		if prealloc[n] != nil {
  2892  			prealloc[slice] = prealloc[n]
  2893  		}
  2894  		slice.List.Set(args[1:]) // skip buf arg
  2895  		args = []*Node{buf, slice}
  2896  		slice.Esc = EscNone
  2897  	}
  2898  
  2899  	cat := syslook(fn)
  2900  	r := nod(OCALL, cat, nil)
  2901  	r.List.Set(args)
  2902  	r = typecheck(r, Erv)
  2903  	r = walkexpr(r, init)
  2904  	r.Type = n.Type
  2905  
  2906  	return r
  2907  }
  2908  
  2909  // expand append(l1, l2...) to
  2910  //   init {
  2911  //     s := l1
  2912  //     n := len(s) + len(l2)
  2913  //     // Compare as uint so growslice can panic on overflow.
  2914  //     if uint(n) > uint(cap(s)) {
  2915  //       s = growslice(s, n)
  2916  //     }
  2917  //     s = s[:n]
  2918  //     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2919  //   }
  2920  //   s
  2921  //
  2922  // l2 is allowed to be a string.
  2923  func appendslice(n *Node, init *Nodes) *Node {
  2924  	walkexprlistsafe(n.List.Slice(), init)
  2925  
  2926  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2927  	// and n are name or literal, but those may index the slice we're
  2928  	// modifying here. Fix explicitly.
  2929  	ls := n.List.Slice()
  2930  	for i1, n1 := range ls {
  2931  		ls[i1] = cheapexpr(n1, init)
  2932  	}
  2933  
  2934  	l1 := n.List.First()
  2935  	l2 := n.List.Second()
  2936  
  2937  	var l []*Node
  2938  
  2939  	// var s []T
  2940  	s := temp(l1.Type)
  2941  	l = append(l, nod(OAS, s, l1)) // s = l1
  2942  
  2943  	// n := len(s) + len(l2)
  2944  	nn := temp(types.Types[TINT])
  2945  	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2946  
  2947  	// if uint(n) > uint(cap(s))
  2948  	nif := nod(OIF, nil, nil)
  2949  	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
  2950  	nif.Left.Left.Type = types.Types[TUINT]
  2951  	nif.Left.Right.Type = types.Types[TUINT]
  2952  
  2953  	// instantiate growslice(Type*, []any, int) []any
  2954  	fn := syslook("growslice")
  2955  	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  2956  
  2957  	// s = growslice(T, s, n)
  2958  	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
  2959  	l = append(l, nif)
  2960  
  2961  	// s = s[:n]
  2962  	nt := nod(OSLICE, s, nil)
  2963  	nt.SetSliceBounds(nil, nn, nil)
  2964  	nt.Etype = 1
  2965  	l = append(l, nod(OAS, s, nt))
  2966  
  2967  	if types.Haspointers(l1.Type.Elem()) {
  2968  		// copy(s[len(l1):], l2)
  2969  		nptr1 := nod(OSLICE, s, nil)
  2970  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2971  		nptr1.Etype = 1
  2972  		nptr2 := l2
  2973  		fn := syslook("typedslicecopy")
  2974  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2975  		var ln Nodes
  2976  		ln.Set(l)
  2977  		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
  2978  		l = append(ln.Slice(), nt)
  2979  	} else if instrumenting && !compiling_runtime {
  2980  		// rely on runtime to instrument copy.
  2981  		// copy(s[len(l1):], l2)
  2982  		nptr1 := nod(OSLICE, s, nil)
  2983  		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2984  		nptr1.Etype = 1
  2985  		nptr2 := l2
  2986  		var fn *Node
  2987  		if l2.Type.IsString() {
  2988  			fn = syslook("slicestringcopy")
  2989  		} else {
  2990  			fn = syslook("slicecopy")
  2991  		}
  2992  		fn = substArgTypes(fn, l1.Type, l2.Type)
  2993  		var ln Nodes
  2994  		ln.Set(l)
  2995  		nt := mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
  2996  		l = append(ln.Slice(), nt)
  2997  	} else {
  2998  		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2999  		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  3000  		nptr1.SetBounded(true)
  3001  
  3002  		nptr1 = nod(OADDR, nptr1, nil)
  3003  
  3004  		nptr2 := nod(OSPTR, l2, nil)
  3005  
  3006  		fn := syslook("memmove")
  3007  		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())
  3008  
  3009  		var ln Nodes
  3010  		ln.Set(l)
  3011  		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
  3012  
  3013  		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
  3014  		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
  3015  		l = append(ln.Slice(), nt)
  3016  	}
  3017  
  3018  	typecheckslice(l, Etop)
  3019  	walkstmtlist(l)
  3020  	init.Append(l...)
  3021  	return s
  3022  }
  3023  
  3024  // Rewrite append(src, x, y, z) so that any side effects in
  3025  // x, y, z (including runtime panics) are evaluated in
  3026  // initialization statements before the append.
  3027  // For normal code generation, stop there and leave the
  3028  // rest to cgen_append.
  3029  //
  3030  // For race detector, expand append(src, a [, b]* ) to
  3031  //
  3032  //   init {
  3033  //     s := src
  3034  //     const argc = len(args) - 1
  3035  //     if cap(s) - len(s) < argc {
  3036  //	    s = growslice(s, len(s)+argc)
  3037  //     }
  3038  //     n := len(s)
  3039  //     s = s[:n+argc]
  3040  //     s[n] = a
  3041  //     s[n+1] = b
  3042  //     ...
  3043  //   }
  3044  //   s
  3045  func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  3046  	if !samesafeexpr(dst, n.List.First()) {
  3047  		n.List.SetFirst(safeexpr(n.List.First(), init))
  3048  		n.List.SetFirst(walkexpr(n.List.First(), init))
  3049  	}
  3050  	walkexprlistsafe(n.List.Slice()[1:], init)
  3051  
  3052  	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  3053  	// and n are name or literal, but those may index the slice we're
  3054  	// modifying here. Fix explicitly.
  3055  	// Using cheapexpr also makes sure that the evaluation
  3056  	// of all arguments (and especially any panics) happen
  3057  	// before we begin to modify the slice in a visible way.
  3058  	ls := n.List.Slice()[1:]
  3059  	for i, n := range ls {
  3060  		ls[i] = cheapexpr(n, init)
  3061  	}
  3062  
  3063  	nsrc := n.List.First()
  3064  
  3065  	argc := n.List.Len() - 1
  3066  	if argc < 1 {
  3067  		return nsrc
  3068  	}
  3069  
  3070  	// General case, with no function calls left as arguments.
  3071  	// Leave for gen, except that instrumentation requires old form.
  3072  	if !instrumenting || compiling_runtime {
  3073  		return n
  3074  	}
  3075  
  3076  	var l []*Node
  3077  
  3078  	ns := temp(nsrc.Type)
  3079  	l = append(l, nod(OAS, ns, nsrc)) // s = src
  3080  
  3081  	na := nodintconst(int64(argc)) // const argc
  3082  	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  3083  	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  3084  
  3085  	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  3086  	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  3087  
  3088  	nx.Nbody.Set1(nod(OAS, ns,
  3089  		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  3090  			nod(OADD, nod(OLEN, ns, nil), na))))
  3091  
  3092  	l = append(l, nx)
  3093  
  3094  	nn := temp(types.Types[TINT])
  3095  	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  3096  
  3097  	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  3098  	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  3099  	nx.Etype = 1
  3100  	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  3101  
  3102  	ls = n.List.Slice()[1:]
  3103  	for i, n := range ls {
  3104  		nx = nod(OINDEX, ns, nn) // s[n] ...
  3105  		nx.SetBounded(true)
  3106  		l = append(l, nod(OAS, nx, n)) // s[n] = arg
  3107  		if i+1 < len(ls) {
  3108  			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  3109  		}
  3110  	}
  3111  
  3112  	typecheckslice(l, Etop)
  3113  	walkstmtlist(l)
  3114  	init.Append(l...)
  3115  	return ns
  3116  }
  3117  
  3118  // Lower copy(a, b) to a memmove call or a runtime call.
  3119  //
  3120  // init {
  3121  //   n := len(a)
  3122  //   if n > len(b) { n = len(b) }
  3123  //   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
  3124  // }
  3125  // n;
  3126  //
  3127  // Also works if b is a string.
  3128  //
  3129  func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  3130  	if types.Haspointers(n.Left.Type.Elem()) {
  3131  		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  3132  		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  3133  	}
  3134  
  3135  	if runtimecall {
  3136  		var fn *Node
  3137  		if n.Right.Type.IsString() {
  3138  			fn = syslook("slicestringcopy")
  3139  		} else {
  3140  			fn = syslook("slicecopy")
  3141  		}
  3142  		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  3143  		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  3144  	}
  3145  
  3146  	n.Left = walkexpr(n.Left, init)
  3147  	n.Right = walkexpr(n.Right, init)
  3148  	nl := temp(n.Left.Type)
  3149  	nr := temp(n.Right.Type)
  3150  	var l []*Node
  3151  	l = append(l, nod(OAS, nl, n.Left))
  3152  	l = append(l, nod(OAS, nr, n.Right))
  3153  
  3154  	nfrm := nod(OSPTR, nr, nil)
  3155  	nto := nod(OSPTR, nl, nil)
  3156  
  3157  	nlen := temp(types.Types[TINT])
  3158  
  3159  	// n = len(to)
  3160  	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  3161  
  3162  	// if n > len(frm) { n = len(frm) }
  3163  	nif := nod(OIF, nil, nil)
  3164  
  3165  	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  3166  	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  3167  	l = append(l, nif)
  3168  
  3169  	// Call memmove.
  3170  	fn := syslook("memmove")
  3171  
  3172  	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  3173  	nwid := temp(types.Types[TUINTPTR])
  3174  	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
  3175  	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3176  	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
  3177  
  3178  	typecheckslice(l, Etop)
  3179  	walkstmtlist(l)
  3180  	init.Append(l...)
  3181  	return nlen
  3182  }
  3183  
  3184  func eqfor(t *types.Type, needsize *int) *Node {
  3185  	// Should only arrive here with large memory or
  3186  	// a struct/array containing a non-memory field/element.
  3187  	// Small memory is handled inline, and single non-memory
  3188  	// is handled during type check (OCMPSTR etc).
  3189  	switch a, _ := algtype1(t); a {
  3190  	case AMEM:
  3191  		n := syslook("memequal")
  3192  		n = substArgTypes(n, t, t)
  3193  		*needsize = 1
  3194  		return n
  3195  	case ASPECIAL:
  3196  		sym := typesymprefix(".eq", t)
  3197  		n := newname(sym)
  3198  		n.SetClass(PFUNC)
  3199  		ntype := nod(OTFUNC, nil, nil)
  3200  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3201  		ntype.List.Append(anonfield(types.NewPtr(t)))
  3202  		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
  3203  		ntype = typecheck(ntype, Etype)
  3204  		n.Type = ntype.Type
  3205  		*needsize = 0
  3206  		return n
  3207  	}
  3208  	Fatalf("eqfor %v", t)
  3209  	return nil
  3210  }
  3211  
  3212  // The result of walkcompare MUST be assigned back to n, e.g.
  3213  // 	n.Left = walkcompare(n.Left, init)
  3214  func walkcompare(n *Node, init *Nodes) *Node {
  3215  	// Given interface value l and concrete value r, rewrite
  3216  	//   l == r
  3217  	// into types-equal && data-equal.
  3218  	// This is efficient, avoids allocations, and avoids runtime calls.
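        	// For example, if l has type interface{} and r is the int constant 7,
        	// l == r becomes roughly l.tab == type(int) && *(*int)(l.data) == 7.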
  3219  	var l, r *Node
  3220  	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3221  		l = n.Left
  3222  		r = n.Right
  3223  	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3224  		l = n.Right
  3225  		r = n.Left
  3226  	}
  3227  
  3228  	if l != nil {
  3229  		// Handle both == and !=.
  3230  		eq := n.Op
  3231  		var andor Op
  3232  		if eq == OEQ {
  3233  			andor = OANDAND
  3234  		} else {
  3235  			andor = OOROR
  3236  		}
  3237  		// Check for types equal.
  3238  		// For empty interface, this is:
  3239  		//   l.tab == type(r)
  3240  		// For non-empty interface, this is:
  3241  		//   l.tab != nil && l.tab._type == type(r)
  3242  		var eqtype *Node
  3243  		tab := nod(OITAB, l, nil)
  3244  		rtyp := typename(r.Type)
  3245  		if l.Type.IsEmptyInterface() {
  3246  			tab.Type = types.NewPtr(types.Types[TUINT8])
  3247  			tab.SetTypecheck(1)
  3248  			eqtype = nod(eq, tab, rtyp)
  3249  		} else {
  3250  			nonnil := nod(brcom(eq), nodnil(), tab)
  3251  			match := nod(eq, itabType(tab), rtyp)
  3252  			eqtype = nod(andor, nonnil, match)
  3253  		}
  3254  		// Check for data equal.
  3255  		eqdata := nod(eq, ifaceData(l, r.Type), r)
  3256  		// Put it all together.
  3257  		expr := nod(andor, eqtype, eqdata)
  3258  		n = finishcompare(n, expr, init)
  3259  		return n
  3260  	}
  3261  
  3262  	// Must be comparison of array or struct.
  3263  	// Otherwise back end handles it.
  3264  	// While we're here, decide whether to
  3265  	// inline or call an eq alg.
  3266  	t := n.Left.Type
  3267  	var inline bool
  3268  
  3269  	maxcmpsize := int64(4)
  3270  	unalignedLoad := false
  3271  	switch thearch.LinkArch.Family {
  3272  	case sys.AMD64, sys.ARM64, sys.S390X:
  3273  		// Keep this low enough to generate less code than a function call.
  3274  		maxcmpsize = 16
  3275  		unalignedLoad = true
  3276  	case sys.I386:
  3277  		maxcmpsize = 8
  3278  		unalignedLoad = true
  3279  	}
  3280  
  3281  	switch t.Etype {
  3282  	default:
  3283  		return n
  3284  	case TARRAY:
  3285  		// We can compare several elements at once with 2/4/8 byte integer compares
  3286  		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3287  	case TSTRUCT:
  3288  		inline = t.NumFields() <= 4
  3289  	}
  3290  
  3291  	cmpl := n.Left
  3292  	for cmpl != nil && cmpl.Op == OCONVNOP {
  3293  		cmpl = cmpl.Left
  3294  	}
  3295  	cmpr := n.Right
  3296  	for cmpr != nil && cmpr.Op == OCONVNOP {
  3297  		cmpr = cmpr.Left
  3298  	}
  3299  
  3300  	// We chose not to inline. Call the equality function directly.
  3301  	if !inline {
  3302  		if isvaluelit(cmpl) {
  3303  			var_ := temp(cmpl.Type)
  3304  			anylit(cmpl, var_, init)
  3305  			cmpl = var_
  3306  		}
  3307  		if isvaluelit(cmpr) {
  3308  			var_ := temp(cmpr.Type)
  3309  			anylit(cmpr, var_, init)
  3310  			cmpr = var_
  3311  		}
  3312  		if !islvalue(cmpl) || !islvalue(cmpr) {
  3313  			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3314  		}
  3315  
  3316  		// eq algs take pointers
  3317  		pl := temp(types.NewPtr(t))
  3318  		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3319  		al.Right.Etype = 1 // addr does not escape
  3320  		al = typecheck(al, Etop)
  3321  		init.Append(al)
  3322  
  3323  		pr := temp(types.NewPtr(t))
  3324  		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3325  		ar.Right.Etype = 1 // addr does not escape
  3326  		ar = typecheck(ar, Etop)
  3327  		init.Append(ar)
  3328  
  3329  		var needsize int
  3330  		call := nod(OCALL, eqfor(t, &needsize), nil)
  3331  		call.List.Append(pl)
  3332  		call.List.Append(pr)
  3333  		if needsize != 0 {
  3334  			call.List.Append(nodintconst(t.Width))
  3335  		}
  3336  		res := call
  3337  		if n.Op != OEQ {
  3338  			res = nod(ONOT, res, nil)
  3339  		}
  3340  		n = finishcompare(n, res, init)
  3341  		return n
  3342  	}
  3343  
  3344  	// inline: build boolean expression comparing element by element
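        	// For example, on amd64 two [4]byte arrays are compared, roughly,
        	// with a single combined uint32 comparison instead of four byte compares.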
  3345  	andor := OANDAND
  3346  	if n.Op == ONE {
  3347  		andor = OOROR
  3348  	}
  3349  	var expr *Node
  3350  	compare := func(el, er *Node) {
  3351  		a := nod(n.Op, el, er)
  3352  		if expr == nil {
  3353  			expr = a
  3354  		} else {
  3355  			expr = nod(andor, expr, a)
  3356  		}
  3357  	}
  3358  	cmpl = safeexpr(cmpl, init)
  3359  	cmpr = safeexpr(cmpr, init)
  3360  	if t.IsStruct() {
  3361  		for _, f := range t.Fields().Slice() {
  3362  			sym := f.Sym
  3363  			if sym.IsBlank() {
  3364  				continue
  3365  			}
  3366  			compare(
  3367  				nodSym(OXDOT, cmpl, sym),
  3368  				nodSym(OXDOT, cmpr, sym),
  3369  			)
  3370  		}
  3371  	} else {
  3372  		step := int64(1)
  3373  		remains := t.NumElem() * t.Elem().Width
  3374  		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3375  		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3376  		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3377  		for i := int64(0); remains > 0; {
  3378  			var convType *types.Type
  3379  			switch {
  3380  			case remains >= 8 && combine64bit:
  3381  				convType = types.Types[TINT64]
  3382  				step = 8 / t.Elem().Width
  3383  			case remains >= 4 && combine32bit:
  3384  				convType = types.Types[TUINT32]
  3385  				step = 4 / t.Elem().Width
  3386  			case remains >= 2 && combine16bit:
  3387  				convType = types.Types[TUINT16]
  3388  				step = 2 / t.Elem().Width
  3389  			default:
  3390  				step = 1
  3391  			}
  3392  			if step == 1 {
  3393  				compare(
  3394  					nod(OINDEX, cmpl, nodintconst(int64(i))),
  3395  					nod(OINDEX, cmpr, nodintconst(int64(i))),
  3396  				)
  3397  				i++
  3398  				remains -= t.Elem().Width
  3399  			} else {
  3400  				elemType := t.Elem().ToUnsigned()
  3401  				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
  3402  				cmplw = conv(cmplw, elemType) // convert to unsigned
  3403  				cmplw = conv(cmplw, convType) // widen
  3404  				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
  3405  				cmprw = conv(cmprw, elemType)
  3406  				cmprw = conv(cmprw, convType)
  3407  				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3408  				// ssa will generate a single large load.
  3409  				for offset := int64(1); offset < step; offset++ {
  3410  					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
  3411  					lb = conv(lb, elemType)
  3412  					lb = conv(lb, convType)
  3413  					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
  3414  					cmplw = nod(OOR, cmplw, lb)
  3415  					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
  3416  					rb = conv(rb, elemType)
  3417  					rb = conv(rb, convType)
  3418  					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
  3419  					cmprw = nod(OOR, cmprw, rb)
  3420  				}
  3421  				compare(cmplw, cmprw)
  3422  				i += step
  3423  				remains -= step * t.Elem().Width
  3424  			}
  3425  		}
  3426  	}
  3427  	if expr == nil {
  3428  		expr = nodbool(n.Op == OEQ)
  3429  	}
  3430  	n = finishcompare(n, expr, init)
  3431  	return n
  3432  }
  3433  
  3434  // The result of finishcompare MUST be assigned back to n, e.g.
  3435  // 	n.Left = finishcompare(n.Left, x, init)
  3436  func finishcompare(n, r *Node, init *Nodes) *Node {
  3437  	// Use nn here to avoid passing r to typecheck.
  3438  	nn := r
  3439  	nn = typecheck(nn, Erv)
  3440  	nn = walkexpr(nn, init)
  3441  	r = nn
  3442  	if r.Type != n.Type {
  3443  		r = nod(OCONVNOP, r, nil)
  3444  		r.Type = n.Type
  3445  		r.SetTypecheck(1)
  3446  		nn = r
  3447  	}
  3448  	return nn
  3449  }
  3450  
  3451  // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3452  func (n *Node) isIntOrdering() bool {
  3453  	switch n.Op {
  3454  	case OLE, OLT, OGE, OGT:
  3455  	default:
  3456  		return false
  3457  	}
  3458  	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3459  }
  3460  
  3461  // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3462  // n must be an OANDAND or OOROR node.
  3463  // The result of walkinrange MUST be assigned back to n, e.g.
  3464  // 	n.Left = walkinrange(n.Left, init)
  3465  func walkinrange(n *Node, init *Nodes) *Node {
  3466  	// We are looking for something equivalent to a opl b OP b opr c, where:
  3467  	// * a, b, and c have integer type
  3468  	// * b is side-effect-free
  3469  	// * opl and opr are each < or ≤
  3470  	// * OP is &&
  3471  	l := n.Left
  3472  	r := n.Right
  3473  	if !l.isIntOrdering() || !r.isIntOrdering() {
  3474  		return n
  3475  	}
  3476  
  3477  	// Find b, if it exists, and rename appropriately.
  3478  	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3479  	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3480  	a, opl, b := l.Left, l.Op, l.Right
  3481  	x, opr, c := r.Left, r.Op, r.Right
  3482  	for i := 0; ; i++ {
  3483  		if samesafeexpr(b, x) {
  3484  			break
  3485  		}
  3486  		if i == 3 {
  3487  			// Tried all permutations and couldn't find an appropriate b == x.
  3488  			return n
  3489  		}
  3490  		if i&1 == 0 {
  3491  			a, opl, b = b, brrev(opl), a
  3492  		} else {
  3493  			x, opr, c = c, brrev(opr), x
  3494  		}
  3495  	}
  3496  
  3497  	// If n.Op is ||, apply de Morgan.
  3498  	// Negate the internal ops now; we'll negate the top level op at the end.
  3499  	// Henceforth assume &&.
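        	// For example, x < 3 || x > 5 is handled as the negation of
        	// 3 <= x && x <= 5, which lowers to uint(x-3) > 2.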
  3500  	negateResult := n.Op == OOROR
  3501  	if negateResult {
  3502  		opl = brcom(opl)
  3503  		opr = brcom(opr)
  3504  	}
  3505  
  3506  	cmpdir := func(o Op) int {
  3507  		switch o {
  3508  		case OLE, OLT:
  3509  			return -1
  3510  		case OGE, OGT:
  3511  			return +1
  3512  		}
  3513  		Fatalf("walkinrange cmpdir %v", o)
  3514  		return 0
  3515  	}
  3516  	if cmpdir(opl) != cmpdir(opr) {
  3517  		// Not a range check; something like b < a && b < c.
  3518  		return n
  3519  	}
  3520  
  3521  	switch opl {
  3522  	case OGE, OGT:
  3523  		// We have something like a > b && b ≥ c.
  3524  		// Switch and reverse ops and rename constants,
  3525  		// to make it look like a ≤ b && b < c.
  3526  		a, c = c, a
  3527  		opl, opr = brrev(opr), brrev(opl)
  3528  	}
  3529  
  3530  	// We must ensure that c-a is non-negative.
  3531  	// For now, require a and c to be constants.
  3532  	// In the future, we could also support a == 0 and c == len/cap(...).
  3533  	// Unfortunately, by this point, most len/cap expressions have been
  3534  	// stored into temporary variables.
  3535  	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3536  		return n
  3537  	}
  3538  
  3539  	if opl == OLT {
  3540  		// We have a < b && ...
  3541  		// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3542  		// If a is not the maximum constant for b's type,
  3543  		// we can increment a and switch to ≤.
  3544  		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3545  			return n
  3546  		}
  3547  		a = nodintconst(a.Int64() + 1)
  3548  		opl = OLE
  3549  	}
  3550  
  3551  	bound := c.Int64() - a.Int64()
  3552  	if bound < 0 {
  3553  		// Bad news. Something like 5 <= x && x < 3.
  3554  		// Rare in practice, and we still need to generate side-effects,
  3555  		// so just leave it alone.
  3556  		return n
  3557  	}
  3558  
  3559  	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3560  	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3561  	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3562  	// which is equivalent to uint(b-a) < uint(c-a).
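        	// For instance, 4 <= x && x < 10 becomes uint(x-4) < 6.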
  3563  	ut := b.Type.ToUnsigned()
  3564  	lhs := conv(nod(OSUB, b, a), ut)
  3565  	rhs := nodintconst(bound)
  3566  	if negateResult {
  3567  		// Negate top level.
  3568  		opr = brcom(opr)
  3569  	}
  3570  	cmp := nod(opr, lhs, rhs)
  3571  	cmp.Pos = n.Pos
  3572  	cmp = addinit(cmp, l.Ninit.Slice())
  3573  	cmp = addinit(cmp, r.Ninit.Slice())
  3574  	// Typecheck the AST rooted at cmp...
  3575  	cmp = typecheck(cmp, Erv)
  3576  	// ...but then reset cmp's type to match n's type.
  3577  	cmp.Type = n.Type
  3578  	cmp = walkexpr(cmp, init)
  3579  	return cmp
  3580  }
  3581  
  3582  // bounded reports whether integer n must be in the range [0, max).
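        // For example, x&7 (for any integer x) and x%8 (for unsigned x) are
        // both bounded by 8.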
  3583  func bounded(n *Node, max int64) bool {
  3584  	if n.Type == nil || !n.Type.IsInteger() {
  3585  		return false
  3586  	}
  3587  
  3588  	sign := n.Type.IsSigned()
  3589  	bits := int32(8 * n.Type.Width)
  3590  
  3591  	if smallintconst(n) {
  3592  		v := n.Int64()
  3593  		return 0 <= v && v < max
  3594  	}
  3595  
  3596  	switch n.Op {
  3597  	case OAND:
  3598  		v := int64(-1)
  3599  		if smallintconst(n.Left) {
  3600  			v = n.Left.Int64()
  3601  		} else if smallintconst(n.Right) {
  3602  			v = n.Right.Int64()
  3603  		}
  3604  
  3605  		if 0 <= v && v < max {
  3606  			return true
  3607  		}
  3608  
  3609  	case OMOD:
  3610  		if !sign && smallintconst(n.Right) {
  3611  			v := n.Right.Int64()
  3612  			if 0 <= v && v <= max {
  3613  				return true
  3614  			}
  3615  		}
  3616  
  3617  	case ODIV:
  3618  		if !sign && smallintconst(n.Right) {
  3619  			v := n.Right.Int64()
  3620  			for bits > 0 && v >= 2 {
  3621  				bits--
  3622  				v >>= 1
  3623  			}
  3624  		}
  3625  
  3626  	case ORSH:
  3627  		if !sign && smallintconst(n.Right) {
  3628  			v := n.Right.Int64()
  3629  			if v > int64(bits) {
  3630  				return true
  3631  			}
  3632  			bits -= int32(v)
  3633  		}
  3634  	}
  3635  
  3636  	if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3637  		return true
  3638  	}
  3639  
  3640  	return false
  3641  }
  3642  
  3643  // usemethod checks interface method calls for uses of reflect.Type.Method.
  3644  func usemethod(n *Node) {
  3645  	t := n.Left.Type
  3646  
  3647  	// Looking for either of:
  3648  	//	Method(int) reflect.Method
  3649  	//	MethodByName(string) (reflect.Method, bool)
  3650  	//
  3651  	// TODO(crawshaw): improve precision of match by working out
  3652  	//                 how to check the method name.
  3653  	if n := t.Params().NumFields(); n != 1 {
  3654  		return
  3655  	}
  3656  	if n := t.Results().NumFields(); n != 1 && n != 2 {
  3657  		return
  3658  	}
  3659  	p0 := t.Params().Field(0)
  3660  	res0 := t.Results().Field(0)
  3661  	var res1 *types.Field
  3662  	if t.Results().NumFields() == 2 {
  3663  		res1 = t.Results().Field(1)
  3664  	}
  3665  
  3666  	if res1 == nil {
  3667  		if p0.Type.Etype != TINT {
  3668  			return
  3669  		}
  3670  	} else {
  3671  		if !p0.Type.IsString() {
  3672  			return
  3673  		}
  3674  		if !res1.Type.IsBoolean() {
  3675  			return
  3676  		}
  3677  	}
  3678  
  3679  	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3680  	//       (including global variables such as numImports - was issue #19028).
  3681  	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3682  		Curfn.Func.SetReflectMethod(true)
  3683  	}
  3684  }
  3685  
  3686  func usefield(n *Node) {
  3687  	if objabi.Fieldtrack_enabled == 0 {
  3688  		return
  3689  	}
  3690  
  3691  	switch n.Op {
  3692  	default:
  3693  		Fatalf("usefield %v", n.Op)
  3694  
  3695  	case ODOT, ODOTPTR:
  3696  		break
  3697  	}
  3698  	if n.Sym == nil {
  3699  		// No field name.  This DOTPTR was built by the compiler for access
  3700  		// to runtime data structures.  Ignore.
  3701  		return
  3702  	}
  3703  
  3704  	t := n.Left.Type
  3705  	if t.IsPtr() {
  3706  		t = t.Elem()
  3707  	}
  3708  	field := dotField[typeSymKey{t.Orig, n.Sym}]
  3709  	if field == nil {
  3710  		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3711  	}
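        	// Fields are marked for tracking with a `go:"track"` struct tag,
        	// which is recorded in the field's note.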
  3712  	if !strings.Contains(field.Note, "go:\"track\"") {
  3713  		return
  3714  	}
  3715  
  3716  	outer := n.Left.Type
  3717  	if outer.IsPtr() {
  3718  		outer = outer.Elem()
  3719  	}
  3720  	if outer.Sym == nil {
  3721  		yyerror("tracked field must be in named struct type")
  3722  	}
  3723  	if !exportname(field.Sym.Name) {
  3724  		yyerror("tracked field must be exported (upper case)")
  3725  	}
  3726  
  3727  	sym := tracksym(outer, field)
  3728  	if Curfn.Func.FieldTrack == nil {
  3729  		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3730  	}
  3731  	Curfn.Func.FieldTrack[sym] = struct{}{}
  3732  }
  3733  
  3734  func candiscardlist(l Nodes) bool {
  3735  	for _, n := range l.Slice() {
  3736  		if !candiscard(n) {
  3737  			return false
  3738  		}
  3739  	}
  3740  	return true
  3741  }
  3742  
  3743  func candiscard(n *Node) bool {
  3744  	if n == nil {
  3745  		return true
  3746  	}
  3747  
  3748  	switch n.Op {
  3749  	default:
  3750  		return false
  3751  
  3752  		// Discardable as long as the subpieces are.
  3753  	case ONAME,
  3754  		ONONAME,
  3755  		OTYPE,
  3756  		OPACK,
  3757  		OLITERAL,
  3758  		OADD,
  3759  		OSUB,
  3760  		OOR,
  3761  		OXOR,
  3762  		OADDSTR,
  3763  		OADDR,
  3764  		OANDAND,
  3765  		OARRAYBYTESTR,
  3766  		OARRAYRUNESTR,
  3767  		OSTRARRAYBYTE,
  3768  		OSTRARRAYRUNE,
  3769  		OCAP,
  3770  		OCMPIFACE,
  3771  		OCMPSTR,
  3772  		OCOMPLIT,
  3773  		OMAPLIT,
  3774  		OSTRUCTLIT,
  3775  		OARRAYLIT,
  3776  		OSLICELIT,
  3777  		OPTRLIT,
  3778  		OCONV,
  3779  		OCONVIFACE,
  3780  		OCONVNOP,
  3781  		ODOT,
  3782  		OEQ,
  3783  		ONE,
  3784  		OLT,
  3785  		OLE,
  3786  		OGT,
  3787  		OGE,
  3788  		OKEY,
  3789  		OSTRUCTKEY,
  3790  		OLEN,
  3791  		OMUL,
  3792  		OLSH,
  3793  		ORSH,
  3794  		OAND,
  3795  		OANDNOT,
  3796  		ONEW,
  3797  		ONOT,
  3798  		OCOM,
  3799  		OPLUS,
  3800  		OMINUS,
  3801  		OOROR,
  3802  		OPAREN,
  3803  		ORUNESTR,
  3804  		OREAL,
  3805  		OIMAG,
  3806  		OCOMPLEX:
  3807  		break
  3808  
  3809  		// Discardable as long as we know it's not division by zero.
  3810  	case ODIV, OMOD:
  3811  		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3812  			break
  3813  		}
  3814  		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3815  			break
  3816  		}
  3817  		return false
  3818  
  3819  		// Discardable as long as we know it won't fail because of a bad size.
  3820  	case OMAKECHAN, OMAKEMAP:
  3821  		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3822  			break
  3823  		}
  3824  		return false
  3825  
  3826  		// Difficult to tell what sizes are okay.
  3827  	case OMAKESLICE:
  3828  		return false
  3829  	}
  3830  
  3831  	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3832  		return false
  3833  	}
  3834  
  3835  	return true
  3836  }
  3837  
  3838  // rewrite
  3839  //	print(x, y, z)
  3840  // into
  3841  //	func(a1, a2, a3) {
  3842  //		print(a1, a2, a3)
  3843  //	}(x, y, z)
  3844  // and same for println.
  3845  
  3846  var walkprintfunc_prgen int
  3847  
  3848  // The result of walkprintfunc MUST be assigned back to n, e.g.
  3849  // 	n.Left = walkprintfunc(n.Left, init)
  3850  func walkprintfunc(n *Node, init *Nodes) *Node {
  3851  	if n.Ninit.Len() != 0 {
  3852  		walkstmtlist(n.Ninit.Slice())
  3853  		init.AppendNodes(&n.Ninit)
  3854  	}
  3855  
  3856  	t := nod(OTFUNC, nil, nil)
  3857  	num := 0
  3858  	var printargs []*Node
  3859  	var a *Node
  3860  	var buf string
  3861  	for _, n1 := range n.List.Slice() {
  3862  		buf = fmt.Sprintf("a%d", num)
  3863  		num++
  3864  		a = namedfield(buf, n1.Type)
  3865  		t.List.Append(a)
  3866  		printargs = append(printargs, a.Left)
  3867  	}
  3868  
  3869  	oldfn := Curfn
  3870  	Curfn = nil
  3871  
  3872  	walkprintfunc_prgen++
  3873  	sym := lookupN("print·%d", walkprintfunc_prgen)
  3874  	fn := dclfunc(sym, t)
  3875  
  3876  	a = nod(n.Op, nil, nil)
  3877  	a.List.Set(printargs)
  3878  	a = typecheck(a, Etop)
  3879  	a = walkstmt(a)
  3880  
  3881  	fn.Nbody.Set1(a)
  3882  
  3883  	funcbody(fn)
  3884  
  3885  	fn = typecheck(fn, Etop)
  3886  	typecheckslice(fn.Nbody.Slice(), Etop)
  3887  	xtop = append(xtop, fn)
  3888  	Curfn = oldfn
  3889  
  3890  	a = nod(OCALL, nil, nil)
  3891  	a.Left = fn.Func.Nname
  3892  	a.List.Set(n.List.Slice())
  3893  	a = typecheck(a, Etop)
  3894  	a = walkexpr(a, init)
  3895  	return a
  3896  }
  3897  
  3898  // substArgTypes substitutes the given list of types for
  3899  // successive occurrences of the "any" placeholder in the
  3900  // type syntax expression n.Type.
  3901  // The result of substArgTypes MUST be assigned back to old, e.g.
  3902  // 	n.Left = substArgTypes(n.Left, t1, t2)
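        //
        // Illustration only (hypothetical stub, not a call site in this file):
        // if a runtime stub were declared as
        // 	func f(typ *byte, x any) []any
        // then
        // 	fn := syslook("f")
        // 	fn = substArgTypes(fn, t, t)
        // substitutes t for each "any", in order of appearance.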
  3903  func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3904  	n := *old // make shallow copy
  3905  
  3906  	for _, t := range types_ {
  3907  		dowidth(t)
  3908  	}
  3909  	n.Type = types.SubstAny(n.Type, &types_)
  3910  	if len(types_) > 0 {
  3911  		Fatalf("substArgTypes: too many argument types")
  3912  	}
  3913  	return &n
  3914  }