github.com/bir3/gocompiler@v0.9.2202/src/cmd/compile/internal/walk/switch.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"github.com/bir3/gocompiler/src/go/constant"
    10  	"github.com/bir3/gocompiler/src/go/token"
    11  	"math/bits"
    12  	"sort"
    13  
    14  	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
    15  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
    16  	"github.com/bir3/gocompiler/src/cmd/compile/internal/objw"
    17  	"github.com/bir3/gocompiler/src/cmd/compile/internal/reflectdata"
    18  	"github.com/bir3/gocompiler/src/cmd/compile/internal/rttype"
    19  	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssagen"
    20  	"github.com/bir3/gocompiler/src/cmd/compile/internal/typecheck"
    21  	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
    22  	"github.com/bir3/gocompiler/src/cmd/internal/obj"
    23  	"github.com/bir3/gocompiler/src/cmd/internal/src"
    24  )
    25  
    26  // walkSwitch walks a switch statement.
    27  func walkSwitch(sw *ir.SwitchStmt) {
    28  	// Guard against double walk, see #25776.
    29  	if sw.Walked() {
    30  		return	// Was fatal, but eliminating every possible source of double-walking is hard
    31  	}
    32  	sw.SetWalked(true)
    33  
    34  	if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {
    35  		walkSwitchType(sw)
    36  	} else {
    37  		walkSwitchExpr(sw)
    38  	}
    39  }
    40  
    41  // walkSwitchExpr generates an AST implementing sw.  sw is an
    42  // expression switch.
    43  func walkSwitchExpr(sw *ir.SwitchStmt) {
    44  	lno := ir.SetPos(sw)
    45  
    46  	cond := sw.Tag
    47  	sw.Tag = nil
    48  
    49  	// convert switch {...} to switch true {...}
    50  	if cond == nil {
    51  		cond = ir.NewBool(base.Pos, true)
    52  		cond = typecheck.Expr(cond)
    53  		cond = typecheck.DefaultLit(cond, nil)
    54  	}
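        	// For example, "switch { case x < 0: f() }" is treated as
        	// "switch true { case x < 0: f() }", so every expression switch
        	// below has a concrete tag value to compare against.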
    55  
    56  	// Given "switch string(byteslice)",
    57  	// with all cases being side-effect free,
    58  	// use a zero-cost alias of the byte slice.
    59  	// Do this before calling walkExpr on cond,
    60  	// because walkExpr will lower the string
    61  	// conversion into a runtime call.
    62  	// See issue 24937 for more discussion.
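        	// For example, in
        	//	switch string(b) { case "foo": ...; case "bar": ... }
        	// every case is a constant, so the OBYTES2STRTMP form lets the
        	// comparisons read b's bytes directly instead of allocating a
        	// new string just to use as the tag.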
    63  	if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
    64  		cond := cond.(*ir.ConvExpr)
    65  		cond.SetOp(ir.OBYTES2STRTMP)
    66  	}
    67  
    68  	cond = walkExpr(cond, sw.PtrInit())
    69  	if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
    70  		cond = copyExpr(cond, cond.Type(), &sw.Compiled)
    71  	}
    72  
    73  	base.Pos = lno
    74  
    75  	s := exprSwitch{
    76  		pos:		lno,
    77  		exprname:	cond,
    78  	}
    79  
    80  	var defaultGoto ir.Node
    81  	var body ir.Nodes
    82  	for _, ncase := range sw.Cases {
    83  		label := typecheck.AutoLabel(".s")
    84  		jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
    85  
    86  		// Process case dispatch.
    87  		if len(ncase.List) == 0 {
    88  			if defaultGoto != nil {
    89  				base.Fatalf("duplicate default case not detected during typechecking")
    90  			}
    91  			defaultGoto = jmp
    92  		}
    93  
    94  		for i, n1 := range ncase.List {
    95  			var rtype ir.Node
    96  			if i < len(ncase.RTypes) {
    97  				rtype = ncase.RTypes[i]
    98  			}
    99  			s.Add(ncase.Pos(), n1, rtype, jmp)
   100  		}
   101  
   102  		// Process body.
   103  		body.Append(ir.NewLabelStmt(ncase.Pos(), label))
   104  		body.Append(ncase.Body...)
   105  		if fall, pos := endsInFallthrough(ncase.Body); !fall {
   106  			br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
   107  			br.SetPos(pos)
   108  			body.Append(br)
   109  		}
   110  	}
   111  	sw.Cases = nil
   112  
   113  	if defaultGoto == nil {
   114  		br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
   115  		br.SetPos(br.Pos().WithNotStmt())
   116  		defaultGoto = br
   117  	}
   118  
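        	// At this point sw.Compiled holds the assignment (if any) that
        	// copies the walked tag into a temporary. Emit appends the
        	// dispatch code, followed by the default jump and then the
        	// labeled case bodies. Roughly, for
        	//	switch x { case 1: A; case 2, 3: B; default: C }
        	// we end up with something like:
        	//	if x == 1 { goto s1 }
        	//	if x >= 2 && x <= 3 { goto s2 }
        	//	goto sDefault
        	//	s1: A; break
        	//	s2: B; break
        	//	sDefault: C; break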
   119  	s.Emit(&sw.Compiled)
   120  	sw.Compiled.Append(defaultGoto)
   121  	sw.Compiled.Append(body.Take()...)
   122  	walkStmtList(sw.Compiled)
   123  }
   124  
   125  // An exprSwitch walks an expression switch.
   126  type exprSwitch struct {
   127  	pos		src.XPos
   128  	exprname	ir.Node	// value being switched on
   129  
   130  	done	ir.Nodes
   131  	clauses	[]exprClause
   132  }
   133  
   134  type exprClause struct {
   135  	pos	src.XPos
   136  	lo, hi	ir.Node
   137  	rtype	ir.Node	// *runtime._type for OEQ node
   138  	jmp	ir.Node
   139  }
   140  
   141  func (s *exprSwitch) Add(pos src.XPos, expr, rtype, jmp ir.Node) {
   142  	c := exprClause{pos: pos, lo: expr, hi: expr, rtype: rtype, jmp: jmp}
   143  	if types.IsOrdered[s.exprname.Type().Kind()] && expr.Op() == ir.OLITERAL {
   144  		s.clauses = append(s.clauses, c)
   145  		return
   146  	}
   147  
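        	// The clause is not a sortable constant (the tag's type is not
        	// ordered, e.g. bool or interface, or the case expression is not
        	// a compile-time constant). It must keep its position relative
        	// to the other cases, so flush everything accumulated so far,
        	// emit this clause on its own, and flush again.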
   148  	s.flush()
   149  	s.clauses = append(s.clauses, c)
   150  	s.flush()
   151  }
   152  
   153  func (s *exprSwitch) Emit(out *ir.Nodes) {
   154  	s.flush()
   155  	out.Append(s.done.Take()...)
   156  }
   157  
   158  func (s *exprSwitch) flush() {
   159  	cc := s.clauses
   160  	s.clauses = nil
   161  	if len(cc) == 0 {
   162  		return
   163  	}
   164  
   165  	// Caution: If len(cc) == 1, then cc[0] might not be an OLITERAL.
   166  	// The code below is structured to implicitly handle this case
   167  	// (e.g., sort.Slice doesn't need to invoke the less function
   168  	// when there's only a single slice element).
   169  
   170  	if s.exprname.Type().IsString() && len(cc) >= 2 {
   171  		// Sort strings by length and then by value. It is
   172  		// much cheaper to compare lengths than values, and
   173  		// all we need here is consistency. We respect this
   174  		// sorting below.
   175  		sort.Slice(cc, func(i, j int) bool {
   176  			si := ir.StringVal(cc[i].lo)
   177  			sj := ir.StringVal(cc[j].lo)
   178  			if len(si) != len(sj) {
   179  				return len(si) < len(sj)
   180  			}
   181  			return si < sj
   182  		})
   183  
   184  		// runLen returns the string length associated with a
   185  		// particular run of exprClauses.
   186  		runLen := func(run []exprClause) int64 { return int64(len(ir.StringVal(run[0].lo))) }
   187  
   188  		// Collapse runs of consecutive strings with the same length.
   189  		var runs [][]exprClause
   190  		start := 0
   191  		for i := 1; i < len(cc); i++ {
   192  			if runLen(cc[start:]) != runLen(cc[i:]) {
   193  				runs = append(runs, cc[start:i])
   194  				start = i
   195  			}
   196  		}
   197  		runs = append(runs, cc[start:])
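        		// For example, the sorted cases "ab", "cd", "xyz" collapse into
        		// two runs: ["ab", "cd"] (length 2) and ["xyz"] (length 3).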
   198  
   199  		// We have strings of more than one length. Generate an
   200  		// outer switch which switches on the length of the string
   201  		// and an inner switch in each case which resolves all the
   202  		// strings of the same length. The code looks something like this:
   203  
   204  		// goto outerLabel
   205  		// len5:
   206  		//   ... search among length 5 strings ...
   207  		//   goto endLabel
   208  		// len8:
   209  		//   ... search among length 8 strings ...
   210  		//   goto endLabel
   211  		// ... other lengths ...
   212  		// outerLabel:
   213  		// switch len(s) {
   214  		//   case 5: goto len5
   215  		//   case 8: goto len8
   216  		//   ... other lengths ...
   217  		// }
   218  		// endLabel:
   219  
   220  		outerLabel := typecheck.AutoLabel(".s")
   221  		endLabel := typecheck.AutoLabel(".s")
   222  
   223  		// Jump around all the individual switches for each length.
   224  		s.done.Append(ir.NewBranchStmt(s.pos, ir.OGOTO, outerLabel))
   225  
   226  		var outer exprSwitch
   227  		outer.exprname = ir.NewUnaryExpr(s.pos, ir.OLEN, s.exprname)
   228  		outer.exprname.SetType(types.Types[types.TINT])
   229  
   230  		for _, run := range runs {
   231  			// Target label to jump to when we match this length.
   232  			label := typecheck.AutoLabel(".s")
   233  
   234  			// Search within this run of same-length strings.
   235  			pos := run[0].pos
   236  			s.done.Append(ir.NewLabelStmt(pos, label))
   237  			stringSearch(s.exprname, run, &s.done)
   238  			s.done.Append(ir.NewBranchStmt(pos, ir.OGOTO, endLabel))
   239  
   240  			// Add length case to outer switch.
   241  			cas := ir.NewInt(pos, runLen(run))
   242  			jmp := ir.NewBranchStmt(pos, ir.OGOTO, label)
   243  			outer.Add(pos, cas, nil, jmp)
   244  		}
   245  		s.done.Append(ir.NewLabelStmt(s.pos, outerLabel))
   246  		outer.Emit(&s.done)
   247  		s.done.Append(ir.NewLabelStmt(s.pos, endLabel))
   248  		return
   249  	}
   250  
   251  	sort.Slice(cc, func(i, j int) bool {
   252  		return constant.Compare(cc[i].lo.Val(), token.LSS, cc[j].lo.Val())
   253  	})
   254  
   255  	// Merge consecutive integer cases.
   256  	if s.exprname.Type().IsInteger() {
   257  		consecutive := func(last, next constant.Value) bool {
   258  			delta := constant.BinaryOp(next, token.SUB, last)
   259  			return constant.Compare(delta, token.EQL, constant.MakeInt64(1))
   260  		}
   261  
   262  		merged := cc[:1]
   263  		for _, c := range cc[1:] {
   264  			last := &merged[len(merged)-1]
   265  			if last.jmp == c.jmp && consecutive(last.hi.Val(), c.lo.Val()) {
   266  				last.hi = c.lo
   267  			} else {
   268  				merged = append(merged, c)
   269  			}
   270  		}
   271  		cc = merged
   272  	}
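        	// For example, "case 1, 2, 3, 7:" (all jumping to the same label)
        	// becomes the two clauses [1,3] and [7,7]; the range clause is
        	// later tested as "1 <= x && x <= 3" by exprClause.test. Clauses
        	// that jump to different case bodies are never merged.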
   273  
   274  	s.search(cc, &s.done)
   275  }
   276  
   277  func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
   278  	if s.tryJumpTable(cc, out) {
   279  		return
   280  	}
   281  	binarySearch(len(cc), out,
   282  		func(i int) ir.Node {
   283  			return ir.NewBinaryExpr(base.Pos, ir.OLE, s.exprname, cc[i-1].hi)
   284  		},
   285  		func(i int, nif *ir.IfStmt) {
   286  			c := &cc[i]
   287  			nif.Cond = c.test(s.exprname)
   288  			nif.Body = []ir.Node{c.jmp}
   289  		},
   290  	)
   291  }
   292  
   293  // Try to implement the clauses with a jump table. Returns true if successful.
   294  func (s *exprSwitch) tryJumpTable(cc []exprClause, out *ir.Nodes) bool {
   295  	const minCases = 8	// have at least minCases cases in the switch
   296  	const minDensity = 4	// use at least 1 out of every minDensity entries
   297  
   298  	if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
   299  		return false
   300  	}
   301  	if len(cc) < minCases {
   302  		return false	// not enough cases for it to be worth it
   303  	}
   304  	if cc[0].lo.Val().Kind() != constant.Int {
   305  		return false	// e.g. float
   306  	}
   307  	if s.exprname.Type().Size() > int64(types.PtrSize) {
   308  		return false	// 64-bit switches on 32-bit archs
   309  	}
   310  	min := cc[0].lo.Val()
   311  	max := cc[len(cc)-1].hi.Val()
   312  	width := constant.BinaryOp(constant.BinaryOp(max, token.SUB, min), token.ADD, constant.MakeInt64(1))
   313  	limit := constant.MakeInt64(int64(len(cc)) * minDensity)
   314  	if constant.Compare(width, token.GTR, limit) {
   315  		// We disable jump tables if we use less than a minimum fraction of the entries.
   316  		// E.g., for switch x {case 0: case 1000: case 2000:} we don't want to use a jump table.
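        		// Worked example: with 8 cases the limit is 8*minDensity = 32,
        		// so values spanning 0, 1000, 2000, ... (width far above 32) fall
        		// back to binary search, while 8 values packed into a window of
        		// at most 32 consecutive integers still get a jump table.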
   317  		return false
   318  	}
   319  	jt := ir.NewJumpTableStmt(base.Pos, s.exprname)
   320  	for _, c := range cc {
   321  		jmp := c.jmp.(*ir.BranchStmt)
   322  		if jmp.Op() != ir.OGOTO || jmp.Label == nil {
   323  			panic("bad switch case body")
   324  		}
   325  		for i := c.lo.Val(); constant.Compare(i, token.LEQ, c.hi.Val()); i = constant.BinaryOp(i, token.ADD, constant.MakeInt64(1)) {
   326  			jt.Cases = append(jt.Cases, i)
   327  			jt.Targets = append(jt.Targets, jmp.Label)
   328  		}
   329  	}
   330  	out.Append(jt)
   331  	return true
   332  }
   333  
   334  func (c *exprClause) test(exprname ir.Node) ir.Node {
   335  	// Integer range.
   336  	if c.hi != c.lo {
   337  		low := ir.NewBinaryExpr(c.pos, ir.OGE, exprname, c.lo)
   338  		high := ir.NewBinaryExpr(c.pos, ir.OLE, exprname, c.hi)
   339  		return ir.NewLogicalExpr(c.pos, ir.OANDAND, low, high)
   340  	}
   341  
   342  	// Optimize "switch true { ... }" and "switch false { ... }".
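        	// For example, in a tagless "switch { case x < 5: ... }" the tag
        	// is the constant true, so the generated test is simply "x < 5"
        	// (or its negation for a false tag) rather than a comparison of
        	// the case expression against the tag.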
   343  	if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
   344  		if ir.BoolVal(exprname) {
   345  			return c.lo
   346  		} else {
   347  			return ir.NewUnaryExpr(c.pos, ir.ONOT, c.lo)
   348  		}
   349  	}
   350  
   351  	n := ir.NewBinaryExpr(c.pos, ir.OEQ, exprname, c.lo)
   352  	n.RType = c.rtype
   353  	return n
   354  }
   355  
   356  func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
   357  	// In theory, we could be more aggressive, allowing any
   358  	// side-effect-free expressions in cases, but it's a bit
   359  	// tricky because some of that information is unavailable due
   360  	// to the introduction of temporaries during order.
   361  	// Restricting to constants is simple and probably powerful
   362  	// enough.
   363  
   364  	for _, ncase := range sw.Cases {
   365  		for _, v := range ncase.List {
   366  			if v.Op() != ir.OLITERAL {
   367  				return false
   368  			}
   369  		}
   370  	}
   371  	return true
   372  }
   373  
   374  // endsInFallthrough reports whether stmts ends with a "fallthrough" statement.
   375  func endsInFallthrough(stmts []ir.Node) (bool, src.XPos) {
   376  	if len(stmts) == 0 {
   377  		return false, src.NoXPos
   378  	}
   379  	i := len(stmts) - 1
   380  	return stmts[i].Op() == ir.OFALL, stmts[i].Pos()
   381  }
   382  
   383  // walkSwitchType generates an AST that implements sw, where sw is a
   384  // type switch.
   385  func walkSwitchType(sw *ir.SwitchStmt) {
   386  	var s typeSwitch
   387  	s.srcName = sw.Tag.(*ir.TypeSwitchGuard).X
   388  	s.srcName = walkExpr(s.srcName, sw.PtrInit())
   389  	s.srcName = copyExpr(s.srcName, s.srcName.Type(), &sw.Compiled)
   390  	s.okName = typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
   391  	s.itabName = typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINT8].PtrTo())
   392  
   393  	// Get interface descriptor word.
   394  	// For empty interfaces this will be the type.
   395  	// For non-empty interfaces this will be the itab.
   396  	srcItab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s.srcName)
   397  	srcData := ir.NewUnaryExpr(base.Pos, ir.OIDATA, s.srcName)
   398  	srcData.SetType(types.Types[types.TUINT8].PtrTo())
   399  	srcData.SetTypecheck(1)
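        	// An interface value is two words: srcItab selects the first
        	// (a *runtime._type for an empty interface, a *runtime.itab
        	// otherwise) and srcData the second, the data pointer.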
   400  
   401  	// For empty interfaces, do:
   402  	//     if e._type == nil {
   403  	//         do nil case if it exists, otherwise default
   404  	//     }
   405  	//     h := e._type.hash
   406  	// Use a similar strategy for non-empty interfaces.
   407  	ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
   408  	ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, srcItab, typecheck.NodNil())
   409  	base.Pos = base.Pos.WithNotStmt()	// disable statement marks after the first check.
   410  	ifNil.Cond = typecheck.Expr(ifNil.Cond)
   411  	ifNil.Cond = typecheck.DefaultLit(ifNil.Cond, nil)
   412  	// ifNil.Nbody assigned later.
   413  	sw.Compiled.Append(ifNil)
   414  
   415  	// Load hash from type or itab.
   416  	dotHash := typeHashFieldOf(base.Pos, srcItab)
   417  	s.hashName = copyExpr(dotHash, dotHash.Type(), &sw.Compiled)
   418  
   419  	// Make a label for each case body.
   420  	labels := make([]*types.Sym, len(sw.Cases))
   421  	for i := range sw.Cases {
   422  		labels[i] = typecheck.AutoLabel(".s")
   423  	}
   424  
   425  	// "jump" to execute if no case matches.
   426  	br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
   427  
   428  	// Assemble a list of all the types we're looking for.
   429  	// This pass flattens the case lists, as well as handles
   430  	// some unusual cases, like default and nil cases.
   431  	type oneCase struct {
   432  		pos	src.XPos
   433  		jmp	ir.Node	// jump to body of selected case
   434  
   435  		// The case we're matching. Normally the type we're looking for
   436  		// is typ.Type(), but when typ is ODYNAMICTYPE the actual type
   437  		// we're looking for is not a compile-time constant (typ.Type()
   438  		// will be its shape).
   439  		typ	ir.Node
   440  	}
   441  	var cases []oneCase
   442  	var defaultGoto, nilGoto ir.Node
   443  	for i, ncase := range sw.Cases {
   444  		jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, labels[i])
   445  		if len(ncase.List) == 0 {	// default:
   446  			if defaultGoto != nil {
   447  				base.Fatalf("duplicate default case not detected during typechecking")
   448  			}
   449  			defaultGoto = jmp
   450  		}
   451  		for _, n1 := range ncase.List {
   452  			if ir.IsNil(n1) {	// case nil:
   453  				if nilGoto != nil {
   454  					base.Fatalf("duplicate nil case not detected during typechecking")
   455  				}
   456  				nilGoto = jmp
   457  				continue
   458  			}
   459  			if n1.Op() == ir.ODYNAMICTYPE {
   460  				// Convert dynamic to static, if the dynamic is actually static.
   461  				// TODO: why isn't this OTYPE to begin with?
   462  				dt := n1.(*ir.DynamicType)
   463  				if dt.RType != nil && dt.RType.Op() == ir.OADDR {
   464  					addr := dt.RType.(*ir.AddrExpr)
   465  					if addr.X.Op() == ir.OLINKSYMOFFSET {
   466  						n1 = ir.TypeNode(n1.Type())
   467  					}
   468  				}
   469  				if dt.ITab != nil && dt.ITab.Op() == ir.OADDR {
   470  					addr := dt.ITab.(*ir.AddrExpr)
   471  					if addr.X.Op() == ir.OLINKSYMOFFSET {
   472  						n1 = ir.TypeNode(n1.Type())
   473  					}
   474  				}
   475  			}
   476  			cases = append(cases, oneCase{
   477  				pos:	ncase.Pos(),
   478  				typ:	n1,
   479  				jmp:	jmp,
   480  			})
   481  		}
   482  	}
   483  	if defaultGoto == nil {
   484  		defaultGoto = br
   485  	}
   486  	if nilGoto == nil {
   487  		nilGoto = defaultGoto
   488  	}
   489  	ifNil.Body = []ir.Node{nilGoto}
   490  
   491  	// Now go through the list of cases, processing groups as we find them.
   492  	var concreteCases []oneCase
   493  	var interfaceCases []oneCase
   494  	flush := func() {
   495  		// Process all the concrete types first. Because we handle shadowing
   496  		// below, it is correct to do all the concrete types before all of
   497  		// the interface types.
   498  		// The concrete cases can all be handled without a runtime call.
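        		// For example, in
        		//	switch x.(type) { case io.Reader: A; case *bytes.Buffer: B }
        		// *bytes.Buffer also implements io.Reader, but the shadowing
        		// check in the case loop below drops the *bytes.Buffer clause
        		// entirely, so emitting the concrete tests first stays correct.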
   499  		if len(concreteCases) > 0 {
   500  			var clauses []typeClause
   501  			for _, c := range concreteCases {
   502  				as := ir.NewAssignListStmt(c.pos, ir.OAS2,
   503  					[]ir.Node{ir.BlankNode, s.okName},					// _, ok =
   504  					[]ir.Node{ir.NewTypeAssertExpr(c.pos, s.srcName, c.typ.Type())})	// iface.(type)
   505  				nif := ir.NewIfStmt(c.pos, s.okName, []ir.Node{c.jmp}, nil)
   506  				clauses = append(clauses, typeClause{
   507  					hash:	types.TypeHash(c.typ.Type()),
   508  					body:	[]ir.Node{typecheck.Stmt(as), typecheck.Stmt(nif)},
   509  				})
   510  			}
   511  			s.flush(clauses, &sw.Compiled)
   512  			concreteCases = concreteCases[:0]
   513  		}
   514  
   515  		// The "any" case, if it exists, must be the last interface case, because
   516  		// it would shadow all subsequent cases. Strip it off here so the runtime
   517  		// call only needs to handle non-empty interfaces.
   518  		var anyGoto ir.Node
   519  		if len(interfaceCases) > 0 && interfaceCases[len(interfaceCases)-1].typ.Type().IsEmptyInterface() {
   520  			anyGoto = interfaceCases[len(interfaceCases)-1].jmp
   521  			interfaceCases = interfaceCases[:len(interfaceCases)-1]
   522  		}
   523  
   524  		// Next, process all the interface types with a single call to the runtime.
   525  		if len(interfaceCases) > 0 {
   526  
   527  			// Build an internal/abi.InterfaceSwitch descriptor to pass to the runtime.
   528  			lsym := types.LocalPkg.Lookup(fmt.Sprintf(".interfaceSwitch.%d", interfaceSwitchGen)).LinksymABI(obj.ABI0)
   529  			interfaceSwitchGen++
   530  			c := rttype.NewCursor(lsym, 0, rttype.InterfaceSwitch)
   531  			c.Field("Cache").WritePtr(typecheck.LookupRuntimeVar("emptyInterfaceSwitchCache"))
   532  			c.Field("NCases").WriteInt(int64(len(interfaceCases)))
   533  			array, sizeDelta := c.Field("Cases").ModifyArray(len(interfaceCases))
   534  			for i, c := range interfaceCases {
   535  				array.Elem(i).WritePtr(reflectdata.TypeSym(c.typ.Type()).Linksym())
   536  			}
   537  			objw.Global(lsym, int32(rttype.InterfaceSwitch.Size()+sizeDelta), obj.LOCAL)
   538  			// The GC only needs to see the first pointer in the structure (all the others
   539  			// are to static locations). So the InterfaceSwitch type itself is fine, even
   540  			// though it might not cover the whole array we wrote above.
   541  			lsym.Gotype = reflectdata.TypeLinksym(rttype.InterfaceSwitch)
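        			// The descriptor written above mirrors the fields used here:
        			// a Cache pointer (initially the shared empty cache), NCases,
        			// and a Cases array with one type pointer per interface case,
        			// in source order, so the runtime can hand back the index of
        			// the matching case.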
   542  
   543  			// Call runtime to do switch
   544  			// case, itab = runtime.interfaceSwitch(&descriptor, typeof(arg))
   545  			var typeArg ir.Node
   546  			if s.srcName.Type().IsEmptyInterface() {
   547  				typeArg = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINT8].PtrTo(), srcItab)
   548  			} else {
   549  				typeArg = itabType(srcItab)
   550  			}
   551  			caseVar := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
   552  			isw := ir.NewInterfaceSwitchStmt(base.Pos, caseVar, s.itabName, typeArg, dotHash, lsym)
   553  			sw.Compiled.Append(isw)
   554  
   555  			// Switch on the result of the call (or cache lookup).
   556  			var newCases []*ir.CaseClause
   557  			for i, c := range interfaceCases {
   558  				newCases = append(newCases, &ir.CaseClause{
   559  					List:	[]ir.Node{ir.NewInt(base.Pos, int64(i))},
   560  					Body:	[]ir.Node{c.jmp},
   561  				})
   562  			}
   563  			// TODO: add len(newCases) case, mark switch as bounded
   564  			sw2 := ir.NewSwitchStmt(base.Pos, caseVar, newCases)
   565  			sw.Compiled.Append(typecheck.Stmt(sw2))
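        			// caseVar is the index produced by the interface switch:
        			// values 0 through len(interfaceCases)-1 jump to the matching
        			// case body; any other value (no interface matched) falls
        			// through this inner switch to the "any" jump, if present,
        			// and otherwise on toward the default jump.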
   566  			interfaceCases = interfaceCases[:0]
   567  		}
   568  
   569  		if anyGoto != nil {
   570  			// We've already handled the nil case, so everything
   571  			// that reaches here matches the "any" case.
   572  			sw.Compiled.Append(anyGoto)
   573  		}
   574  	}
   575  caseLoop:
   576  	for _, c := range cases {
   577  		if c.typ.Op() == ir.ODYNAMICTYPE {
   578  			flush()	// process all previous cases
   579  			dt := c.typ.(*ir.DynamicType)
   580  			dot := ir.NewDynamicTypeAssertExpr(c.pos, ir.ODYNAMICDOTTYPE, s.srcName, dt.RType)
   581  			dot.ITab = dt.ITab
   582  			dot.SetType(c.typ.Type())
   583  			dot.SetTypecheck(1)
   584  
   585  			as := ir.NewAssignListStmt(c.pos, ir.OAS2, nil, nil)
   586  			as.Lhs = []ir.Node{ir.BlankNode, s.okName}	// _, ok =
   587  			as.Rhs = []ir.Node{dot}
   588  			typecheck.Stmt(as)
   589  
   590  			nif := ir.NewIfStmt(c.pos, s.okName, []ir.Node{c.jmp}, nil)
   591  			sw.Compiled.Append(as, nif)
   592  			continue
   593  		}
   594  
   595  		// Check for shadowing (a case that will never fire because
   596  		// a previous case would have always fired first). This check
   597  		// allows us to reorder concrete and interface cases.
   598  		// (TODO: these should be vet failures, maybe?)
   599  		for _, ic := range interfaceCases {
   600  			// An interface type case will shadow all
   601  			// subsequent types that implement that interface.
   602  			if typecheck.Implements(c.typ.Type(), ic.typ.Type()) {
   603  				continue caseLoop
   604  			}
   605  			// Note that we don't need to worry about:
   606  			// 1. Two concrete types shadowing each other. That's
   607  			//    disallowed by the spec.
   608  			// 2. A concrete type shadowing an interface type.
   609  			//    That can never happen, as interface types can
   610  			//    be satisfied by an infinite set of concrete types.
   611  			// The correctness of this step also depends on handling
   612  			// the dynamic type cases separately, as we do above.
   613  		}
   614  
   615  		if c.typ.Type().IsInterface() {
   616  			interfaceCases = append(interfaceCases, c)
   617  		} else {
   618  			concreteCases = append(concreteCases, c)
   619  		}
   620  	}
   621  	flush()
   622  
   623  	sw.Compiled.Append(defaultGoto)	// if none of the cases matched
   624  
   625  	// Now generate all the case bodies
   626  	for i, ncase := range sw.Cases {
   627  		sw.Compiled.Append(ir.NewLabelStmt(ncase.Pos(), labels[i]))
   628  		if caseVar := ncase.Var; caseVar != nil {
   629  			val := s.srcName
   630  			if len(ncase.List) == 1 {
   631  				// single type. We have to downcast the input value to the target type.
   632  				if ncase.List[0].Op() == ir.OTYPE {	// single compile-time known type
   633  					t := ncase.List[0].Type()
   634  					if t.IsInterface() {
   635  						// This case is an interface. Build case value from input interface.
   636  						// The data word will always be the same, but the itab/type changes.
   637  						if t.IsEmptyInterface() {
   638  							var typ ir.Node
   639  							if s.srcName.Type().IsEmptyInterface() {
   640  								// E->E, nothing to do, type is already correct.
   641  								typ = srcItab
   642  							} else {
   643  								// I->E, load type out of itab
   644  								typ = itabType(srcItab)
   645  								typ.SetPos(ncase.Pos())
   646  							}
   647  							val = ir.NewBinaryExpr(ncase.Pos(), ir.OMAKEFACE, typ, srcData)
   648  						} else {
   649  							// The itab we need was returned by a runtime.interfaceSwitch call.
   650  							val = ir.NewBinaryExpr(ncase.Pos(), ir.OMAKEFACE, s.itabName, srcData)
   651  						}
   652  					} else {
   653  						// This case is a concrete type, just read its value out of the interface.
   654  						val = ifaceData(ncase.Pos(), s.srcName, t)
   655  					}
   656  				} else if ncase.List[0].Op() == ir.ODYNAMICTYPE {	// single runtime known type
   657  					dt := ncase.List[0].(*ir.DynamicType)
   658  					x := ir.NewDynamicTypeAssertExpr(ncase.Pos(), ir.ODYNAMICDOTTYPE, val, dt.RType)
   659  					x.ITab = dt.ITab
   660  					val = x
   661  				} else if ir.IsNil(ncase.List[0]) {
   662  				} else {
   663  					base.Fatalf("unhandled type switch case %v", ncase.List[0])
   664  				}
   665  				val.SetType(caseVar.Type())
   666  				val.SetTypecheck(1)
   667  			}
   668  			l := []ir.Node{
   669  				ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
   670  				ir.NewAssignStmt(ncase.Pos(), caseVar, val),
   671  			}
   672  			typecheck.Stmts(l)
   673  			sw.Compiled.Append(l...)
   674  		}
   675  		sw.Compiled.Append(ncase.Body...)
   676  		sw.Compiled.Append(br)
   677  	}
   678  
   679  	walkStmtList(sw.Compiled)
   680  	sw.Tag = nil
   681  	sw.Cases = nil
   682  }
   683  
   684  var interfaceSwitchGen int
   685  
   686  // typeHashFieldOf returns an expression to select the type hash field
   687  // from an interface's descriptor word (whether a *runtime._type or
   688  // *runtime.itab pointer).
   689  func typeHashFieldOf(pos src.XPos, itab *ir.UnaryExpr) *ir.SelectorExpr {
   690  	if itab.Op() != ir.OITAB {
   691  		base.Fatalf("expected OITAB, got %v", itab.Op())
   692  	}
   693  	var hashField *types.Field
   694  	if itab.X.Type().IsEmptyInterface() {
   695  		// runtime._type's hash field
   696  		if rtypeHashField == nil {
   697  			rtypeHashField = runtimeField("hash", rttype.Type.OffsetOf("Hash"), types.Types[types.TUINT32])
   698  		}
   699  		hashField = rtypeHashField
   700  	} else {
   701  		// runtime.itab's hash field
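        		// The itab layout is roughly {inter, _type, hash, ...}: two
        		// pointer-sized words followed by the uint32 hash, hence the
        		// 2*PtrSize offset below.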
   702  		if itabHashField == nil {
   703  			itabHashField = runtimeField("hash", int64(2*types.PtrSize), types.Types[types.TUINT32])
   704  		}
   705  		hashField = itabHashField
   706  	}
   707  	return boundedDotPtr(pos, itab, hashField)
   708  }
   709  
   710  var rtypeHashField, itabHashField *types.Field
   711  
   712  // A typeSwitch walks a type switch.
   713  type typeSwitch struct {
   714  	// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
   715  	srcName		ir.Node	// value being type-switched on
   716  	hashName	ir.Node	// type hash of the value being type-switched on
   717  	okName		ir.Node	// boolean used for comma-ok type assertions
   718  	itabName	ir.Node	// itab value to use for first word of non-empty interface
   719  }
   720  
   721  type typeClause struct {
   722  	hash	uint32
   723  	body	ir.Nodes
   724  }
   725  
   726  func (s *typeSwitch) flush(cc []typeClause, compiled *ir.Nodes) {
   727  	if len(cc) == 0 {
   728  		return
   729  	}
   730  
   731  	sort.Slice(cc, func(i, j int) bool { return cc[i].hash < cc[j].hash })
   732  
   733  	// Combine adjacent cases with the same hash.
   734  	merged := cc[:1]
   735  	for _, c := range cc[1:] {
   736  		last := &merged[len(merged)-1]
   737  		if last.hash == c.hash {
   738  			last.body.Append(c.body.Take()...)
   739  		} else {
   740  			merged = append(merged, c)
   741  		}
   742  	}
   743  	cc = merged
   744  
   745  	if s.tryJumpTable(cc, compiled) {
   746  		return
   747  	}
   748  	binarySearch(len(cc), compiled,
   749  		func(i int) ir.Node {
   750  			return ir.NewBinaryExpr(base.Pos, ir.OLE, s.hashName, ir.NewInt(base.Pos, int64(cc[i-1].hash)))
   751  		},
   752  		func(i int, nif *ir.IfStmt) {
   753  			// TODO(mdempsky): Omit hash equality check if
   754  			// there's only one type.
   755  			c := cc[i]
   756  			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashName, ir.NewInt(base.Pos, int64(c.hash)))
   757  			nif.Body.Append(c.body.Take()...)
   758  		},
   759  	)
   760  }
   761  
   762  // Try to implement the clauses with a jump table. Returns true if successful.
   763  func (s *typeSwitch) tryJumpTable(cc []typeClause, out *ir.Nodes) bool {
   764  	const minCases = 5	// have at least minCases cases in the switch
   765  	if base.Flag.N != 0 || !ssagen.Arch.LinkArch.CanJumpTable || base.Ctxt.Retpoline {
   766  		return false
   767  	}
   768  	if len(cc) < minCases {
   769  		return false	// not enough cases for it to be worth it
   770  	}
   771  	hashes := make([]uint32, len(cc))
   772  	// b = # of bits to use. Start with the minimum number of
   773  	// bits possible, but try a few larger sizes if needed.
   774  	b0 := bits.Len(uint(len(cc) - 1))
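        	// For example, with 5 cases b0 = bits.Len(4) = 3, so we try
        	// windows of 3, 4, and 5 bits, sliding each window across the
        	// 32-bit hash until some (b, i) pair gives every case a distinct
        	// value, i.e. a small perfect hash for the jump table built below.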
   775  	for b := b0; b < b0+3; b++ {
   776  	pickI:
   777  		for i := 0; i <= 32-b; i++ {	// starting bit position
   778  			// Compute the hash we'd get from all the cases,
   779  			// selecting b bits starting at bit i.
   780  			hashes = hashes[:0]
   781  			for _, c := range cc {
   782  				h := c.hash >> i & (1<<b - 1)
   783  				hashes = append(hashes, h)
   784  			}
   785  			// Order by increasing hash.
   786  			sort.Slice(hashes, func(j, k int) bool {
   787  				return hashes[j] < hashes[k]
   788  			})
   789  			for j := 1; j < len(hashes); j++ {
   790  				if hashes[j] == hashes[j-1] {
   791  					// There is a duplicate hash; try a different b/i pair.
   792  					continue pickI
   793  				}
   794  			}
   795  
   796  			// All hashes are distinct. Use these values of b and i.
   797  			h := s.hashName
   798  			if i != 0 {
   799  				h = ir.NewBinaryExpr(base.Pos, ir.ORSH, h, ir.NewInt(base.Pos, int64(i)))
   800  			}
   801  			h = ir.NewBinaryExpr(base.Pos, ir.OAND, h, ir.NewInt(base.Pos, int64(1<<b-1)))
   802  			h = typecheck.Expr(h)
   803  
   804  			// Build jump table.
   805  			jt := ir.NewJumpTableStmt(base.Pos, h)
   806  			jt.Cases = make([]constant.Value, 1<<b)
   807  			jt.Targets = make([]*types.Sym, 1<<b)
   808  			out.Append(jt)
   809  
   810  			// Start with all hashes going to the didn't-match target.
   811  			noMatch := typecheck.AutoLabel(".s")
   812  			for j := 0; j < 1<<b; j++ {
   813  				jt.Cases[j] = constant.MakeInt64(int64(j))
   814  				jt.Targets[j] = noMatch
   815  			}
   816  			// This statement is not reachable, but it will make it obvious that we don't
   817  			// fall through to the first case.
   818  			out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
   819  
   820  			// Emit each of the actual cases.
   821  			for _, c := range cc {
   822  				h := c.hash >> i & (1<<b - 1)
   823  				label := typecheck.AutoLabel(".s")
   824  				jt.Targets[h] = label
   825  				out.Append(ir.NewLabelStmt(base.Pos, label))
   826  				out.Append(c.body...)
   827  				// We reach here if the hash matches but the type equality test fails.
   828  				out.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, noMatch))
   829  			}
   830  			// Emit point to go to if type doesn't match any case.
   831  			out.Append(ir.NewLabelStmt(base.Pos, noMatch))
   832  			return true
   833  		}
   834  	}
   835  	// Couldn't find a perfect hash. Fall back to binary search.
   836  	return false
   837  }
   838  
   839  // binarySearch constructs a binary search tree for handling n cases,
   840  // and appends it to out. It's used for efficiently implementing
   841  // switch statements.
   842  //
   843  // less(i) should return a boolean expression. If it evaluates true,
   844  // then cases before i will be tested; otherwise, cases i and later.
   845  //
   846  // leaf(i, nif) should set up nif (an OIF node) to test case i. In
   847  // particular, it should set nif.Cond and nif.Body.
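        //
        // For example, with n = 6 the generated code looks roughly like:
        //
        //	if less(3) {
        //		// leaf tests for cases 0, 1, 2, chained via else
        //	} else {
        //		// leaf tests for cases 3, 4, 5, chained via else
        //	}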
   848  func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i int, nif *ir.IfStmt)) {
   849  	const binarySearchMin = 4	// minimum number of cases for binary search
   850  
   851  	var do func(lo, hi int, out *ir.Nodes)
   852  	do = func(lo, hi int, out *ir.Nodes) {
   853  		n := hi - lo
   854  		if n < binarySearchMin {
   855  			for i := lo; i < hi; i++ {
   856  				nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   857  				leaf(i, nif)
   858  				base.Pos = base.Pos.WithNotStmt()
   859  				nif.Cond = typecheck.Expr(nif.Cond)
   860  				nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
   861  				out.Append(nif)
   862  				out = &nif.Else
   863  			}
   864  			return
   865  		}
   866  
   867  		half := lo + n/2
   868  		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   869  		nif.Cond = less(half)
   870  		base.Pos = base.Pos.WithNotStmt()
   871  		nif.Cond = typecheck.Expr(nif.Cond)
   872  		nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
   873  		do(lo, half, &nif.Body)
   874  		do(half, hi, &nif.Else)
   875  		out.Append(nif)
   876  	}
   877  
   878  	do(0, n, out)
   879  }
   880  
   881  func stringSearch(expr ir.Node, cc []exprClause, out *ir.Nodes) {
   882  	if len(cc) < 4 {
   883  		// Short list, just do brute force equality checks.
   884  		for _, c := range cc {
   885  			nif := ir.NewIfStmt(base.Pos.WithNotStmt(), typecheck.DefaultLit(typecheck.Expr(c.test(expr)), nil), []ir.Node{c.jmp}, nil)
   886  			out.Append(nif)
   887  			out = &nif.Else
   888  		}
   889  		return
   890  	}
   891  
   892  	// The strategy here is to find a simple test to divide the set of possible strings
   893  	// that might match expr approximately in half.
   894  	// The test we're going to use is to do an ordered comparison of a single byte
   895  	// of expr to a constant. We will pick the index of that byte and the value we're
   896  	// comparing against to make the split as even as possible.
   897  	//   if expr[3] <= 'd' { ... search strings with expr[3] at 'd' or lower  ... }
   898  	//   else              { ... search strings with expr[3] at 'e' or higher ... }
   899  	//
   900  	// To add complication, we will do the ordered comparison in the signed domain.
   901  	// The reason for this is to prevent CSE from merging the load used for the
   902  	// ordered comparison with the load used for the later equality check.
   903  	//   if expr[3] <= 'd' { ... if expr[0] == 'f' && expr[1] == 'o' && expr[2] == 'o' && expr[3] == 'd' { ... } }
   904  	// If we did both expr[3] loads in the unsigned domain, they would be CSEd, and that
   905  	// would in turn defeat the combining of expr[0]...expr[3] into a single 4-byte load.
   906  	// See issue 48222.
   907  	// By using signed loads for the ordered comparison and unsigned loads for the
   908  	// equality comparison, they don't get CSEd and the equality comparisons will be
   909  	// done using wider loads.
   910  
   911  	n := len(ir.StringVal(cc[0].lo))	// Length of the constant strings.
   912  	bestScore := int64(0)			// measure of how good the split is.
   913  	bestIdx := 0				// split using expr[bestIdx]
   914  	bestByte := int8(0)			// compare expr[bestIdx] against bestByte
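        	// The score below is le*(len(cc)-le), the product of the two group
        	// sizes; it is maximized when the chosen byte index and value split
        	// the clauses as evenly as possible.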
   915  	for idx := 0; idx < n; idx++ {
   916  		for b := int8(-128); b < 127; b++ {
   917  			le := 0
   918  			for _, c := range cc {
   919  				s := ir.StringVal(c.lo)
   920  				if int8(s[idx]) <= b {
   921  					le++
   922  				}
   923  			}
   924  			score := int64(le) * int64(len(cc)-le)
   925  			if score > bestScore {
   926  				bestScore = score
   927  				bestIdx = idx
   928  				bestByte = b
   929  			}
   930  		}
   931  	}
   932  
   933  	// The split must be at least 1:n-1 because we have at least 2 distinct strings; they
   934  	// have to be different somewhere.
   935  	// TODO: what if the best split is still pretty bad?
   936  	if bestScore == 0 {
   937  		base.Fatalf("unable to split string set")
   938  	}
   939  
   940  	// Convert expr to a []int8
   941  	slice := ir.NewConvExpr(base.Pos, ir.OSTR2BYTESTMP, types.NewSlice(types.Types[types.TINT8]), expr)
   942  	slice.SetTypecheck(1)	// legacy typechecker doesn't handle this op
   943  	slice.MarkNonNil()
   944  	// Load the byte we're splitting on.
   945  	load := ir.NewIndexExpr(base.Pos, slice, ir.NewInt(base.Pos, int64(bestIdx)))
   946  	// Compare with the value we're splitting on.
   947  	cmp := ir.Node(ir.NewBinaryExpr(base.Pos, ir.OLE, load, ir.NewInt(base.Pos, int64(bestByte))))
   948  	cmp = typecheck.DefaultLit(typecheck.Expr(cmp), nil)
   949  	nif := ir.NewIfStmt(base.Pos, cmp, nil, nil)
   950  
   951  	var le []exprClause
   952  	var gt []exprClause
   953  	for _, c := range cc {
   954  		s := ir.StringVal(c.lo)
   955  		if int8(s[bestIdx]) <= bestByte {
   956  			le = append(le, c)
   957  		} else {
   958  			gt = append(gt, c)
   959  		}
   960  	}
   961  	stringSearch(expr, le, &nif.Body)
   962  	stringSearch(expr, gt, &nif.Else)
   963  	out.Append(nif)
   964  
   965  	// TODO: if expr[bestIdx] has enough different possible values, use a jump table.
   966  }