github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/mips64/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is HI or LO register
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

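// ssaGenValue emits the machine instructions (obj.Progs) for a single SSA value.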
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
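		// binary register ops: result = Args[0] op Args[1]
		// (From holds SSA arg1, Reg holds SSA arg0)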
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
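		// SGT/SGTU: result = 1 if Args[0] > Args[1] (signed/unsigned), 0 otherwise.
		// Note the operand order is reversed relative to the binary ops above.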
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
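		// register-immediate instructions: the constant operand comes from
		// AuxInt, the register operand from Args[0]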
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in HI, LO: MULV/MULVU put the high word in HI and the low word in LO;
		// DIVV/DIVVU put the quotient in LO and the remainder in HI
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
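		// AuxInt holds the constant encoded as float64 bits, for both
		// MOVFconst and MOVDconst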
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
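		// FP comparisons only set the floating-point condition flag; there is
		// no register result. The flag is read via FPFlagTrue/FPFlagFalse or
		// the FPT/FPF branch blocks.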
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *ssa.ExternSymbol:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *ssa.ArgSymbol, *ssa.AutoSymbol:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Reg = mips.REGSP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
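		// sign/zero-extension ops. If the argument is a load of exactly the
		// right width and signedness, it is already extended in the register,
		// so at most a plain move is needed; otherwise fall through to the
		// generic unary case below.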
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
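		// AuxInt is the alignment of the region being zeroed; use the widest
		// store that the alignment allows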
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
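		// AuxInt is the alignment of the copied region; use the widest move
		// that the alignment allows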
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

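// blockJump maps each conditional block kind to its branch instruction and to
// the inverted branch, used when the first successor is the fallthrough block.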
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

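// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block that will be laid out immediately after b, so a branch to
// next can be omitted in favor of fallthrough.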
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}