github.com/riscv/riscv-go@v0.0.0-20200123204226-124ebd6fcc8e/src/cmd/compile/internal/mips64/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is the HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

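// ssaGenValue emits the machine instructions for a single SSA value v.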
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	s.SetPos(v.Pos)
	switch v.Op {
	case ssa.OpInitMem:
		// memory arg needs no code
	case ssa.OpArg:
		// input args need no code
	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
		// nothing to do
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := gc.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := gc.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpPhi:
		gc.CheckLoweredPhi(v)
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := gc.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in HI, LO
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := gc.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as follows:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *ssa.ExternSymbol:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *ssa.ArgSymbol, *ssa.AutoSymbol:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Reg = mips.REGSP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := gc.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
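		// Otherwise the argument is not a suitably typed load; fall through
		// and emit the sign/zero-extension instruction.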
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD:
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = gc.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Pkglookup("duffzero", gc.Runtimepkg))
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := gc.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := gc.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := gc.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := gc.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := gc.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := gc.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := gc.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := gc.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic:
		if v.Aux.(*gc.Sym) == gc.Deferreturn.Sym {
			// Deferred calls will appear to be returning to
			// the CALL deferreturn(SB) that we are about to emit.
			// However, the stack trace code will show the line
			// of the instruction byte before the return PC.
			// To avoid that being an unrelated instruction,
			// insert an actual hardware NOP that will have the right line number.
			// This is different from obj.ANOP, which is a virtual no-op
			// that doesn't make it into the instruction stream.
			ginsnop()
		}
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(v.Aux.(*gc.Sym))
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLclosure:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Offset = 0
		p.To.Reg = v.Args[0].Reg()
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLdefer:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Deferproc.Sym)
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLgo:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Newproc.Sym)
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLinter:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Offset = 0
		p.To.Reg = v.Args[0].Reg()
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := gc.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpVarDef:
		gc.Gvardef(v.Aux.(*gc.Node))
	case ssa.OpVarKill:
		gc.Gvarkill(v.Aux.(*gc.Node))
	case ssa.OpVarLive:
		gc.Gvarlive(v.Aux.(*gc.Node))
	case ssa.OpKeepAlive:
		gc.KeepAlive(v)
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := gc.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := gc.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := gc.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := gc.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpSelect0, ssa.OpSelect1:
		// nothing to do
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

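// blockJump maps each conditional block kind to its branch instruction (asm)
// and the inverted branch (invasm). ssaGenBlock uses invasm when the block's
// first successor is laid out next, so only the branch to the second
// successor needs to be emitted.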
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

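// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block that will be laid out immediately after b, so a jump
// to it can be omitted.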
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	s.SetPos(b.Pos)

	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := gc.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := gc.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := gc.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		gc.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		gc.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := gc.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(b.Aux.(*gc.Sym))
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = gc.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = gc.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = gc.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := gc.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}