github.com/mangodowner/go-gm@v0.0.0-20180818020936-8baa2bd4408c/src/cmd/compile/internal/mips64/ssa.go (about) 1 // Copyright 2016 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package mips64 6 7 import ( 8 "math" 9 10 "cmd/compile/internal/gc" 11 "cmd/compile/internal/ssa" 12 "cmd/compile/internal/types" 13 "cmd/internal/obj" 14 "cmd/internal/obj/mips" 15 ) 16 17 // isFPreg returns whether r is an FP register 18 func isFPreg(r int16) bool { 19 return mips.REG_F0 <= r && r <= mips.REG_F31 20 } 21 22 // isHILO returns whether r is HI or LO register 23 func isHILO(r int16) bool { 24 return r == mips.REG_HI || r == mips.REG_LO 25 } 26 27 // loadByType returns the load instruction of the given type. 28 func loadByType(t *types.Type, r int16) obj.As { 29 if isFPreg(r) { 30 if t.Size() == 4 { // float32 or int32 31 return mips.AMOVF 32 } else { // float64 or int64 33 return mips.AMOVD 34 } 35 } else { 36 switch t.Size() { 37 case 1: 38 if t.IsSigned() { 39 return mips.AMOVB 40 } else { 41 return mips.AMOVBU 42 } 43 case 2: 44 if t.IsSigned() { 45 return mips.AMOVH 46 } else { 47 return mips.AMOVHU 48 } 49 case 4: 50 if t.IsSigned() { 51 return mips.AMOVW 52 } else { 53 return mips.AMOVWU 54 } 55 case 8: 56 return mips.AMOVV 57 } 58 } 59 panic("bad load type") 60 } 61 62 // storeByType returns the store instruction of the given type. 
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		// Stores need no sign/zero extension, so only the width matters.
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

// ssaGenValue emits the machine instructions (obj.Progs) for a single
// SSA value v. Values that cannot be handled cause a compiler fatal error.
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			// FP-to-FP copies use the FP move instruction.
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		// Two-register ops: result = arg0 OP arg1, with arg1 in From
		// and arg0 in the middle (Reg) operand.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		// Set-on-greater-than: note arg0 goes in From here,
		// the reverse of the arithmetic ops above.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		// Register-immediate ops: constant operand comes from AuxInt.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		// FP constants are stored in AuxInt as float64 bits.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		// FP compares set the FP condition flag; no destination register.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//   when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *ssa.ExternSymbol:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *ssa.ArgSymbol, *ssa.AutoSymbol:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		// Memory load: base register in arg0, offset/symbol via AddAux.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		// Memory store: value in arg1, base register in arg0.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		// Store zero by using the hardwired zero register as the source.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		// Sign/zero extension. Look through copies to see if the argument
		// is a load that already produced a correctly-extended value.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		// Otherwise emit the extension instruction like the
		// conversion ops below.
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD:
		// Simple unary register-to-register ops.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// Emitted loop (for sz == 8):
		//	SUBV	$8, R1
		//	MOVV	R0, 8(R1)
		//	ADDV	$8, R1
		//	BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		// Pick the widest store whose size divides the total length (AuxInt).
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		// Loop back to the store instruction.
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
		// Emitted loop (for sz == 8):
		//	SUBV	$8, R1
		//	MOVV	8(R1), Rtmp
		//	MOVV	Rtmp, (R2)
		//	ADDV	$8, R1
		//	ADDV	$8, R2
		//	BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		// Pick the widest move whose size divides the total length (AuxInt).
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		// Loop back to the load instruction.
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// Materialize the FP condition flag as a 0/1 value:
		//	MOVV	$0, r
		//	BFPF	2(PC)
		//	MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

// blockJump maps each conditional block kind to its branch instruction
// (asm) and the inverted branch (invasm) used when the layout allows
// falling through to the taken successor.
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block laid out immediately after b, so a jump to next
// can be omitted.
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// Fall through to the taken successor; branch (inverted)
			// to the other one.
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			// Fall through to the untaken successor.
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			// Neither successor follows: branch plus unconditional jump.
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			// Non-flag control values live in a register, which becomes
			// the branch's source operand (FP branches test the FP flag
			// and take no register).
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}