github.com/slayercat/go@v0.0.0-20170428012452-c51559813f61/src/cmd/compile/internal/mips/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is HI or LO register
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad store type")
}

func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPSMOVWconvert, ssa.OpMIPSMOVWreg:
		t := v.Type
		if t.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVW
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVF
			if t.Size() == 8 {
				as = mips.AMOVD
			}
		}

		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPSMOVWnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPSADD,
		ssa.OpMIPSSUB,
		ssa.OpMIPSAND,
		ssa.OpMIPSOR,
		ssa.OpMIPSXOR,
		ssa.OpMIPSNOR,
		ssa.OpMIPSSLL,
		ssa.OpMIPSSRL,
		ssa.OpMIPSSRA,
		ssa.OpMIPSADDF,
		ssa.OpMIPSADDD,
		ssa.OpMIPSSUBF,
		ssa.OpMIPSSUBD,
		ssa.OpMIPSMULF,
		ssa.OpMIPSMULD,
		ssa.OpMIPSDIVF,
		ssa.OpMIPSDIVD,
		ssa.OpMIPSMUL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGT,
		ssa.OpMIPSSGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGTzero,
		ssa.OpMIPSSGTUzero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSADDconst,
		ssa.OpMIPSSUBconst,
		ssa.OpMIPSANDconst,
		ssa.OpMIPSORconst,
		ssa.OpMIPSXORconst,
		ssa.OpMIPSNORconst,
		ssa.OpMIPSSLLconst,
		ssa.OpMIPSSRLconst,
		ssa.OpMIPSSRAconst,
		ssa.OpMIPSSGTconst,
		ssa.OpMIPSSGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMULT,
		ssa.OpMIPSMULTU,
		ssa.OpMIPSDIV,
		ssa.OpMIPSDIVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPSMOVWconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPSMOVFconst,
		ssa.OpMIPSMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZ:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZzero:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMPEQF,
		ssa.OpMIPSCMPEQD,
		ssa.OpMIPSCMPGEF,
		ssa.OpMIPSCMPGED,
		ssa.OpMIPSCMPGTF,
		ssa.OpMIPSCMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPSMOVWaddr:
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		var wantreg string
		// MOVW $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//   when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *ssa.ExternSymbol:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *ssa.ArgSymbol, *ssa.AutoSymbol:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVW $off(SP), R
			wantreg = "SP"
			p.From.Reg = mips.REGSP
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBload,
		ssa.OpMIPSMOVBUload,
		ssa.OpMIPSMOVHload,
		ssa.OpMIPSMOVHUload,
		ssa.OpMIPSMOVWload,
		ssa.OpMIPSMOVFload,
		ssa.OpMIPSMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBstore,
		ssa.OpMIPSMOVHstore,
		ssa.OpMIPSMOVWstore,
		ssa.OpMIPSMOVFstore,
		ssa.OpMIPSMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBstorezero,
		ssa.OpMIPSMOVHstorezero,
		ssa.OpMIPSMOVWstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBreg,
		ssa.OpMIPSMOVBUreg,
		ssa.OpMIPSMOVHreg,
		ssa.OpMIPSMOVHUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPSMOVWreg || a.Op == ssa.OpMIPSMOVWnop {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPSMOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHUreg && t.Size() == 2 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVW)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPSMOVWF,
		ssa.OpMIPSMOVWD,
		ssa.OpMIPSTRUNCFW,
		ssa.OpMIPSTRUNCDW,
		ssa.OpMIPSMOVFD,
		ssa.OpMIPSMOVDF,
		ssa.OpMIPSNEGF,
		ssa.OpMIPSNEGD,
		ssa.OpMIPSSQRTD,
		ssa.OpMIPSCLZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSNEG:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSLoweredZero:
		// SUBU $4, R1
		// MOVW R0, 4(R1)
		// ADDU $4, R1
		// BNE  Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPSLoweredMove:
		// SUBU $4, R1
		// MOVW 4(R1), Rtmp
		// MOVW Rtmp, (R2)
		// ADDU $4, R1
		// ADDU $4, R2
		// BNE  Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPSCALLstatic, ssa.OpMIPSCALLclosure, ssa.OpMIPSCALLinter:
		s.Call(v)
	case ssa.OpMIPSLoweredAtomicLoad:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStore:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStorezero:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicExchange:
		// SYNC
		// MOVW Rarg1, Rtmp
		// LL   (Rarg0), Rout
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicAdd:
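		// The LL/SC retry loop below computes old+delta into Rtmp and tries to
		// store it; SC writes 0 into Rtmp on failure, so the BEQ loops back.
		// The trailing ADDU then recomputes old+delta into Rout, which still
		// holds the value loaded by LL once the loop exits.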
		// SYNC
		// LL   (Rarg0), Rout
		// ADDU Rarg1, Rout, Rtmp
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		// ADDU Rarg1, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAddconst:
		// SYNC
		// LL   (Rarg0), Rout
		// ADDU $auxInt, Rout, Rtmp
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		// ADDU $auxInt, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAnd,
		ssa.OpMIPSLoweredAtomicOr:
		// SYNC
		// LL     (Rarg0), Rtmp
		// AND/OR Rarg1, Rtmp
		// SC     Rtmp, (Rarg0)
		// BEQ    Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPSLoweredAtomicCas:
		// MOVW $0, Rout
		// SYNC
		// LL   (Rarg0), Rtmp
		// BNE  Rtmp, Rarg1, 4(PC)
		// MOVW Rarg2, Rout
		// SC   Rout, (Rarg0)
		// BEQ  Rout, -4(PC)
		// SYNC
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(mips.AMOVW)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()

		p4 := s.Prog(mips.ASC)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()

		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)

		s.Prog(mips.ASYNC)

		p6 := s.Prog(obj.ANOP)
		gc.Patch(p2, p6)

	case ssa.OpMIPSLoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPSFPFlagTrue,
		ssa.OpMIPSFPFlagFalse:
		// MOVW  $1, r
		// CMOVF R0, r

		cmov := mips.ACMOVF
		if v.Op == ssa.OpMIPSFPFlagFalse {
			cmov = mips.ACMOVT
		}
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p1 := s.Prog(cmov)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = mips.REGZERO
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg()

	case ssa.OpMIPSLoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPSEQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPSNE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPSLTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPSGEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPSLEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPSGTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPSFPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPSFPF: {mips.ABFPF, mips.ABFPT},
}

func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPSEQ, ssa.BlockMIPSNE,
		ssa.BlockMIPSLTZ, ssa.BlockMIPSGEZ,
		ssa.BlockMIPSLEZ, ssa.BlockMIPSGTZ,
		ssa.BlockMIPSFPT, ssa.BlockMIPSFPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
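		// Pick the branch form from the fallthrough block: if the first
		// successor follows this block, emit the inverted condition toward the
		// second; if the second follows, emit the condition toward the first;
		// otherwise emit the conditional branch plus an unconditional jump.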
		switch next {
		case b.Succs[0].Block():
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}