github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/mips/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is HI or LO register
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad store type")
}

func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPSMOVWconvert, ssa.OpMIPSMOVWreg:
		t := v.Type
		if t.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVW
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVF
			if t.Size() == 8 {
				as = mips.AMOVD
			}
		}

		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPSMOVWnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
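		// Store from r; if the value lived in HI or LO it has been copied
		// into REGTMP above and r now names REGTMP.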
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPSADD,
		ssa.OpMIPSSUB,
		ssa.OpMIPSAND,
		ssa.OpMIPSOR,
		ssa.OpMIPSXOR,
		ssa.OpMIPSNOR,
		ssa.OpMIPSSLL,
		ssa.OpMIPSSRL,
		ssa.OpMIPSSRA,
		ssa.OpMIPSADDF,
		ssa.OpMIPSADDD,
		ssa.OpMIPSSUBF,
		ssa.OpMIPSSUBD,
		ssa.OpMIPSMULF,
		ssa.OpMIPSMULD,
		ssa.OpMIPSDIVF,
		ssa.OpMIPSDIVD,
		ssa.OpMIPSMUL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGT,
		ssa.OpMIPSSGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGTzero,
		ssa.OpMIPSSGTUzero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSADDconst,
		ssa.OpMIPSSUBconst,
		ssa.OpMIPSANDconst,
		ssa.OpMIPSORconst,
		ssa.OpMIPSXORconst,
		ssa.OpMIPSNORconst,
		ssa.OpMIPSSLLconst,
		ssa.OpMIPSSRLconst,
		ssa.OpMIPSSRAconst,
		ssa.OpMIPSSGTconst,
		ssa.OpMIPSSGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMULT,
		ssa.OpMIPSMULTU,
		ssa.OpMIPSDIV,
		ssa.OpMIPSDIVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPSMOVWconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPSMOVFconst,
		ssa.OpMIPSMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZ:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZzero:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMPEQF,
		ssa.OpMIPSCMPEQD,
		ssa.OpMIPSCMPGEF,
		ssa.OpMIPSCMPGED,
		ssa.OpMIPSCMPGTF,
		ssa.OpMIPSCMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPSMOVWaddr:
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVW $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//               when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVW $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBload,
		ssa.OpMIPSMOVBUload,
		ssa.OpMIPSMOVHload,
		ssa.OpMIPSMOVHUload,
		ssa.OpMIPSMOVWload,
		ssa.OpMIPSMOVFload,
		ssa.OpMIPSMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBstore,
		ssa.OpMIPSMOVHstore,
		ssa.OpMIPSMOVWstore,
		ssa.OpMIPSMOVFstore,
		ssa.OpMIPSMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBstorezero,
		ssa.OpMIPSMOVHstorezero,
		ssa.OpMIPSMOVWstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBreg,
		ssa.OpMIPSMOVBUreg,
		ssa.OpMIPSMOVHreg,
		ssa.OpMIPSMOVHUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPSMOVWreg || a.Op == ssa.OpMIPSMOVWnop {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPSMOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHUreg && t.Size() == 2 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVW)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPSMOVWF,
		ssa.OpMIPSMOVWD,
		ssa.OpMIPSTRUNCFW,
		ssa.OpMIPSTRUNCDW,
		ssa.OpMIPSMOVFD,
		ssa.OpMIPSMOVDF,
		ssa.OpMIPSNEGF,
		ssa.OpMIPSNEGD,
		ssa.OpMIPSSQRTD,
		ssa.OpMIPSCLZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSNEG:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSLoweredZero:
		// SUBU $4, R1
		// MOVW R0, 4(R1)
		// ADDU $4, R1
		// BNE Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPSLoweredMove:
		// SUBU $4, R1
		// MOVW 4(R1), Rtmp
		// MOVW Rtmp, (R2)
		// ADDU $4, R1
		// ADDU $4, R2
		// BNE Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPSCALLstatic, ssa.OpMIPSCALLclosure, ssa.OpMIPSCALLinter:
		s.Call(v)
	case ssa.OpMIPSLoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpMIPSLoweredAtomicLoad:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStore:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStorezero:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicExchange:
		// SYNC
		// MOVW Rarg1, Rtmp
		// LL   (Rarg0), Rout
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

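		// If the store-conditional failed (REGTMP == 0), branch back and retry.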
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicAdd:
		// SYNC
		// LL   (Rarg0), Rout
		// ADDU Rarg1, Rout, Rtmp
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		// ADDU Rarg1, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAddconst:
		// SYNC
		// LL   (Rarg0), Rout
		// ADDU $auxInt, Rout, Rtmp
		// SC   Rtmp, (Rarg0)
		// BEQ  Rtmp, -3(PC)
		// SYNC
		// ADDU $auxInt, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAnd,
		ssa.OpMIPSLoweredAtomicOr:
		// SYNC
		// LL     (Rarg0), Rtmp
		// AND/OR Rarg1, Rtmp
		// SC     Rtmp, (Rarg0)
		// BEQ    Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPSLoweredAtomicCas:
		// MOVW $0, Rout
		// SYNC
		// LL   (Rarg0), Rtmp
		// BNE  Rtmp, Rarg1, 4(PC)
		// MOVW Rarg2, Rout
		// SC   Rout, (Rarg0)
		// BEQ  Rout, -4(PC)
		// SYNC
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(mips.AMOVW)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()

		p4 := s.Prog(mips.ASC)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()

		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)

		s.Prog(mips.ASYNC)

		p6 := s.Prog(obj.ANOP)
		gc.Patch(p2, p6)

	case ssa.OpMIPSLoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPSFPFlagTrue,
		ssa.OpMIPSFPFlagFalse:
		// MOVW  $1, r
		// CMOVF R0, r

		cmov := mips.ACMOVF
		if v.Op == ssa.OpMIPSFPFlagFalse {
			cmov = mips.ACMOVT
		}
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p1 := s.Prog(cmov)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = mips.REGZERO
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg()

	case ssa.OpMIPSLoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPSLoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

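// blockJump maps a conditional block kind to its branch instruction (asm) and
// the inverted branch (invasm) used when the block's first successor is the
// fallthrough block.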
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPSEQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPSNE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPSLTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPSGEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPSLEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPSGTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPSFPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPSFPF: {mips.ABFPF, mips.ABFPT},
}

func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPSEQ, ssa.BlockMIPSNE,
		ssa.BlockMIPSLTZ, ssa.BlockMIPSGEZ,
		ssa.BlockMIPSLEZ, ssa.BlockMIPSGTZ,
		ssa.BlockMIPSFPT, ssa.BlockMIPSFPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}