github.com/bir3/gocompiler@v0.9.2202/src/cmd/compile/internal/mips/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips

import (
	"math"

	"github.com/bir3/gocompiler/src/cmd/compile/internal/base"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/logopt"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssa"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ssagen"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
	"github.com/bir3/gocompiler/src/cmd/internal/obj"
	"github.com/bir3/gocompiler/src/cmd/internal/obj/mips"
)

// isFPreg reports whether r is an FP register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO reports whether r is the HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad store type")
}

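// ssaGenValue emits the machine instructions for a single SSA value v,
// appending obj.Prog entries to the function being assembled in s.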
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPSMOVWreg:
		t := v.Type
		if t.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVW
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVF
			if t.Size() == 8 {
				as = mips.AMOVD
			}
		}

		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPSMOVWnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpMIPSADD,
		ssa.OpMIPSSUB,
		ssa.OpMIPSAND,
		ssa.OpMIPSOR,
		ssa.OpMIPSXOR,
		ssa.OpMIPSNOR,
		ssa.OpMIPSSLL,
		ssa.OpMIPSSRL,
		ssa.OpMIPSSRA,
		ssa.OpMIPSADDF,
		ssa.OpMIPSADDD,
		ssa.OpMIPSSUBF,
		ssa.OpMIPSSUBD,
		ssa.OpMIPSMULF,
		ssa.OpMIPSMULD,
		ssa.OpMIPSDIVF,
		ssa.OpMIPSDIVD,
		ssa.OpMIPSMUL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGT,
		ssa.OpMIPSSGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGTzero,
		ssa.OpMIPSSGTUzero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSADDconst,
		ssa.OpMIPSSUBconst,
		ssa.OpMIPSANDconst,
		ssa.OpMIPSORconst,
		ssa.OpMIPSXORconst,
		ssa.OpMIPSNORconst,
		ssa.OpMIPSSLLconst,
		ssa.OpMIPSSRLconst,
		ssa.OpMIPSSRAconst,
		ssa.OpMIPSSGTconst,
		ssa.OpMIPSSGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMULT,
		ssa.OpMIPSMULTU,
		ssa.OpMIPSDIV,
		ssa.OpMIPSDIVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPSMOVWconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPSMOVFconst,
		ssa.OpMIPSMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZzero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMPEQF,
		ssa.OpMIPSCMPEQD,
		ssa.OpMIPSCMPGEF,
		ssa.OpMIPSCMPGED,
		ssa.OpMIPSCMPGTF,
		ssa.OpMIPSCMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPSMOVWaddr:
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVW $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//   when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVW $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBload,
		ssa.OpMIPSMOVBUload,
		ssa.OpMIPSMOVHload,
		ssa.OpMIPSMOVHUload,
		ssa.OpMIPSMOVWload,
		ssa.OpMIPSMOVFload,
		ssa.OpMIPSMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBstore,
		ssa.OpMIPSMOVHstore,
		ssa.OpMIPSMOVWstore,
		ssa.OpMIPSMOVFstore,
		ssa.OpMIPSMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBstorezero,
		ssa.OpMIPSMOVHstorezero,
		ssa.OpMIPSMOVWstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBreg,
		ssa.OpMIPSMOVBUreg,
		ssa.OpMIPSMOVHreg,
		ssa.OpMIPSMOVHUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPSMOVWreg || a.Op == ssa.OpMIPSMOVWnop {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPSMOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHUreg && t.Size() == 2 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVW)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPSMOVWF,
		ssa.OpMIPSMOVWD,
		ssa.OpMIPSTRUNCFW,
		ssa.OpMIPSTRUNCDW,
		ssa.OpMIPSMOVFD,
		ssa.OpMIPSMOVDF,
		ssa.OpMIPSMOVWfpgp,
		ssa.OpMIPSMOVWgpfp,
		ssa.OpMIPSNEGF,
		ssa.OpMIPSNEGD,
		ssa.OpMIPSABSD,
		ssa.OpMIPSSQRTF,
		ssa.OpMIPSSQRTD,
		ssa.OpMIPSCLZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSNEG:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSLoweredZero:
		// SUBU	$4, R1
		// MOVW	R0, 4(R1)
		// ADDU	$4, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPSLoweredMove:
		// SUBU	$4, R1
		// MOVW	4(R1), Rtmp
		// MOVW	Rtmp, (R2)
		// ADDU	$4, R1
		// ADDU	$4, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPSCALLstatic, ssa.OpMIPSCALLclosure, ssa.OpMIPSCALLinter:
		s.Call(v)
	case ssa.OpMIPSCALLtail:
		s.TailCall(v)
	case ssa.OpMIPSLoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
	case ssa.OpMIPSLoweredPanicBoundsA, ssa.OpMIPSLoweredPanicBoundsB, ssa.OpMIPSLoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(8) // space used in callee args area by assembly stubs
	case ssa.OpMIPSLoweredPanicExtendA, ssa.OpMIPSLoweredPanicExtendB, ssa.OpMIPSLoweredPanicExtendC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.ExtendCheckFunc[v.AuxInt]
		s.UseArgs(12) // space used in callee args area by assembly stubs
	case ssa.OpMIPSLoweredAtomicLoad8,
		ssa.OpMIPSLoweredAtomicLoad32:
		s.Prog(mips.ASYNC)

		var op obj.As
		switch v.Op {
		case ssa.OpMIPSLoweredAtomicLoad8:
			op = mips.AMOVB
		case ssa.OpMIPSLoweredAtomicLoad32:
			op = mips.AMOVW
		}
		p := s.Prog(op)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStore8,
		ssa.OpMIPSLoweredAtomicStore32:
		s.Prog(mips.ASYNC)

		var op obj.As
		switch v.Op {
		case ssa.OpMIPSLoweredAtomicStore8:
			op = mips.AMOVB
		case ssa.OpMIPSLoweredAtomicStore32:
			op = mips.AMOVW
		}
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStorezero:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicExchange:
		// SYNC
		// MOVW	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicAdd:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDU	Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDU	Rarg1, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAddconst:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDU	$auxInt, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDU	$auxInt, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAnd,
		ssa.OpMIPSLoweredAtomicOr:
		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPSLoweredAtomicCas:
		// MOVW	$0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVW	Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(mips.AMOVW)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()

		p4 := s.Prog(mips.ASC)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()

		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1)

		s.Prog(mips.ASYNC)

		p6 := s.Prog(obj.ANOP)
		p2.To.SetTarget(p6)

	case ssa.OpMIPSLoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPSFPFlagTrue,
		ssa.OpMIPSFPFlagFalse:
		// MOVW	$1, r
		// CMOVF	R0, r

		cmov := mips.ACMOVF
		if v.Op == ssa.OpMIPSFPFlagFalse {
			cmov = mips.ACMOVT
		}
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p1 := s.Prog(cmov)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = mips.REGZERO
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg()

	case ssa.OpMIPSLoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPSLoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSLoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

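// blockJump maps each MIPS block kind to its branch instruction (asm, taken
// to the first successor when the condition holds) and the inverted branch
// (invasm) used when the first successor immediately follows the block.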
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPSEQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPSNE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPSLTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPSGEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPSLEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPSGTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPSFPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPSFPF: {mips.ABFPF, mips.ABFPT},
}

// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block that will be laid out immediately after b, so a branch
// or jump to it can be omitted.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit, ssa.BlockRetJmp:
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockMIPSEQ, ssa.BlockMIPSNE,
		ssa.BlockMIPSLTZ, ssa.BlockMIPSGEZ,
		ssa.BlockMIPSLEZ, ssa.BlockMIPSGTZ,
		ssa.BlockMIPSFPT, ssa.BlockMIPSFPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}