// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is HI or LO register
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
// r is the destination register: FP destinations select the float
// move variants, integer destinations select a width- and
// sign-appropriate integer move.
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
// Unlike loadByType, stores need no unsigned variants: only the low
// t.Size() bytes are written either way.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		}
	}
	panic("bad store type")
}

// ssaGenValue emits the machine instructions for a single SSA value v.
// Operand convention used throughout: for a three-register Prog,
// p.From is the second source operand, p.Reg the first, and p.To the
// destination (so the assembly reads "OP From, Reg, To").
// HI/LO and FP registers cannot be targets of many moves, so REGTMP
// is used as an intermediate in those cases.
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPSMOVWconvert, ssa.OpMIPSMOVWreg:
		t := v.Type
		if t.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVW
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVF
			if t.Size() == 8 {
				as = mips.AMOVD
			}
		}

		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPSMOVWnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPSADD,
		ssa.OpMIPSSUB,
		ssa.OpMIPSAND,
		ssa.OpMIPSOR,
		ssa.OpMIPSXOR,
		ssa.OpMIPSNOR,
		ssa.OpMIPSSLL,
		ssa.OpMIPSSRL,
		ssa.OpMIPSSRA,
		ssa.OpMIPSADDF,
		ssa.OpMIPSADDD,
		ssa.OpMIPSSUBF,
		ssa.OpMIPSSUBD,
		ssa.OpMIPSMULF,
		ssa.OpMIPSMULD,
		ssa.OpMIPSDIVF,
		ssa.OpMIPSDIVD,
		ssa.OpMIPSMUL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGT,
		ssa.OpMIPSSGTU:
		// Note the swapped operand placement relative to the
		// generic binary case above: From is arg0, Reg is arg1.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSSGTzero,
		ssa.OpMIPSSGTUzero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSADDconst,
		ssa.OpMIPSSUBconst,
		ssa.OpMIPSANDconst,
		ssa.OpMIPSORconst,
		ssa.OpMIPSXORconst,
		ssa.OpMIPSNORconst,
		ssa.OpMIPSSLLconst,
		ssa.OpMIPSSRLconst,
		ssa.OpMIPSSRAconst,
		ssa.OpMIPSSGTconst,
		ssa.OpMIPSSGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMULT,
		ssa.OpMIPSMULTU,
		ssa.OpMIPSDIV,
		ssa.OpMIPSDIVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPSMOVWconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVW)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPSMOVFconst,
		ssa.OpMIPSMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZ:
		// Conditional move; the result register must start out
		// holding arg0 (register allocator guarantees this, and we
		// assert it below).
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMOVZzero:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSCMPEQF,
		ssa.OpMIPSCMPEQD,
		ssa.OpMIPSCMPGEF,
		ssa.OpMIPSCMPGED,
		ssa.OpMIPSCMPGTF,
		ssa.OpMIPSCMPGTD:
		// FP comparisons set the FP condition flag; no destination register.
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPSMOVWaddr:
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVW $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		// when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVW $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBload,
		ssa.OpMIPSMOVBUload,
		ssa.OpMIPSMOVHload,
		ssa.OpMIPSMOVHUload,
		ssa.OpMIPSMOVWload,
		ssa.OpMIPSMOVFload,
		ssa.OpMIPSMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSMOVBstore,
		ssa.OpMIPSMOVHstore,
		ssa.OpMIPSMOVWstore,
		ssa.OpMIPSMOVFstore,
		ssa.OpMIPSMOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBstorezero,
		ssa.OpMIPSMOVHstorezero,
		ssa.OpMIPSMOVWstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPSMOVBreg,
		ssa.OpMIPSMOVBUreg,
		ssa.OpMIPSMOVHreg,
		ssa.OpMIPSMOVHUreg:
		// Look through value-preserving ops to see if the argument
		// is a load that already produced the right extension.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPSMOVWreg || a.Op == ssa.OpMIPSMOVWnop {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPSMOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPSMOVHUreg && t.Size() == 2 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVW)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPSMOVWF,
		ssa.OpMIPSMOVWD,
		ssa.OpMIPSTRUNCFW,
		ssa.OpMIPSTRUNCDW,
		ssa.OpMIPSMOVFD,
		ssa.OpMIPSMOVDF,
		ssa.OpMIPSNEGF,
		ssa.OpMIPSNEGD,
		ssa.OpMIPSSQRTD,
		ssa.OpMIPSCLZ:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSNEG:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPSLoweredZero:
		// SUBU	$4, R1
		// MOVW	R0, 4(R1)
		// ADDU	$4, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		// Element size (and hence the move width) is chosen from the
		// alignment encoded in AuxInt.
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPSLoweredMove:
		// SUBU	$4, R1
		// MOVW	4(R1), Rtmp
		// MOVW	Rtmp, (R2)
		// ADDU	$4, R1
		// ADDU	$4, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPSCALLstatic, ssa.OpMIPSCALLclosure, ssa.OpMIPSCALLinter:
		s.Call(v)
	case ssa.OpMIPSLoweredAtomicLoad:
		// SYNC barriers on both sides make the plain load atomic
		// with acquire/release-like ordering.
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStore:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicStorezero:
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicExchange:
		// SYNC
		// MOVW Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		// SC leaves 0 in Rtmp if the store failed; retry from the MOVW.
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)
	case ssa.OpMIPSLoweredAtomicAdd:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDU Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDU Rarg1, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		// The LL above put the old value in Rout; add again so Rout
		// holds the new value, as the op's result requires.
		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAddconst:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDU $auxInt, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDU $auxInt, Rout
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		p1 := s.Prog(mips.AADDU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

		p4 := s.Prog(mips.AADDU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()

	case ssa.OpMIPSLoweredAtomicAnd,
		ssa.OpMIPSLoweredAtomicOr:
		// SYNC
		// LL	(Rarg0), Rtmp
		// AND/OR	Rarg1, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		s.Prog(mips.ASYNC)

		p := s.Prog(mips.ALL)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP

		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = mips.REGTMP
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ASC)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()

		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)

		s.Prog(mips.ASYNC)

	case ssa.OpMIPSLoweredAtomicCas:
		// MOVW $0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVW Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()

		s.Prog(mips.ASYNC)

		p1 := s.Prog(mips.ALL)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP

		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH

		p3 := s.Prog(mips.AMOVW)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()

		p4 := s.Prog(mips.ASC)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()

		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)

		s.Prog(mips.ASYNC)

		// Landing pad for the compare-failed branch (p2): Rout is
		// still 0 from the initial MOVW.
		p6 := s.Prog(obj.ANOP)
		gc.Patch(p2, p6)

	case ssa.OpMIPSLoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPSFPFlagTrue,
		ssa.OpMIPSFPFlagFalse:
		// MOVW		$1, r
		// CMOVF	R0, r
		// The CMOV condition is inverted relative to the op because
		// it conditionally *clears* r (moves R0 in) when the flag
		// has the opposite value.
		cmov := mips.ACMOVF
		if v.Op == ssa.OpMIPSFPFlagFalse {
			cmov = mips.ACMOVT
		}
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p1 := s.Prog(cmov)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = mips.REGZERO
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg()

	case ssa.OpMIPSLoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPSLoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVW)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

// blockJump maps each conditional block kind to the branch instruction
// used when the true successor is the fallthrough target (asm) and the
// inverted branch used when the false successor falls through (invasm).
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPSEQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPSNE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPSLTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPSGEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPSLEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPSGTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPSFPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPSFPF: {mips.ABFPF, mips.ABFPT},
}

// ssaGenBlock emits the control-flow instructions ending block b.
// next is the block that will be laid out immediately after b; a jump
// to next can therefore be omitted or a branch inverted to fall through.
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPSEQ, ssa.BlockMIPSNE,
		ssa.BlockMIPSLTZ, ssa.BlockMIPSGEZ,
		ssa.BlockMIPSLEZ, ssa.BlockMIPSGTZ,
		ssa.BlockMIPSFPT, ssa.BlockMIPSFPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// True successor falls through: branch (inverted) to the false one.
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			// False successor falls through: branch to the true one.
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			// Neither successor falls through: branch then jump.
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}