github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/mips64/ssa.go (about) 1 // Copyright 2016 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package mips64 6 7 import ( 8 "math" 9 10 "github.com/gagliardetto/golang-go/cmd/compile/internal/gc" 11 "github.com/gagliardetto/golang-go/cmd/compile/internal/logopt" 12 "github.com/gagliardetto/golang-go/cmd/compile/internal/ssa" 13 "github.com/gagliardetto/golang-go/cmd/compile/internal/types" 14 "github.com/gagliardetto/golang-go/cmd/internal/obj" 15 "github.com/gagliardetto/golang-go/cmd/internal/obj/mips" 16 ) 17 18 // isFPreg reports whether r is an FP register 19 func isFPreg(r int16) bool { 20 return mips.REG_F0 <= r && r <= mips.REG_F31 21 } 22 23 // isHILO reports whether r is HI or LO register 24 func isHILO(r int16) bool { 25 return r == mips.REG_HI || r == mips.REG_LO 26 } 27 28 // loadByType returns the load instruction of the given type. 29 func loadByType(t *types.Type, r int16) obj.As { 30 if isFPreg(r) { 31 if t.Size() == 4 { // float32 or int32 32 return mips.AMOVF 33 } else { // float64 or int64 34 return mips.AMOVD 35 } 36 } else { 37 switch t.Size() { 38 case 1: 39 if t.IsSigned() { 40 return mips.AMOVB 41 } else { 42 return mips.AMOVBU 43 } 44 case 2: 45 if t.IsSigned() { 46 return mips.AMOVH 47 } else { 48 return mips.AMOVHU 49 } 50 case 4: 51 if t.IsSigned() { 52 return mips.AMOVW 53 } else { 54 return mips.AMOVWU 55 } 56 case 8: 57 return mips.AMOVV 58 } 59 } 60 panic("bad load type") 61 } 62 63 // storeByType returns the store instruction of the given type. 64 func storeByType(t *types.Type, r int16) obj.As { 65 if isFPreg(r) { 66 if t.Size() == 4 { // float32 or int32 67 return mips.AMOVF 68 } else { // float64 or int64 69 return mips.AMOVD 70 } 71 } else { 72 switch t.Size() { 73 case 1: 74 return mips.AMOVB 75 case 2: 76 return mips.AMOVH 77 case 4: 78 return mips.AMOVW 79 case 8: 80 return mips.AMOVV 81 } 82 } 83 panic("bad store type") 84 } 85 86 func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { 87 switch v.Op { 88 case ssa.OpCopy, ssa.OpMIPS64MOVVreg: 89 if v.Type.IsMemory() { 90 return 91 } 92 x := v.Args[0].Reg() 93 y := v.Reg() 94 if x == y { 95 return 96 } 97 as := mips.AMOVV 98 if isFPreg(x) && isFPreg(y) { 99 as = mips.AMOVD 100 } 101 p := s.Prog(as) 102 p.From.Type = obj.TYPE_REG 103 p.From.Reg = x 104 p.To.Type = obj.TYPE_REG 105 p.To.Reg = y 106 if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) { 107 // cannot move between special registers, use TMP as intermediate 108 p.To.Reg = mips.REGTMP 109 p = s.Prog(mips.AMOVV) 110 p.From.Type = obj.TYPE_REG 111 p.From.Reg = mips.REGTMP 112 p.To.Type = obj.TYPE_REG 113 p.To.Reg = y 114 } 115 case ssa.OpMIPS64MOVVnop: 116 if v.Reg() != v.Args[0].Reg() { 117 v.Fatalf("input[0] and output not in same register %s", v.LongString()) 118 } 119 // nothing to do 120 case ssa.OpLoadReg: 121 if v.Type.IsFlags() { 122 v.Fatalf("load flags not implemented: %v", v.LongString()) 123 return 124 } 125 r := v.Reg() 126 p := s.Prog(loadByType(v.Type, r)) 127 gc.AddrAuto(&p.From, v.Args[0]) 128 p.To.Type = obj.TYPE_REG 129 p.To.Reg = r 130 if isHILO(r) { 131 // cannot directly load, load to TMP and move 132 p.To.Reg = mips.REGTMP 133 p = s.Prog(mips.AMOVV) 134 p.From.Type = obj.TYPE_REG 135 p.From.Reg = mips.REGTMP 136 p.To.Type = obj.TYPE_REG 137 p.To.Reg = r 138 } 139 case ssa.OpStoreReg: 140 if v.Type.IsFlags() { 141 v.Fatalf("store flags not implemented: %v", v.LongString()) 142 return 143 } 144 r := v.Args[0].Reg() 145 if isHILO(r) { 146 // cannot directly store, move to TMP and store 147 p := s.Prog(mips.AMOVV) 148 p.From.Type = obj.TYPE_REG 149 p.From.Reg = r 150 p.To.Type = obj.TYPE_REG 151 p.To.Reg = mips.REGTMP 152 r = mips.REGTMP 153 } 154 p := s.Prog(storeByType(v.Type, r)) 155 p.From.Type = obj.TYPE_REG 156 p.From.Reg = r 157 gc.AddrAuto(&p.To, v) 158 case ssa.OpMIPS64ADDV, 159 ssa.OpMIPS64SUBV, 160 ssa.OpMIPS64AND, 161 ssa.OpMIPS64OR, 162 ssa.OpMIPS64XOR, 163 ssa.OpMIPS64NOR, 164 ssa.OpMIPS64SLLV, 165 ssa.OpMIPS64SRLV, 166 ssa.OpMIPS64SRAV, 167 ssa.OpMIPS64ADDF, 168 ssa.OpMIPS64ADDD, 169 ssa.OpMIPS64SUBF, 170 ssa.OpMIPS64SUBD, 171 ssa.OpMIPS64MULF, 172 ssa.OpMIPS64MULD, 173 ssa.OpMIPS64DIVF, 174 ssa.OpMIPS64DIVD: 175 p := s.Prog(v.Op.Asm()) 176 p.From.Type = obj.TYPE_REG 177 p.From.Reg = v.Args[1].Reg() 178 p.Reg = v.Args[0].Reg() 179 p.To.Type = obj.TYPE_REG 180 p.To.Reg = v.Reg() 181 case ssa.OpMIPS64SGT, 182 ssa.OpMIPS64SGTU: 183 p := s.Prog(v.Op.Asm()) 184 p.From.Type = obj.TYPE_REG 185 p.From.Reg = v.Args[0].Reg() 186 p.Reg = v.Args[1].Reg() 187 p.To.Type = obj.TYPE_REG 188 p.To.Reg = v.Reg() 189 case ssa.OpMIPS64ADDVconst, 190 ssa.OpMIPS64SUBVconst, 191 ssa.OpMIPS64ANDconst, 192 ssa.OpMIPS64ORconst, 193 ssa.OpMIPS64XORconst, 194 ssa.OpMIPS64NORconst, 195 ssa.OpMIPS64SLLVconst, 196 ssa.OpMIPS64SRLVconst, 197 ssa.OpMIPS64SRAVconst, 198 ssa.OpMIPS64SGTconst, 199 ssa.OpMIPS64SGTUconst: 200 p := s.Prog(v.Op.Asm()) 201 p.From.Type = obj.TYPE_CONST 202 p.From.Offset = v.AuxInt 203 p.Reg = v.Args[0].Reg() 204 p.To.Type = obj.TYPE_REG 205 p.To.Reg = v.Reg() 206 case ssa.OpMIPS64MULV, 207 ssa.OpMIPS64MULVU, 208 ssa.OpMIPS64DIVV, 209 ssa.OpMIPS64DIVVU: 210 // result in hi,lo 211 p := s.Prog(v.Op.Asm()) 212 p.From.Type = obj.TYPE_REG 213 p.From.Reg = v.Args[1].Reg() 214 p.Reg = v.Args[0].Reg() 215 case ssa.OpMIPS64MOVVconst: 216 r := v.Reg() 217 p := s.Prog(v.Op.Asm()) 218 p.From.Type = obj.TYPE_CONST 219 p.From.Offset = v.AuxInt 220 p.To.Type = obj.TYPE_REG 221 p.To.Reg = r 222 if isFPreg(r) || isHILO(r) { 223 // cannot move into FP or special registers, use TMP as intermediate 224 p.To.Reg = mips.REGTMP 225 p = s.Prog(mips.AMOVV) 226 p.From.Type = obj.TYPE_REG 227 p.From.Reg = mips.REGTMP 228 p.To.Type = obj.TYPE_REG 229 p.To.Reg = r 230 } 231 case ssa.OpMIPS64MOVFconst, 232 ssa.OpMIPS64MOVDconst: 233 p := s.Prog(v.Op.Asm()) 234 p.From.Type = obj.TYPE_FCONST 235 p.From.Val = math.Float64frombits(uint64(v.AuxInt)) 236 p.To.Type = obj.TYPE_REG 237 p.To.Reg = v.Reg() 238 case ssa.OpMIPS64CMPEQF, 239 ssa.OpMIPS64CMPEQD, 240 ssa.OpMIPS64CMPGEF, 241 ssa.OpMIPS64CMPGED, 242 ssa.OpMIPS64CMPGTF, 243 ssa.OpMIPS64CMPGTD: 244 p := s.Prog(v.Op.Asm()) 245 p.From.Type = obj.TYPE_REG 246 p.From.Reg = v.Args[0].Reg() 247 p.Reg = v.Args[1].Reg() 248 case ssa.OpMIPS64MOVVaddr: 249 p := s.Prog(mips.AMOVV) 250 p.From.Type = obj.TYPE_ADDR 251 p.From.Reg = v.Args[0].Reg() 252 var wantreg string 253 // MOVV $sym+off(base), R 254 // the assembler expands it as the following: 255 // - base is SP: add constant offset to SP (R29) 256 // when constant is large, tmp register (R23) may be used 257 // - base is SB: load external address with relocation 258 switch v.Aux.(type) { 259 default: 260 v.Fatalf("aux is of unknown type %T", v.Aux) 261 case *obj.LSym: 262 wantreg = "SB" 263 gc.AddAux(&p.From, v) 264 case *gc.Node: 265 wantreg = "SP" 266 gc.AddAux(&p.From, v) 267 case nil: 268 // No sym, just MOVV $off(SP), R 269 wantreg = "SP" 270 p.From.Offset = v.AuxInt 271 } 272 if reg := v.Args[0].RegName(); reg != wantreg { 273 v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg) 274 } 275 p.To.Type = obj.TYPE_REG 276 p.To.Reg = v.Reg() 277 case ssa.OpMIPS64MOVBload, 278 ssa.OpMIPS64MOVBUload, 279 ssa.OpMIPS64MOVHload, 280 ssa.OpMIPS64MOVHUload, 281 ssa.OpMIPS64MOVWload, 282 ssa.OpMIPS64MOVWUload, 283 ssa.OpMIPS64MOVVload, 284 ssa.OpMIPS64MOVFload, 285 ssa.OpMIPS64MOVDload: 286 p := s.Prog(v.Op.Asm()) 287 p.From.Type = obj.TYPE_MEM 288 p.From.Reg = v.Args[0].Reg() 289 gc.AddAux(&p.From, v) 290 p.To.Type = obj.TYPE_REG 291 p.To.Reg = v.Reg() 292 case ssa.OpMIPS64MOVBstore, 293 ssa.OpMIPS64MOVHstore, 294 ssa.OpMIPS64MOVWstore, 295 ssa.OpMIPS64MOVVstore, 296 ssa.OpMIPS64MOVFstore, 297 ssa.OpMIPS64MOVDstore: 298 p := s.Prog(v.Op.Asm()) 299 p.From.Type = obj.TYPE_REG 300 p.From.Reg = v.Args[1].Reg() 301 p.To.Type = obj.TYPE_MEM 302 p.To.Reg = v.Args[0].Reg() 303 gc.AddAux(&p.To, v) 304 case ssa.OpMIPS64MOVBstorezero, 305 ssa.OpMIPS64MOVHstorezero, 306 ssa.OpMIPS64MOVWstorezero, 307 ssa.OpMIPS64MOVVstorezero: 308 p := s.Prog(v.Op.Asm()) 309 p.From.Type = obj.TYPE_REG 310 p.From.Reg = mips.REGZERO 311 p.To.Type = obj.TYPE_MEM 312 p.To.Reg = v.Args[0].Reg() 313 gc.AddAux(&p.To, v) 314 case ssa.OpMIPS64MOVBreg, 315 ssa.OpMIPS64MOVBUreg, 316 ssa.OpMIPS64MOVHreg, 317 ssa.OpMIPS64MOVHUreg, 318 ssa.OpMIPS64MOVWreg, 319 ssa.OpMIPS64MOVWUreg: 320 a := v.Args[0] 321 for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg { 322 a = a.Args[0] 323 } 324 if a.Op == ssa.OpLoadReg { 325 t := a.Type 326 switch { 327 case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(), 328 v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(), 329 v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(), 330 v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(), 331 v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(), 332 v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned(): 333 // arg is a proper-typed load, already zero/sign-extended, don't extend again 334 if v.Reg() == v.Args[0].Reg() { 335 return 336 } 337 p := s.Prog(mips.AMOVV) 338 p.From.Type = obj.TYPE_REG 339 p.From.Reg = v.Args[0].Reg() 340 p.To.Type = obj.TYPE_REG 341 p.To.Reg = v.Reg() 342 return 343 default: 344 } 345 } 346 fallthrough 347 case ssa.OpMIPS64MOVWF, 348 ssa.OpMIPS64MOVWD, 349 ssa.OpMIPS64TRUNCFW, 350 ssa.OpMIPS64TRUNCDW, 351 ssa.OpMIPS64MOVVF, 352 ssa.OpMIPS64MOVVD, 353 ssa.OpMIPS64TRUNCFV, 354 ssa.OpMIPS64TRUNCDV, 355 ssa.OpMIPS64MOVFD, 356 ssa.OpMIPS64MOVDF, 357 ssa.OpMIPS64NEGF, 358 ssa.OpMIPS64NEGD, 359 ssa.OpMIPS64SQRTD: 360 p := s.Prog(v.Op.Asm()) 361 p.From.Type = obj.TYPE_REG 362 p.From.Reg = v.Args[0].Reg() 363 p.To.Type = obj.TYPE_REG 364 p.To.Reg = v.Reg() 365 case ssa.OpMIPS64NEGV: 366 // SUB from REGZERO 367 p := s.Prog(mips.ASUBVU) 368 p.From.Type = obj.TYPE_REG 369 p.From.Reg = v.Args[0].Reg() 370 p.Reg = mips.REGZERO 371 p.To.Type = obj.TYPE_REG 372 p.To.Reg = v.Reg() 373 case ssa.OpMIPS64DUFFZERO: 374 // runtime.duffzero expects start address - 8 in R1 375 p := s.Prog(mips.ASUBVU) 376 p.From.Type = obj.TYPE_CONST 377 p.From.Offset = 8 378 p.Reg = v.Args[0].Reg() 379 p.To.Type = obj.TYPE_REG 380 p.To.Reg = mips.REG_R1 381 p = s.Prog(obj.ADUFFZERO) 382 p.To.Type = obj.TYPE_MEM 383 p.To.Name = obj.NAME_EXTERN 384 p.To.Sym = gc.Duffzero 385 p.To.Offset = v.AuxInt 386 case ssa.OpMIPS64LoweredZero: 387 // SUBV $8, R1 388 // MOVV R0, 8(R1) 389 // ADDV $8, R1 390 // BNE Rarg1, R1, -2(PC) 391 // arg1 is the address of the last element to zero 392 var sz int64 393 var mov obj.As 394 switch { 395 case v.AuxInt%8 == 0: 396 sz = 8 397 mov = mips.AMOVV 398 case v.AuxInt%4 == 0: 399 sz = 4 400 mov = mips.AMOVW 401 case v.AuxInt%2 == 0: 402 sz = 2 403 mov = mips.AMOVH 404 default: 405 sz = 1 406 mov = mips.AMOVB 407 } 408 p := s.Prog(mips.ASUBVU) 409 p.From.Type = obj.TYPE_CONST 410 p.From.Offset = sz 411 p.To.Type = obj.TYPE_REG 412 p.To.Reg = mips.REG_R1 413 p2 := s.Prog(mov) 414 p2.From.Type = obj.TYPE_REG 415 p2.From.Reg = mips.REGZERO 416 p2.To.Type = obj.TYPE_MEM 417 p2.To.Reg = mips.REG_R1 418 p2.To.Offset = sz 419 p3 := s.Prog(mips.AADDVU) 420 p3.From.Type = obj.TYPE_CONST 421 p3.From.Offset = sz 422 p3.To.Type = obj.TYPE_REG 423 p3.To.Reg = mips.REG_R1 424 p4 := s.Prog(mips.ABNE) 425 p4.From.Type = obj.TYPE_REG 426 p4.From.Reg = v.Args[1].Reg() 427 p4.Reg = mips.REG_R1 428 p4.To.Type = obj.TYPE_BRANCH 429 gc.Patch(p4, p2) 430 case ssa.OpMIPS64DUFFCOPY: 431 p := s.Prog(obj.ADUFFCOPY) 432 p.To.Type = obj.TYPE_MEM 433 p.To.Name = obj.NAME_EXTERN 434 p.To.Sym = gc.Duffcopy 435 p.To.Offset = v.AuxInt 436 case ssa.OpMIPS64LoweredMove: 437 // SUBV $8, R1 438 // MOVV 8(R1), Rtmp 439 // MOVV Rtmp, (R2) 440 // ADDV $8, R1 441 // ADDV $8, R2 442 // BNE Rarg2, R1, -4(PC) 443 // arg2 is the address of the last element of src 444 var sz int64 445 var mov obj.As 446 switch { 447 case v.AuxInt%8 == 0: 448 sz = 8 449 mov = mips.AMOVV 450 case v.AuxInt%4 == 0: 451 sz = 4 452 mov = mips.AMOVW 453 case v.AuxInt%2 == 0: 454 sz = 2 455 mov = mips.AMOVH 456 default: 457 sz = 1 458 mov = mips.AMOVB 459 } 460 p := s.Prog(mips.ASUBVU) 461 p.From.Type = obj.TYPE_CONST 462 p.From.Offset = sz 463 p.To.Type = obj.TYPE_REG 464 p.To.Reg = mips.REG_R1 465 p2 := s.Prog(mov) 466 p2.From.Type = obj.TYPE_MEM 467 p2.From.Reg = mips.REG_R1 468 p2.From.Offset = sz 469 p2.To.Type = obj.TYPE_REG 470 p2.To.Reg = mips.REGTMP 471 p3 := s.Prog(mov) 472 p3.From.Type = obj.TYPE_REG 473 p3.From.Reg = mips.REGTMP 474 p3.To.Type = obj.TYPE_MEM 475 p3.To.Reg = mips.REG_R2 476 p4 := s.Prog(mips.AADDVU) 477 p4.From.Type = obj.TYPE_CONST 478 p4.From.Offset = sz 479 p4.To.Type = obj.TYPE_REG 480 p4.To.Reg = mips.REG_R1 481 p5 := s.Prog(mips.AADDVU) 482 p5.From.Type = obj.TYPE_CONST 483 p5.From.Offset = sz 484 p5.To.Type = obj.TYPE_REG 485 p5.To.Reg = mips.REG_R2 486 p6 := s.Prog(mips.ABNE) 487 p6.From.Type = obj.TYPE_REG 488 p6.From.Reg = v.Args[2].Reg() 489 p6.Reg = mips.REG_R1 490 p6.To.Type = obj.TYPE_BRANCH 491 gc.Patch(p6, p2) 492 case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter: 493 s.Call(v) 494 case ssa.OpMIPS64LoweredWB: 495 p := s.Prog(obj.ACALL) 496 p.To.Type = obj.TYPE_MEM 497 p.To.Name = obj.NAME_EXTERN 498 p.To.Sym = v.Aux.(*obj.LSym) 499 case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC: 500 p := s.Prog(obj.ACALL) 501 p.To.Type = obj.TYPE_MEM 502 p.To.Name = obj.NAME_EXTERN 503 p.To.Sym = gc.BoundsCheckFunc[v.AuxInt] 504 s.UseArgs(16) // space used in callee args area by assembly stubs 505 case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64: 506 as := mips.AMOVV 507 switch v.Op { 508 case ssa.OpMIPS64LoweredAtomicLoad8: 509 as = mips.AMOVB 510 case ssa.OpMIPS64LoweredAtomicLoad32: 511 as = mips.AMOVW 512 } 513 s.Prog(mips.ASYNC) 514 p := s.Prog(as) 515 p.From.Type = obj.TYPE_MEM 516 p.From.Reg = v.Args[0].Reg() 517 p.To.Type = obj.TYPE_REG 518 p.To.Reg = v.Reg0() 519 s.Prog(mips.ASYNC) 520 case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64: 521 as := mips.AMOVV 522 switch v.Op { 523 case ssa.OpMIPS64LoweredAtomicStore8: 524 as = mips.AMOVB 525 case ssa.OpMIPS64LoweredAtomicStore32: 526 as = mips.AMOVW 527 } 528 s.Prog(mips.ASYNC) 529 p := s.Prog(as) 530 p.From.Type = obj.TYPE_REG 531 p.From.Reg = v.Args[1].Reg() 532 p.To.Type = obj.TYPE_MEM 533 p.To.Reg = v.Args[0].Reg() 534 s.Prog(mips.ASYNC) 535 case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64: 536 as := mips.AMOVV 537 if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 { 538 as = mips.AMOVW 539 } 540 s.Prog(mips.ASYNC) 541 p := s.Prog(as) 542 p.From.Type = obj.TYPE_REG 543 p.From.Reg = mips.REGZERO 544 p.To.Type = obj.TYPE_MEM 545 p.To.Reg = v.Args[0].Reg() 546 s.Prog(mips.ASYNC) 547 case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64: 548 // SYNC 549 // MOVV Rarg1, Rtmp 550 // LL (Rarg0), Rout 551 // SC Rtmp, (Rarg0) 552 // BEQ Rtmp, -3(PC) 553 // SYNC 554 ll := mips.ALLV 555 sc := mips.ASCV 556 if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 { 557 ll = mips.ALL 558 sc = mips.ASC 559 } 560 s.Prog(mips.ASYNC) 561 p := s.Prog(mips.AMOVV) 562 p.From.Type = obj.TYPE_REG 563 p.From.Reg = v.Args[1].Reg() 564 p.To.Type = obj.TYPE_REG 565 p.To.Reg = mips.REGTMP 566 p1 := s.Prog(ll) 567 p1.From.Type = obj.TYPE_MEM 568 p1.From.Reg = v.Args[0].Reg() 569 p1.To.Type = obj.TYPE_REG 570 p1.To.Reg = v.Reg0() 571 p2 := s.Prog(sc) 572 p2.From.Type = obj.TYPE_REG 573 p2.From.Reg = mips.REGTMP 574 p2.To.Type = obj.TYPE_MEM 575 p2.To.Reg = v.Args[0].Reg() 576 p3 := s.Prog(mips.ABEQ) 577 p3.From.Type = obj.TYPE_REG 578 p3.From.Reg = mips.REGTMP 579 p3.To.Type = obj.TYPE_BRANCH 580 gc.Patch(p3, p) 581 s.Prog(mips.ASYNC) 582 case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64: 583 // SYNC 584 // LL (Rarg0), Rout 585 // ADDV Rarg1, Rout, Rtmp 586 // SC Rtmp, (Rarg0) 587 // BEQ Rtmp, -3(PC) 588 // SYNC 589 // ADDV Rarg1, Rout 590 ll := mips.ALLV 591 sc := mips.ASCV 592 if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 { 593 ll = mips.ALL 594 sc = mips.ASC 595 } 596 s.Prog(mips.ASYNC) 597 p := s.Prog(ll) 598 p.From.Type = obj.TYPE_MEM 599 p.From.Reg = v.Args[0].Reg() 600 p.To.Type = obj.TYPE_REG 601 p.To.Reg = v.Reg0() 602 p1 := s.Prog(mips.AADDVU) 603 p1.From.Type = obj.TYPE_REG 604 p1.From.Reg = v.Args[1].Reg() 605 p1.Reg = v.Reg0() 606 p1.To.Type = obj.TYPE_REG 607 p1.To.Reg = mips.REGTMP 608 p2 := s.Prog(sc) 609 p2.From.Type = obj.TYPE_REG 610 p2.From.Reg = mips.REGTMP 611 p2.To.Type = obj.TYPE_MEM 612 p2.To.Reg = v.Args[0].Reg() 613 p3 := s.Prog(mips.ABEQ) 614 p3.From.Type = obj.TYPE_REG 615 p3.From.Reg = mips.REGTMP 616 p3.To.Type = obj.TYPE_BRANCH 617 gc.Patch(p3, p) 618 s.Prog(mips.ASYNC) 619 p4 := s.Prog(mips.AADDVU) 620 p4.From.Type = obj.TYPE_REG 621 p4.From.Reg = v.Args[1].Reg() 622 p4.Reg = v.Reg0() 623 p4.To.Type = obj.TYPE_REG 624 p4.To.Reg = v.Reg0() 625 case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64: 626 // SYNC 627 // LL (Rarg0), Rout 628 // ADDV $auxint, Rout, Rtmp 629 // SC Rtmp, (Rarg0) 630 // BEQ Rtmp, -3(PC) 631 // SYNC 632 // ADDV $auxint, Rout 633 ll := mips.ALLV 634 sc := mips.ASCV 635 if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 { 636 ll = mips.ALL 637 sc = mips.ASC 638 } 639 s.Prog(mips.ASYNC) 640 p := s.Prog(ll) 641 p.From.Type = obj.TYPE_MEM 642 p.From.Reg = v.Args[0].Reg() 643 p.To.Type = obj.TYPE_REG 644 p.To.Reg = v.Reg0() 645 p1 := s.Prog(mips.AADDVU) 646 p1.From.Type = obj.TYPE_CONST 647 p1.From.Offset = v.AuxInt 648 p1.Reg = v.Reg0() 649 p1.To.Type = obj.TYPE_REG 650 p1.To.Reg = mips.REGTMP 651 p2 := s.Prog(sc) 652 p2.From.Type = obj.TYPE_REG 653 p2.From.Reg = mips.REGTMP 654 p2.To.Type = obj.TYPE_MEM 655 p2.To.Reg = v.Args[0].Reg() 656 p3 := s.Prog(mips.ABEQ) 657 p3.From.Type = obj.TYPE_REG 658 p3.From.Reg = mips.REGTMP 659 p3.To.Type = obj.TYPE_BRANCH 660 gc.Patch(p3, p) 661 s.Prog(mips.ASYNC) 662 p4 := s.Prog(mips.AADDVU) 663 p4.From.Type = obj.TYPE_CONST 664 p4.From.Offset = v.AuxInt 665 p4.Reg = v.Reg0() 666 p4.To.Type = obj.TYPE_REG 667 p4.To.Reg = v.Reg0() 668 case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64: 669 // MOVV $0, Rout 670 // SYNC 671 // LL (Rarg0), Rtmp 672 // BNE Rtmp, Rarg1, 4(PC) 673 // MOVV Rarg2, Rout 674 // SC Rout, (Rarg0) 675 // BEQ Rout, -4(PC) 676 // SYNC 677 ll := mips.ALLV 678 sc := mips.ASCV 679 if v.Op == ssa.OpMIPS64LoweredAtomicCas32 { 680 ll = mips.ALL 681 sc = mips.ASC 682 } 683 p := s.Prog(mips.AMOVV) 684 p.From.Type = obj.TYPE_REG 685 p.From.Reg = mips.REGZERO 686 p.To.Type = obj.TYPE_REG 687 p.To.Reg = v.Reg0() 688 s.Prog(mips.ASYNC) 689 p1 := s.Prog(ll) 690 p1.From.Type = obj.TYPE_MEM 691 p1.From.Reg = v.Args[0].Reg() 692 p1.To.Type = obj.TYPE_REG 693 p1.To.Reg = mips.REGTMP 694 p2 := s.Prog(mips.ABNE) 695 p2.From.Type = obj.TYPE_REG 696 p2.From.Reg = v.Args[1].Reg() 697 p2.Reg = mips.REGTMP 698 p2.To.Type = obj.TYPE_BRANCH 699 p3 := s.Prog(mips.AMOVV) 700 p3.From.Type = obj.TYPE_REG 701 p3.From.Reg = v.Args[2].Reg() 702 p3.To.Type = obj.TYPE_REG 703 p3.To.Reg = v.Reg0() 704 p4 := s.Prog(sc) 705 p4.From.Type = obj.TYPE_REG 706 p4.From.Reg = v.Reg0() 707 p4.To.Type = obj.TYPE_MEM 708 p4.To.Reg = v.Args[0].Reg() 709 p5 := s.Prog(mips.ABEQ) 710 p5.From.Type = obj.TYPE_REG 711 p5.From.Reg = v.Reg0() 712 p5.To.Type = obj.TYPE_BRANCH 713 gc.Patch(p5, p1) 714 p6 := s.Prog(mips.ASYNC) 715 gc.Patch(p2, p6) 716 case ssa.OpMIPS64LoweredNilCheck: 717 // Issue a load which will fault if arg is nil. 718 p := s.Prog(mips.AMOVB) 719 p.From.Type = obj.TYPE_MEM 720 p.From.Reg = v.Args[0].Reg() 721 gc.AddAux(&p.From, v) 722 p.To.Type = obj.TYPE_REG 723 p.To.Reg = mips.REGTMP 724 if logopt.Enabled() { 725 logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) 726 } 727 if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers 728 gc.Warnl(v.Pos, "generated nil check") 729 } 730 case ssa.OpMIPS64FPFlagTrue, 731 ssa.OpMIPS64FPFlagFalse: 732 // MOVV $0, r 733 // BFPF 2(PC) 734 // MOVV $1, r 735 branch := mips.ABFPF 736 if v.Op == ssa.OpMIPS64FPFlagFalse { 737 branch = mips.ABFPT 738 } 739 p := s.Prog(mips.AMOVV) 740 p.From.Type = obj.TYPE_REG 741 p.From.Reg = mips.REGZERO 742 p.To.Type = obj.TYPE_REG 743 p.To.Reg = v.Reg() 744 p2 := s.Prog(branch) 745 p2.To.Type = obj.TYPE_BRANCH 746 p3 := s.Prog(mips.AMOVV) 747 p3.From.Type = obj.TYPE_CONST 748 p3.From.Offset = 1 749 p3.To.Type = obj.TYPE_REG 750 p3.To.Reg = v.Reg() 751 p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land 752 gc.Patch(p2, p4) 753 case ssa.OpMIPS64LoweredGetClosurePtr: 754 // Closure pointer is R22 (mips.REGCTXT). 755 gc.CheckLoweredGetClosurePtr(v) 756 case ssa.OpMIPS64LoweredGetCallerSP: 757 // caller's SP is FixedFrameSize below the address of the first arg 758 p := s.Prog(mips.AMOVV) 759 p.From.Type = obj.TYPE_ADDR 760 p.From.Offset = -gc.Ctxt.FixedFrameSize() 761 p.From.Name = obj.NAME_PARAM 762 p.To.Type = obj.TYPE_REG 763 p.To.Reg = v.Reg() 764 case ssa.OpMIPS64LoweredGetCallerPC: 765 p := s.Prog(obj.AGETCALLERPC) 766 p.To.Type = obj.TYPE_REG 767 p.To.Reg = v.Reg() 768 case ssa.OpClobber: 769 // TODO: implement for clobberdead experiment. Nop is ok for now. 770 default: 771 v.Fatalf("genValue not implemented: %s", v.LongString()) 772 } 773 } 774 775 var blockJump = map[ssa.BlockKind]struct { 776 asm, invasm obj.As 777 }{ 778 ssa.BlockMIPS64EQ: {mips.ABEQ, mips.ABNE}, 779 ssa.BlockMIPS64NE: {mips.ABNE, mips.ABEQ}, 780 ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ}, 781 ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ}, 782 ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ}, 783 ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ}, 784 ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF}, 785 ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT}, 786 } 787 788 func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) { 789 switch b.Kind { 790 case ssa.BlockPlain: 791 if b.Succs[0].Block() != next { 792 p := s.Prog(obj.AJMP) 793 p.To.Type = obj.TYPE_BRANCH 794 s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()}) 795 } 796 case ssa.BlockDefer: 797 // defer returns in R1: 798 // 0 if we should continue executing 799 // 1 if we should jump to deferreturn call 800 p := s.Prog(mips.ABNE) 801 p.From.Type = obj.TYPE_REG 802 p.From.Reg = mips.REGZERO 803 p.Reg = mips.REG_R1 804 p.To.Type = obj.TYPE_BRANCH 805 s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()}) 806 if b.Succs[0].Block() != next { 807 p := s.Prog(obj.AJMP) 808 p.To.Type = obj.TYPE_BRANCH 809 s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()}) 810 } 811 case ssa.BlockExit: 812 case ssa.BlockRet: 813 s.Prog(obj.ARET) 814 case ssa.BlockRetJmp: 815 p := s.Prog(obj.ARET) 816 p.To.Type = obj.TYPE_MEM 817 p.To.Name = obj.NAME_EXTERN 818 p.To.Sym = b.Aux.(*obj.LSym) 819 case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE, 820 ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ, 821 ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ, 822 ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF: 823 jmp := blockJump[b.Kind] 824 var p *obj.Prog 825 switch next { 826 case b.Succs[0].Block(): 827 p = s.Br(jmp.invasm, b.Succs[1].Block()) 828 case b.Succs[1].Block(): 829 p = s.Br(jmp.asm, b.Succs[0].Block()) 830 default: 831 if b.Likely != ssa.BranchUnlikely { 832 p = s.Br(jmp.asm, b.Succs[0].Block()) 833 s.Br(obj.AJMP, b.Succs[1].Block()) 834 } else { 835 p = s.Br(jmp.invasm, b.Succs[1].Block()) 836 s.Br(obj.AJMP, b.Succs[0].Block()) 837 } 838 } 839 if !b.Controls[0].Type.IsFlags() { 840 p.From.Type = obj.TYPE_REG 841 p.From.Reg = b.Controls[0].Reg() 842 } 843 default: 844 b.Fatalf("branch not implemented: %s", b.LongString()) 845 } 846 }