github.com/corona10/go@v0.0.0-20180224231303-7a218942be57/src/cmd/compile/internal/mips64/ssa.go

// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// isFPreg returns whether r is an FP register.
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is the HI or LO register.
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

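// ssaGenValue generates machine code for a single SSA value, emitting
// one or more Progs onto s.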
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if v.Reg() != v.Args[0].Reg() {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		gc.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		gc.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		//   when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *gc.Node:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
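	// For the memory ops below, Aux/AuxInt carry an optional symbol and
	// byte offset; gc.AddAux folds them into the obj.Addr operand.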
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64SQRTD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Duffzero
		p.To.Offset = v.AuxInt
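	// For the open-coded zero and move loops below, AuxInt holds the
	// known alignment of the pointer(s), so the switch can pick the
	// widest load/store that the alignment permits.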
	case ssa.OpMIPS64LoweredZero:
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2)
	case ssa.OpMIPS64LoweredMove:
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicLoad32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStore32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
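	// The read-modify-write atomics below are LL/SC (load-linked /
	// store-conditional) loops bracketed by SYNC barriers. SC writes 1
	// to its source register on success and 0 on failure, so the BEQ on
	// that register retries the loop when the store-conditional fails.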
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// SYNC
		// MOVV	Rarg1, Rtmp
		// LL	(Rarg0), Rout
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV	Rarg1, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV	Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// SYNC
		// LL	(Rarg0), Rout
		// ADDV	$auxint, Rout, Rtmp
		// SC	Rtmp, (Rarg0)
		// BEQ	Rtmp, -3(PC)
		// SYNC
		// ADDV	$auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		gc.Patch(p3, p)
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
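	// In the CAS loop below, Rout doubles as the boolean result: it is
	// zeroed up front (so a failed compare reports false), and on the
	// store path it briefly holds the new value, which SC then replaces
	// with 1 on success or 0 on failure.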
	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// MOVV	$0, Rout
		// SYNC
		// LL	(Rarg0), Rtmp
		// BNE	Rtmp, Rarg1, 4(PC)
		// MOVV	Rarg2, Rout
		// SC	Rout, (Rarg0)
		// BEQ	Rout, -4(PC)
		// SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		gc.Patch(p5, p1)
		p6 := s.Prog(mips.ASYNC)
		gc.Patch(p2, p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			gc.Warnl(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -gc.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber:
		// TODO: implement for clobberdead experiment. Nop is ok for now.
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

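// blockJump maps each conditional block kind to its branch instruction:
// asm branches to the first successor; invasm is the inverted condition,
// used when the first successor is the fallthrough block.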
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

// ssaGenBlock emits the branch or jump instructions that end block b.
// next is the block that will be laid out immediately after b.
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		s.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			p = s.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := s.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Control.Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}